Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-12 00:18:09 -05:00)

Compare commits: improvemen...feat/while (11 commits)
| Author | SHA1 | Date |
| --- | --- | --- |
|  | 6f450c4f8e |  |
|  | 7e23e942d7 |  |
|  | 7fcbafab97 |  |
|  | 056dc2879c |  |
|  | 1aec32b7e2 |  |
|  | 4e3a3bd1b1 |  |
|  | 36773e8cdb |  |
|  | faa094195a |  |
|  | 69319d21cd |  |
|  | 8362fd7a83 |  |
|  | 39ad793a9a |  |
.github/workflows/ci.yml (vendored, 2 changed lines)
@@ -26,7 +26,7 @@ jobs:
          node-version: latest

      - name: Install dependencies
-       run: bun install
+       run: bun install --frozen-lockfile

      - name: Run tests with coverage
        env:
@@ -33,15 +33,12 @@
        "microsoft_planner",
        "microsoft_teams",
        "mistral_parse",
-       "mysql",
        "notion",
        "onedrive",
        "openai",
        "outlook",
-       "parallel_ai",
        "perplexity",
        "pinecone",
-       "postgresql",
        "qdrant",
        "reddit",
        "s3",
@@ -1,180 +0,0 @@
|
||||
---
|
||||
title: MySQL
|
||||
description: Connect to MySQL database
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="mysql"
|
||||
color="#E0E0E0"
|
||||
icon={true}
|
||||
iconSvg={`<svg className="block-icon"
|
||||
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
|
||||
|
||||
viewBox='0 0 25.6 25.6'
|
||||
>
|
||||
<path
|
||||
d='M179.076 94.886c-3.568-.1-6.336.268-8.656 1.25-.668.27-1.74.27-1.828 1.116.357.355.4.936.713 1.428.535.893 1.473 2.096 2.32 2.72l2.855 2.053c1.74 1.07 3.703 1.695 5.398 2.766.982.625 1.963 1.428 2.945 2.098.5.357.803.938 1.428 1.16v-.135c-.312-.4-.402-.98-.713-1.428l-1.34-1.293c-1.293-1.74-2.9-3.258-4.64-4.506-1.428-.982-4.55-2.32-5.13-3.97l-.088-.1c.98-.1 2.14-.447 3.078-.715 1.518-.4 2.9-.312 4.46-.713l2.143-.625v-.4c-.803-.803-1.383-1.874-2.23-2.632-2.275-1.963-4.775-3.882-7.363-5.488-1.383-.892-3.168-1.473-4.64-2.23-.537-.268-1.428-.402-1.74-.848-.805-.98-1.25-2.275-1.83-3.436l-3.658-7.763c-.803-1.74-1.295-3.48-2.275-5.086-4.596-7.585-9.594-12.18-17.268-16.687-1.65-.937-3.613-1.34-5.7-1.83l-3.346-.18c-.715-.312-1.428-1.16-2.053-1.562-2.543-1.606-9.102-5.086-10.977-.5-1.205 2.9 1.785 5.755 2.8 7.228.76 1.026 1.74 2.186 2.277 3.346.3.758.4 1.562.713 2.365.713 1.963 1.383 4.15 2.32 5.98.5.937 1.025 1.92 1.65 2.767.357.5.982.714 1.115 1.517-.625.893-.668 2.23-1.025 3.347-1.607 5.042-.982 11.288 1.293 15 .715 1.115 2.4 3.57 4.686 2.632 2.008-.803 1.56-3.346 2.14-5.577.135-.535.045-.892.312-1.25v.1l1.83 3.703c1.383 2.186 3.793 4.462 5.8 5.98 1.07.803 1.918 2.187 3.256 2.677v-.135h-.088c-.268-.4-.67-.58-1.027-.892-.803-.803-1.695-1.785-2.32-2.677-1.873-2.498-3.523-5.265-4.996-8.12-.715-1.383-1.34-2.9-1.918-4.283-.27-.536-.27-1.34-.715-1.606-.67.98-1.65 1.83-2.143 3.034-.848 1.918-.936 4.283-1.248 6.737-.18.045-.1 0-.18.1-1.426-.356-1.918-1.83-2.453-3.078-1.338-3.168-1.562-8.254-.402-11.913.312-.937 1.652-3.882 1.117-4.774-.27-.848-1.16-1.338-1.652-2.008-.58-.848-1.203-1.918-1.605-2.855-1.07-2.5-1.605-5.265-2.766-7.764-.537-1.16-1.473-2.365-2.232-3.435-.848-1.205-1.783-2.053-2.453-3.48-.223-.5-.535-1.294-.178-1.83.088-.357.268-.5.623-.58.58-.5 2.232.134 2.812.4 1.65.67 3.033 1.294 4.416 2.23.625.446 1.295 1.294 2.098 1.518h.938c1.428.312 3.033.1 4.37.5 2.365.76 4.506 1.874 6.426 3.08 5.844 3.703 10.664 8.968 13.92 15.26.535 1.026.758 1.963 1.25 3.034.938 2.187 2.098 4.417 3.033 6.56.938 2.097 1.83 4.24 3.168 5.98.67.937 3.346 1.427 4.55 1.918.893.4 2.275.76 3.08 1.25 1.516.937 3.033 2.008 4.46 3.034.713.534 2.945 1.65 3.078 2.54zm-45.5-38.772a7.09 7.09 0 0 0-1.828.223v.1h.088c.357.714.982 1.205 1.428 1.83l1.027 2.142.088-.1c.625-.446.938-1.16.938-2.23-.268-.312-.312-.625-.535-.937-.268-.446-.848-.67-1.206-1.026z'
|
||||
transform='matrix(.390229 0 0 .38781 -46.300037 -16.856717)'
|
||||
fillRule='evenodd'
|
||||
fill='#00678c'
|
||||
/>
|
||||
</svg>`}
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
The [MySQL](https://www.mysql.com/) tool enables you to connect to any MySQL database and perform a wide range of database operations directly within your agentic workflows. With secure connection handling and flexible configuration, you can easily manage and interact with your data.

With the MySQL tool, you can:

- **Query data**: Execute SELECT queries to retrieve data from your MySQL tables using the `mysql_query` operation.
- **Insert records**: Add new rows to your tables with the `mysql_insert` operation by specifying the table and data to insert.
- **Update records**: Modify existing data in your tables using the `mysql_update` operation, providing the table, new data, and WHERE conditions.
- **Delete records**: Remove rows from your tables with the `mysql_delete` operation, specifying the table and WHERE conditions.
- **Execute raw SQL**: Run any custom SQL command using the `mysql_execute` operation for advanced use cases.

The MySQL tool is ideal for scenarios where your agents need to interact with structured data—such as automating reporting, syncing data between systems, or powering data-driven workflows. It streamlines database access, making it easy to read, write, and manage your MySQL data programmatically.
{/* MANUAL-CONTENT-END */}
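
For illustration, here is a minimal sketch of invoking the query operation over HTTP from TypeScript. The base URL and the `/api/tools/mysql/query` path are assumptions about a local Sim deployment; the request fields mirror the `mysql_query` parameters documented below.

```ts
// Hypothetical call to the MySQL tool's query endpoint (URL is an assumption).
async function runMySQLQuery() {
  const response = await fetch('http://localhost:3000/api/tools/mysql/query', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      host: 'db.example.com',
      port: 3306,
      database: 'analytics',
      username: 'readonly_user',
      password: process.env.MYSQL_PASSWORD,
      ssl: 'required',
      query: 'SELECT id, email FROM users LIMIT 10',
    }),
  })

  // The response shape matches the Output table: message, rows, rowCount.
  const { message, rows, rowCount } = await response.json()
  console.log(message, rowCount, rows)
}
```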
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Connect to any MySQL database to execute queries, manage data, and perform database operations. Supports SELECT, INSERT, UPDATE, DELETE operations with secure connection handling.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `mysql_query`
|
||||
|
||||
Execute SELECT query on MySQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | MySQL server hostname or IP address |
|
||||
| `port` | number | Yes | MySQL server port \(default: 3306\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `query` | string | Yes | SQL SELECT query to execute |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Array of rows returned from the query |
|
||||
| `rowCount` | number | Number of rows returned |
|
||||
|
||||
### `mysql_insert`
|
||||
|
||||
Insert new record into MySQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | MySQL server hostname or IP address |
|
||||
| `port` | number | Yes | MySQL server port \(default: 3306\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `table` | string | Yes | Table name to insert into |
|
||||
| `data` | object | Yes | Data to insert as key-value pairs |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Array of inserted rows |
|
||||
| `rowCount` | number | Number of rows inserted |
|
||||
|
||||
### `mysql_update`
|
||||
|
||||
Update existing records in MySQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | MySQL server hostname or IP address |
|
||||
| `port` | number | Yes | MySQL server port \(default: 3306\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `table` | string | Yes | Table name to update |
|
||||
| `data` | object | Yes | Data to update as key-value pairs |
|
||||
| `where` | string | Yes | WHERE clause condition \(without WHERE keyword\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Array of updated rows |
|
||||
| `rowCount` | number | Number of rows updated |
|
||||
|
||||
### `mysql_delete`
|
||||
|
||||
Delete records from MySQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | MySQL server hostname or IP address |
|
||||
| `port` | number | Yes | MySQL server port \(default: 3306\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `table` | string | Yes | Table name to delete from |
|
||||
| `where` | string | Yes | WHERE clause condition \(without WHERE keyword\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Array of deleted rows |
|
||||
| `rowCount` | number | Number of rows deleted |
|
||||
|
||||
### `mysql_execute`
|
||||
|
||||
Execute raw SQL query on MySQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | MySQL server hostname or IP address |
|
||||
| `port` | number | Yes | MySQL server port \(default: 3306\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `query` | string | Yes | Raw SQL query to execute |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Array of rows returned from the query |
|
||||
| `rowCount` | number | Number of rows affected |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
- Category: `tools`
|
||||
- Type: `mysql`
|
||||
@@ -1,106 +0,0 @@
|
||||
---
|
||||
title: Parallel AI
|
||||
description: Search with Parallel AI
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="parallel_ai"
|
||||
color="#E0E0E0"
|
||||
icon={true}
|
||||
iconSvg={`<svg className="block-icon"
|
||||
|
||||
fill='currentColor'
|
||||
|
||||
|
||||
viewBox='0 0 271 270'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
>
|
||||
<path
|
||||
d='M267.804 105.65H193.828C194.026 106.814 194.187 107.996 194.349 109.178H76.6703C76.4546 110.736 76.2388 112.312 76.0591 113.87H1.63342C1.27387 116.198 0.950289 118.543 0.698608 120.925H75.3759C75.2501 122.483 75.1602 124.059 75.0703 125.617H195.949C196.003 126.781 196.057 127.962 196.093 129.144H270.68V125.384C270.195 118.651 269.242 112.061 267.804 105.65Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M195.949 144.401H75.0703C75.1422 145.977 75.2501 147.535 75.3759 149.093H0.698608C0.950289 151.457 1.2559 153.802 1.63342 156.148H76.0591C76.2388 157.724 76.4366 159.282 76.6703 160.84H194.349C194.187 162.022 194.008 163.186 193.828 164.367H267.804C269.242 157.957 270.195 151.367 270.68 144.634V140.874H196.093C196.057 142.055 196.003 143.219 195.949 144.401Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M190.628 179.642H80.3559C80.7514 181.218 81.1828 182.776 81.6143 184.334H9.30994C10.2448 186.715 11.2515 189.061 12.3121 191.389H83.7536C84.2749 192.965 84.7962 194.523 85.3535 196.08H185.594C185.163 197.262 184.732 198.426 184.282 199.608H254.519C258.6 192.177 261.98 184.316 264.604 176.114H191.455C191.185 177.296 190.898 178.46 190.61 179.642H190.628Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M177.666 214.883H93.3352C94.1082 216.458 94.9172 218.034 95.7441 219.574H29.8756C31.8351 221.992 33.8666 224.337 35.9699 226.63H99.6632C100.598 228.205 101.551 229.781 102.522 231.321H168.498C167.761 232.503 167.006 233.685 166.233 234.849H226.762C234.474 227.847 241.36 219.95 247.292 211.355H179.356C178.799 212.537 178.26 213.719 177.684 214.883H177.666Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M154.943 250.106H116.058C117.371 251.699 118.701 253.257 120.067 254.797H73.021C91.6094 264.431 112.715 269.946 135.096 270C135.24 270 135.366 270 135.492 270C135.618 270 135.761 270 135.887 270C164.04 269.911 190.178 261.28 211.805 246.56H157.748C156.813 247.742 155.878 248.924 154.925 250.088L154.943 250.106Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M116.059 19.9124H154.943C155.896 21.0764 156.831 22.2582 157.766 23.4401H211.823C190.179 8.72065 164.058 0.0895344 135.906 0C135.762 0 135.636 0 135.51 0C135.384 0 135.24 0 135.115 0C112.715 0.0716275 91.6277 5.56904 73.0393 15.2029H120.086C118.719 16.7429 117.389 18.3187 116.077 19.8945L116.059 19.9124Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M93.3356 55.1532H177.667C178.242 56.3171 178.799 57.499 179.339 58.6808H247.274C241.342 50.0855 234.457 42.1886 226.744 35.187H166.215C166.988 36.351 167.743 37.5328 168.48 38.7147H102.504C101.533 40.2726 100.58 41.8305 99.6456 43.4063H35.9523C33.831 45.6804 31.7996 48.0262 29.858 50.4616H95.7265C94.8996 52.0195 94.1086 53.5774 93.3176 55.1532H93.3356Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M80.3736 90.3758H190.646C190.933 91.5398 191.221 92.7216 191.491 93.9035H264.64C262.015 85.7021 258.636 77.841 254.555 70.4097H184.318C184.767 71.5736 185.199 72.7555 185.63 73.9373H85.3893C84.832 75.4952 84.2927 77.0531 83.7893 78.6289H12.3479C11.2872 80.9389 10.2805 83.2847 9.3457 85.6842H81.65C81.2186 87.2421 80.7871 88.8 80.3916 90.3758H80.3736Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
</svg>`}
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
[Parallel AI](https://parallel.ai/) is an advanced web search and content extraction platform designed to deliver comprehensive, high-quality results for any query. By leveraging intelligent processing and large-scale data extraction, Parallel AI enables users and agents to access, analyze, and synthesize information from across the web with speed and accuracy.

With Parallel AI, you can:

- **Search the web intelligently**: Retrieve relevant, up-to-date information from a wide range of sources
- **Extract and summarize content**: Get concise, meaningful excerpts from web pages and documents
- **Customize search objectives**: Tailor queries to specific needs or questions for targeted results
- **Process results at scale**: Handle large volumes of search results with advanced processing options
- **Integrate with workflows**: Use Parallel AI within Sim to automate research, content gathering, and knowledge extraction
- **Control output granularity**: Specify the number of results and the amount of content per result
- **Secure API access**: Protect your searches and data with API key authentication

In Sim, the Parallel AI integration empowers your agents to perform web searches and extract content programmatically. This enables powerful automation scenarios such as real-time research, competitive analysis, content monitoring, and knowledge base creation. By connecting Sim with Parallel AI, you unlock the ability for agents to gather, process, and utilize web data as part of your automated workflows.
{/* MANUAL-CONTENT-END */}
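
As a rough sketch, the `parallel_search` input can be modeled in TypeScript as follows. The interface name is hypothetical; the fields and defaults come from the parameter table below.

```ts
// Hypothetical typing of the parallel_search inputs (names are illustrative).
interface ParallelSearchParams {
  objective: string               // the search objective or question to answer
  search_queries?: string         // optional comma-separated list of queries
  processor?: 'base' | 'pro'      // defaults to 'base'
  max_results?: number            // defaults to 5
  max_chars_per_result?: number   // defaults to 1500
  apiKey: string                  // Parallel AI API key
}

const exampleSearch: ParallelSearchParams = {
  objective: 'Summarize recent developments in serverless databases',
  max_results: 5,
  apiKey: process.env.PARALLEL_API_KEY ?? '',
}
```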
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Search the web using Parallel AI's advanced search capabilities. Get comprehensive results with intelligent processing and content extraction.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `parallel_search`
|
||||
|
||||
Search the web using Parallel AI. Provides comprehensive search results with intelligent processing and content extraction.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `objective` | string | Yes | The search objective or question to answer |
|
||||
| `search_queries` | string | No | Optional comma-separated list of search queries to execute |
|
||||
| `processor` | string | No | Processing method: base or pro \(default: base\) |
|
||||
| `max_results` | number | No | Maximum number of results to return \(default: 5\) |
|
||||
| `max_chars_per_result` | number | No | Maximum characters per result \(default: 1500\) |
|
||||
| `apiKey` | string | Yes | Parallel AI API Key |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `results` | array | Search results with excerpts from relevant pages |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
- Category: `tools`
|
||||
- Type: `parallel_ai`
|
||||
@@ -1,188 +0,0 @@
|
||||
---
|
||||
title: PostgreSQL
|
||||
description: Connect to PostgreSQL database
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="postgresql"
|
||||
color="#336791"
|
||||
icon={true}
|
||||
iconSvg={`<svg className="block-icon"
|
||||
|
||||
|
||||
|
||||
viewBox='-4 0 264 264'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
preserveAspectRatio='xMinYMin meet'
|
||||
>
|
||||
<path d='M255.008 158.086c-1.535-4.649-5.556-7.887-10.756-8.664-2.452-.366-5.26-.21-8.583.475-5.792 1.195-10.089 1.65-13.225 1.738 11.837-19.985 21.462-42.775 27.003-64.228 8.96-34.689 4.172-50.492-1.423-57.64C233.217 10.847 211.614.683 185.552.372c-13.903-.17-26.108 2.575-32.475 4.549-5.928-1.046-12.302-1.63-18.99-1.738-12.537-.2-23.614 2.533-33.079 8.15-5.24-1.772-13.65-4.27-23.362-5.864-22.842-3.75-41.252-.828-54.718 8.685C6.622 25.672-.937 45.684.461 73.634c.444 8.874 5.408 35.874 13.224 61.48 4.492 14.718 9.282 26.94 14.237 36.33 7.027 13.315 14.546 21.156 22.987 23.972 4.731 1.576 13.327 2.68 22.368-4.85 1.146 1.388 2.675 2.767 4.704 4.048 2.577 1.625 5.728 2.953 8.875 3.74 11.341 2.835 21.964 2.126 31.027-1.848.056 1.612.099 3.152.135 4.482.06 2.157.12 4.272.199 6.25.537 13.374 1.447 23.773 4.143 31.049.148.4.347 1.01.557 1.657 1.345 4.118 3.594 11.012 9.316 16.411 5.925 5.593 13.092 7.308 19.656 7.308 3.292 0 6.433-.432 9.188-1.022 9.82-2.105 20.973-5.311 29.041-16.799 7.628-10.86 11.336-27.217 12.007-52.99.087-.729.167-1.425.244-2.088l.16-1.362 1.797.158.463.031c10.002.456 22.232-1.665 29.743-5.154 5.935-2.754 24.954-12.795 20.476-26.351' />
|
||||
<path
|
||||
d='M237.906 160.722c-29.74 6.135-31.785-3.934-31.785-3.934 31.4-46.593 44.527-105.736 33.2-120.211-30.904-39.485-84.399-20.811-85.292-20.327l-.287.052c-5.876-1.22-12.451-1.946-19.842-2.067-13.456-.22-23.664 3.528-31.41 9.402 0 0-95.43-39.314-90.991 49.444.944 18.882 27.064 142.873 58.218 105.422 11.387-13.695 22.39-25.274 22.39-25.274 5.464 3.63 12.006 5.482 18.864 4.817l.533-.452c-.166 1.7-.09 3.363.213 5.332-8.026 8.967-5.667 10.541-21.711 13.844-16.235 3.346-6.698 9.302-.471 10.86 7.549 1.887 25.013 4.561 36.813-11.958l-.47 1.885c3.144 2.519 5.352 16.383 4.982 28.952-.37 12.568-.617 21.197 1.86 27.937 2.479 6.74 4.948 21.905 26.04 17.386 17.623-3.777 26.756-13.564 28.027-29.89.901-11.606 2.942-9.89 3.07-20.267l1.637-4.912c1.887-15.733.3-20.809 11.157-18.448l2.64.232c7.99.363 18.45-1.286 24.589-4.139 13.218-6.134 21.058-16.377 8.024-13.686h.002'
|
||||
fill='#336791'
|
||||
/>
|
||||
<path
|
||||
d='M108.076 81.525c-2.68-.373-5.107-.028-6.335.902-.69.523-.904 1.129-.962 1.546-.154 1.105.62 2.327 1.096 2.957 1.346 1.784 3.312 3.01 5.258 3.28.282.04.563.058.842.058 3.245 0 6.196-2.527 6.456-4.392.325-2.336-3.066-3.893-6.355-4.35M196.86 81.599c-.256-1.831-3.514-2.353-6.606-1.923-3.088.43-6.082 1.824-5.832 3.659.2 1.427 2.777 3.863 5.827 3.863.258 0 .518-.017.78-.054 2.036-.282 3.53-1.575 4.24-2.32 1.08-1.136 1.706-2.402 1.591-3.225'
|
||||
fill='#FFF'
|
||||
/>
|
||||
<path
|
||||
d='M247.802 160.025c-1.134-3.429-4.784-4.532-10.848-3.28-18.005 3.716-24.453 1.142-26.57-.417 13.995-21.32 25.508-47.092 31.719-71.137 2.942-11.39 4.567-21.968 4.7-30.59.147-9.463-1.465-16.417-4.789-20.665-13.402-17.125-33.072-26.311-56.882-26.563-16.369-.184-30.199 4.005-32.88 5.183-5.646-1.404-11.801-2.266-18.502-2.376-12.288-.199-22.91 2.743-31.704 8.74-3.82-1.422-13.692-4.811-25.765-6.756-20.872-3.36-37.458-.814-49.294 7.571-14.123 10.006-20.643 27.892-19.38 53.16.425 8.501 5.269 34.653 12.913 59.698 10.062 32.964 21 51.625 32.508 55.464 1.347.449 2.9.763 4.613.763 4.198 0 9.345-1.892 14.7-8.33a529.832 529.832 0 0 1 20.261-22.926c4.524 2.428 9.494 3.784 14.577 3.92.01.133.023.266.035.398a117.66 117.66 0 0 0-2.57 3.175c-3.522 4.471-4.255 5.402-15.592 7.736-3.225.666-11.79 2.431-11.916 8.435-.136 6.56 10.125 9.315 11.294 9.607 4.074 1.02 7.999 1.523 11.742 1.523 9.103 0 17.114-2.992 23.516-8.781-.197 23.386.778 46.43 3.586 53.451 2.3 5.748 7.918 19.795 25.664 19.794 2.604 0 5.47-.303 8.623-.979 18.521-3.97 26.564-12.156 29.675-30.203 1.665-9.645 4.522-32.676 5.866-45.03 2.836.885 6.487 1.29 10.434 1.289 8.232 0 17.731-1.749 23.688-4.514 6.692-3.108 18.768-10.734 16.578-17.36zm-44.106-83.48c-.061 3.647-.563 6.958-1.095 10.414-.573 3.717-1.165 7.56-1.314 12.225-.147 4.54.42 9.26.968 13.825 1.108 9.22 2.245 18.712-2.156 28.078a36.508 36.508 0 0 1-1.95-4.009c-.547-1.326-1.735-3.456-3.38-6.404-6.399-11.476-21.384-38.35-13.713-49.316 2.285-3.264 8.084-6.62 22.64-4.813zm-17.644-61.787c21.334.471 38.21 8.452 50.158 23.72 9.164 11.711-.927 64.998-30.14 110.969a171.33 171.33 0 0 0-.886-1.117l-.37-.462c7.549-12.467 6.073-24.802 4.759-35.738-.54-4.488-1.05-8.727-.92-12.709.134-4.22.692-7.84 1.232-11.34.663-4.313 1.338-8.776 1.152-14.037.139-.552.195-1.204.122-1.978-.475-5.045-6.235-20.144-17.975-33.81-6.422-7.475-15.787-15.84-28.574-21.482 5.5-1.14 13.021-2.203 21.442-2.016zM66.674 175.778c-5.9 7.094-9.974 5.734-11.314 5.288-8.73-2.912-18.86-21.364-27.791-50.624-7.728-25.318-12.244-50.777-12.602-57.916-1.128-22.578 4.345-38.313 16.268-46.769 19.404-13.76 51.306-5.524 64.125-1.347-.184.182-.376.352-.558.537-21.036 21.244-20.537 57.54-20.485 59.759-.002.856.07 2.068.168 3.735.362 6.105 1.036 17.467-.764 30.334-1.672 11.957 2.014 23.66 10.111 32.109a36.275 36.275 0 0 0 2.617 2.468c-3.604 3.86-11.437 12.396-19.775 22.426zm22.479-29.993c-6.526-6.81-9.49-16.282-8.133-25.99 1.9-13.592 1.199-25.43.822-31.79-.053-.89-.1-1.67-.127-2.285 3.073-2.725 17.314-10.355 27.47-8.028 4.634 1.061 7.458 4.217 8.632 9.645 6.076 28.103.804 39.816-3.432 49.229-.873 1.939-1.698 3.772-2.402 5.668l-.546 1.466c-1.382 3.706-2.668 7.152-3.465 10.424-6.938-.02-13.687-2.984-18.819-8.34zm1.065 37.9c-2.026-.506-3.848-1.385-4.917-2.114.893-.42 2.482-.992 5.238-1.56 13.337-2.745 15.397-4.683 19.895-10.394 1.031-1.31 2.2-2.794 3.819-4.602l.002-.002c2.411-2.7 3.514-2.242 5.514-1.412 1.621.67 3.2 2.702 3.84 4.938.303 1.056.643 3.06-.47 4.62-9.396 13.156-23.088 12.987-32.921 10.526zm69.799 64.952c-16.316 3.496-22.093-4.829-25.9-14.346-2.457-6.144-3.665-33.85-2.808-64.447.011-.407-.047-.8-.159-1.17a15.444 15.444 0 0 0-.456-2.162c-1.274-4.452-4.379-8.176-8.104-9.72-1.48-.613-4.196-1.738-7.46-.903.696-2.868 1.903-6.107 3.212-9.614l.549-1.475c.618-1.663 1.394-3.386 2.214-5.21 4.433-9.848 10.504-23.337 3.915-53.81-2.468-11.414-10.71-16.988-23.204-15.693-7.49.775-14.343 3.797-17.761 5.53-.735.372-1.407.732-2.035 1.082.954-11.5 4.558-32.992 18.04-46.59 8.489-8.56 19.794-12.788 33.568-12.56 27.14.444 44.544 14.372 54.366 25.979 8.464 10.001 13.047 
20.076 14.876 25.51-13.755-1.399-23.11 1.316-27.852 8.096-10.317 14.748 5.644 43.372 13.315 57.129 1.407 2.521 2.621 4.7 3.003 5.626 2.498 6.054 5.732 10.096 8.093 13.046.724.904 1.426 1.781 1.96 2.547-4.166 1.201-11.649 3.976-10.967 17.847-.55 6.96-4.461 39.546-6.448 51.059-2.623 15.21-8.22 20.875-23.957 24.25zm68.104-77.936c-4.26 1.977-11.389 3.46-18.161 3.779-7.48.35-11.288-.838-12.184-1.569-.42-8.644 2.797-9.547 6.202-10.503.535-.15 1.057-.297 1.561-.473.313.255.656.508 1.032.756 6.012 3.968 16.735 4.396 31.874 1.271l.166-.033c-2.042 1.909-5.536 4.471-10.49 6.772z'
|
||||
fill='#FFF'
|
||||
/>
|
||||
</svg>`}
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
The [PostgreSQL](https://www.postgresql.org/) tool enables you to connect to any PostgreSQL database and perform a wide range of database operations directly within your agentic workflows. With secure connection handling and flexible configuration, you can easily manage and interact with your data.

With the PostgreSQL tool, you can:

- **Query data**: Execute SELECT queries to retrieve data from your PostgreSQL tables using the `postgresql_query` operation.
- **Insert records**: Add new rows to your tables with the `postgresql_insert` operation by specifying the table and data to insert.
- **Update records**: Modify existing data in your tables using the `postgresql_update` operation, providing the table, new data, and WHERE conditions.
- **Delete records**: Remove rows from your tables with the `postgresql_delete` operation, specifying the table and WHERE conditions.
- **Execute raw SQL**: Run any custom SQL command using the `postgresql_execute` operation for advanced use cases.

The PostgreSQL tool is ideal for scenarios where your agents need to interact with structured data—such as automating reporting, syncing data between systems, or powering data-driven workflows. It streamlines database access, making it easy to read, write, and manage your PostgreSQL data programmatically.
{/* MANUAL-CONTENT-END */}
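
For illustration, a minimal sketch of calling the insert operation over HTTP. The base URL and the `/api/tools/postgresql/insert` path are assumptions about a local Sim deployment; the fields mirror the `postgresql_insert` parameters documented below.

```ts
// Hypothetical call to the PostgreSQL tool's insert endpoint (URL is an assumption).
async function insertEvent() {
  const response = await fetch('http://localhost:3000/api/tools/postgresql/insert', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      host: 'db.example.com',
      port: 5432,
      database: 'app',
      username: 'app_writer',
      password: process.env.PG_PASSWORD,
      ssl: 'required',
      table: 'events',
      data: { type: 'signup', user_id: 42 },
    }),
  })

  const { message, rowCount } = await response.json()
  console.log(message, rowCount)
}
```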
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Connect to any PostgreSQL database to execute queries, manage data, and perform database operations. Supports SELECT, INSERT, UPDATE, DELETE operations with secure connection handling.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `postgresql_query`
|
||||
|
||||
Execute a SELECT query on PostgreSQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | PostgreSQL server hostname or IP address |
|
||||
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `query` | string | Yes | SQL SELECT query to execute |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Array of rows returned from the query |
|
||||
| `rowCount` | number | Number of rows returned |
|
||||
|
||||
### `postgresql_insert`
|
||||
|
||||
Insert data into PostgreSQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | PostgreSQL server hostname or IP address |
|
||||
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `table` | string | Yes | Table name to insert data into |
|
||||
| `data` | object | Yes | Data object to insert \(key-value pairs\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Inserted data \(if RETURNING clause used\) |
|
||||
| `rowCount` | number | Number of rows inserted |
|
||||
|
||||
### `postgresql_update`
|
||||
|
||||
Update data in PostgreSQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | PostgreSQL server hostname or IP address |
|
||||
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `table` | string | Yes | Table name to update data in |
|
||||
| `data` | object | Yes | Data object with fields to update \(key-value pairs\) |
|
||||
| `where` | string | Yes | WHERE clause condition \(without WHERE keyword\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Updated data \(if RETURNING clause used\) |
|
||||
| `rowCount` | number | Number of rows updated |
|
||||
|
||||
### `postgresql_delete`
|
||||
|
||||
Delete data from PostgreSQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | PostgreSQL server hostname or IP address |
|
||||
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `table` | string | Yes | Table name to delete data from |
|
||||
| `where` | string | Yes | WHERE clause condition \(without WHERE keyword\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Deleted data \(if RETURNING clause used\) |
|
||||
| `rowCount` | number | Number of rows deleted |
|
||||
|
||||
### `postgresql_execute`
|
||||
|
||||
Execute raw SQL query on PostgreSQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | PostgreSQL server hostname or IP address |
|
||||
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `query` | string | Yes | Raw SQL query to execute |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Array of rows returned from the query |
|
||||
| `rowCount` | number | Number of rows affected |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
- Category: `tools`
|
||||
- Type: `postgresql`
|
||||
@@ -143,6 +143,7 @@ export const sampleWorkflowState = {
    ],
    loops: {},
    parallels: {},
+   whiles: {},
    lastSaved: Date.now(),
    isDeployed: false,
  }
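
These commits thread a new `whiles` collection through the workflow state alongside `loops` and `parallels`. A rough sketch of the resulting shape, with value types inferred from how the surrounding fields are used (the types are assumptions, not the repository's actual definitions):

```ts
// Assumed shape, inferred from this diff: whiles sits next to loops and
// parallels and, like them, is keyed by block id.
interface WorkflowStateSketch {
  blocks: Record<string, unknown>
  edges: unknown[]
  loops: Record<string, unknown>
  parallels: Record<string, unknown>
  whiles: Record<string, unknown> // new in the feat/while branch
  lastSaved: number
  isDeployed: boolean
}
```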
@@ -420,7 +420,7 @@ export async function executeWorkflowForChat(

  // Use deployed state for chat execution (this is the stable, deployed version)
  const deployedState = workflowResult[0].deployedState as WorkflowState
- const { blocks, edges, loops, parallels } = deployedState
+ const { blocks, edges, loops, parallels, whiles } = deployedState

  // Prepare for execution, similar to use-workflow-execution.ts
  const mergedStates = mergeSubblockState(blocks)

@@ -497,6 +497,7 @@ export async function executeWorkflowForChat(
      filteredEdges,
      loops,
      parallels,
+     whiles,
      true // Enable validation during execution
    )

@@ -71,6 +71,7 @@ export async function POST(request: NextRequest) {
      edges: checkpointState?.edges || [],
      loops: checkpointState?.loops || {},
      parallels: checkpointState?.parallels || {},
+     whiles: checkpointState?.whiles || {},
      isDeployed: checkpointState?.isDeployed || false,
      deploymentStatuses: checkpointState?.deploymentStatuses || {},
      hasActiveWebhook: checkpointState?.hasActiveWebhook || false,

@@ -23,6 +23,7 @@ describe('Scheduled Workflow Execution API Route', () => {
      edges: sampleWorkflowState.edges || [],
      loops: sampleWorkflowState.loops || {},
      parallels: {},
+     whiles: {},
      isFromNormalizedTables: true,
    }),
  }))

@@ -230,6 +230,7 @@ export async function GET() {
        const edges = normalizedData.edges
        const loops = normalizedData.loops
        const parallels = normalizedData.parallels
+       const whiles = normalizedData.whiles
        logger.info(
          `[${requestId}] Loaded scheduled workflow ${schedule.workflowId} from normalized tables`
        )

@@ -384,6 +385,7 @@ export async function GET() {
          edges,
          loops,
          parallels,
+         whiles,
          true // Enable validation during execution
        )

@@ -68,6 +68,7 @@ const CreateTemplateSchema = z.object({
    edges: z.array(z.any()),
    loops: z.record(z.any()),
    parallels: z.record(z.any()),
+   whiles: z.record(z.any()),
  }),
})

@@ -1,67 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { buildDeleteQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'
|
||||
|
||||
const logger = createLogger('MySQLDeleteAPI')
|
||||
|
||||
const DeleteSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
table: z.string().min(1, 'Table name is required'),
|
||||
where: z.string().min(1, 'WHERE clause is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = DeleteSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Deleting data from ${params.table} on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const connection = await createMySQLConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const { query, values } = buildDeleteQuery(params.table, params.where)
|
||||
const result = await executeQuery(connection, query, values)
|
||||
|
||||
logger.info(`[${requestId}] Delete executed successfully, ${result.rowCount} row(s) deleted`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Data deleted successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await connection.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] MySQL delete failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `MySQL delete failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
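
For reference, a payload this route accepts, with field names taken from `DeleteSchema` above; the values are illustrative only.

```ts
// Illustrative request body for the MySQL delete route defined above.
const deletePayload = {
  host: 'db.example.com',
  port: 3306,
  database: 'app',
  username: 'app_writer',
  password: 'change-me',
  ssl: 'required',
  table: 'sessions',
  where: 'expires_at < NOW() - INTERVAL 30 DAY',
}
// POSTing this JSON returns { message, rows, rowCount } on success.
```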
|
||||
@@ -1,75 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { createMySQLConnection, executeQuery, validateQuery } from '@/app/api/tools/mysql/utils'
|
||||
|
||||
const logger = createLogger('MySQLExecuteAPI')
|
||||
|
||||
const ExecuteSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
query: z.string().min(1, 'Query is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = ExecuteSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Executing raw SQL on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
// Validate query before execution
|
||||
const validation = validateQuery(params.query)
|
||||
if (!validation.isValid) {
|
||||
logger.warn(`[${requestId}] Query validation failed: ${validation.error}`)
|
||||
return NextResponse.json(
|
||||
{ error: `Query validation failed: ${validation.error}` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const connection = await createMySQLConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await executeQuery(connection, params.query)
|
||||
|
||||
logger.info(`[${requestId}] SQL executed successfully, ${result.rowCount} row(s) affected`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `SQL executed successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await connection.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] MySQL execute failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `MySQL execute failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,91 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { buildInsertQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'
|
||||
|
||||
const logger = createLogger('MySQLInsertAPI')
|
||||
|
||||
const InsertSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
table: z.string().min(1, 'Table name is required'),
|
||||
data: z.union([
|
||||
z
|
||||
.record(z.unknown())
|
||||
.refine((obj) => Object.keys(obj).length > 0, 'Data object cannot be empty'),
|
||||
z
|
||||
.string()
|
||||
.min(1)
|
||||
.transform((str) => {
|
||||
try {
|
||||
const parsed = JSON.parse(str)
|
||||
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
|
||||
throw new Error('Data must be a JSON object')
|
||||
}
|
||||
return parsed
|
||||
} catch (e) {
|
||||
const errorMsg = e instanceof Error ? e.message : 'Unknown error'
|
||||
throw new Error(
|
||||
`Invalid JSON format in data field: ${errorMsg}. Received: ${str.substring(0, 100)}...`
|
||||
)
|
||||
}
|
||||
}),
|
||||
]),
|
||||
})
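
The `data` union above accepts either a plain object or the same object serialized as a JSON string; a couple of illustrative values (connection fields omitted):

```ts
// Both values satisfy the data union above.
const asObject = { name: 'Ada', active: true }   // plain object
const asString = '{"name":"Ada","active":true}'  // JSON string, parsed by the transform
// Arrays and non-object JSON strings are rejected with "Data must be a JSON object".
```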
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
|
||||
logger.info(`[${requestId}] Received data field type: ${typeof body.data}, value:`, body.data)
|
||||
|
||||
const params = InsertSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Inserting data into ${params.table} on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const connection = await createMySQLConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const { query, values } = buildInsertQuery(params.table, params.data)
|
||||
const result = await executeQuery(connection, query, values)
|
||||
|
||||
logger.info(`[${requestId}] Insert executed successfully, ${result.rowCount} row(s) inserted`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Data inserted successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await connection.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] MySQL insert failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `MySQL insert failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,75 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { createMySQLConnection, executeQuery, validateQuery } from '@/app/api/tools/mysql/utils'
|
||||
|
||||
const logger = createLogger('MySQLQueryAPI')
|
||||
|
||||
const QuerySchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
query: z.string().min(1, 'Query is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = QuerySchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Executing MySQL query on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
// Validate query before execution
|
||||
const validation = validateQuery(params.query)
|
||||
if (!validation.isValid) {
|
||||
logger.warn(`[${requestId}] Query validation failed: ${validation.error}`)
|
||||
return NextResponse.json(
|
||||
{ error: `Query validation failed: ${validation.error}` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const connection = await createMySQLConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await executeQuery(connection, params.query)
|
||||
|
||||
logger.info(`[${requestId}] Query executed successfully, returned ${result.rowCount} rows`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Query executed successfully. ${result.rowCount} row(s) returned.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await connection.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] MySQL query failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `MySQL query failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,86 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { buildUpdateQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'
|
||||
|
||||
const logger = createLogger('MySQLUpdateAPI')
|
||||
|
||||
const UpdateSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
table: z.string().min(1, 'Table name is required'),
|
||||
data: z.union([
|
||||
z
|
||||
.record(z.unknown())
|
||||
.refine((obj) => Object.keys(obj).length > 0, 'Data object cannot be empty'),
|
||||
z
|
||||
.string()
|
||||
.min(1)
|
||||
.transform((str) => {
|
||||
try {
|
||||
const parsed = JSON.parse(str)
|
||||
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
|
||||
throw new Error('Data must be a JSON object')
|
||||
}
|
||||
return parsed
|
||||
} catch (e) {
|
||||
throw new Error('Invalid JSON format in data field')
|
||||
}
|
||||
}),
|
||||
]),
|
||||
where: z.string().min(1, 'WHERE clause is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = UpdateSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Updating data in ${params.table} on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const connection = await createMySQLConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const { query, values } = buildUpdateQuery(params.table, params.data, params.where)
|
||||
const result = await executeQuery(connection, query, values)
|
||||
|
||||
logger.info(`[${requestId}] Update executed successfully, ${result.rowCount} row(s) updated`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Data updated successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await connection.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] MySQL update failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `MySQL update failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,159 +0,0 @@
|
||||
import mysql from 'mysql2/promise'
|
||||
|
||||
export interface MySQLConnectionConfig {
|
||||
host: string
|
||||
port: number
|
||||
database: string
|
||||
username: string
|
||||
password: string
|
||||
ssl?: string
|
||||
}
|
||||
|
||||
export async function createMySQLConnection(config: MySQLConnectionConfig) {
|
||||
const connectionConfig: mysql.ConnectionOptions = {
|
||||
host: config.host,
|
||||
port: config.port,
|
||||
database: config.database,
|
||||
user: config.username,
|
||||
password: config.password,
|
||||
}
|
||||
|
||||
// Handle SSL configuration
|
||||
if (config.ssl === 'required') {
|
||||
connectionConfig.ssl = { rejectUnauthorized: true }
|
||||
} else if (config.ssl === 'preferred') {
|
||||
connectionConfig.ssl = { rejectUnauthorized: false }
|
||||
}
|
||||
// For 'disabled', we don't set the ssl property at all
|
||||
|
||||
return mysql.createConnection(connectionConfig)
|
||||
}
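
A short usage sketch of the connection helper; the comments summarize how each `ssl` mode maps onto `mysql2` options.

```ts
// ssl mapping in createMySQLConnection above:
//   'required'  -> ssl: { rejectUnauthorized: true }   (encrypt and verify the server cert)
//   'preferred' -> ssl: { rejectUnauthorized: false }  (encrypt, skip verification)
//   'disabled'  -> ssl option omitted entirely
const connection = await createMySQLConnection({
  host: '127.0.0.1',
  port: 3306,
  database: 'app',
  username: 'root',
  password: 'example',
  ssl: 'preferred',
})
```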
|
||||
|
||||
export async function executeQuery(
|
||||
connection: mysql.Connection,
|
||||
query: string,
|
||||
values?: unknown[]
|
||||
) {
|
||||
const [rows, fields] = await connection.execute(query, values)
|
||||
|
||||
if (Array.isArray(rows)) {
|
||||
return {
|
||||
rows: rows as unknown[],
|
||||
rowCount: rows.length,
|
||||
fields,
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
rows: [],
|
||||
rowCount: (rows as mysql.ResultSetHeader).affectedRows || 0,
|
||||
fields,
|
||||
}
|
||||
}
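
`executeQuery` normalizes the two result shapes `mysql2` can return; for example, continuing with a connection like the one in the previous sketch:

```ts
// SELECT-style statements: rows is the result set, so rowCount = rows.length.
const selected = await executeQuery(connection, 'SELECT id FROM users LIMIT 5')

// Write statements return a ResultSetHeader: rows is [] and rowCount = affectedRows.
const updated = await executeQuery(connection, 'UPDATE users SET active = 1 WHERE id = ?', [7])

await connection.end()
```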
|
||||
|
||||
export function validateQuery(query: string): { isValid: boolean; error?: string } {
|
||||
const trimmedQuery = query.trim().toLowerCase()
|
||||
|
||||
// Block dangerous SQL operations
|
||||
const dangerousPatterns = [
|
||||
/drop\s+database/i,
|
||||
/drop\s+schema/i,
|
||||
/drop\s+user/i,
|
||||
/create\s+user/i,
|
||||
/grant\s+/i,
|
||||
/revoke\s+/i,
|
||||
/alter\s+user/i,
|
||||
/set\s+global/i,
|
||||
/set\s+session/i,
|
||||
/load\s+data/i,
|
||||
/into\s+outfile/i,
|
||||
/into\s+dumpfile/i,
|
||||
/load_file\s*\(/i,
|
||||
/system\s+/i,
|
||||
/exec\s+/i,
|
||||
/execute\s+immediate/i,
|
||||
/xp_cmdshell/i,
|
||||
/sp_configure/i,
|
||||
/information_schema\.tables/i,
|
||||
/mysql\.user/i,
|
||||
/mysql\.db/i,
|
||||
/mysql\.host/i,
|
||||
/performance_schema/i,
|
||||
/sys\./i,
|
||||
]
|
||||
|
||||
for (const pattern of dangerousPatterns) {
|
||||
if (pattern.test(query)) {
|
||||
return {
|
||||
isValid: false,
|
||||
error: `Query contains potentially dangerous operation: ${pattern.source}`,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Only allow specific statement types for execute endpoint
|
||||
const allowedStatements = /^(select|insert|update|delete|with|show|describe|explain)\s+/i
|
||||
if (!allowedStatements.test(trimmedQuery)) {
|
||||
return {
|
||||
isValid: false,
|
||||
error:
|
||||
'Only SELECT, INSERT, UPDATE, DELETE, WITH, SHOW, DESCRIBE, and EXPLAIN statements are allowed',
|
||||
}
|
||||
}
|
||||
|
||||
return { isValid: true }
|
||||
}
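
A few illustrative calls to `validateQuery`:

```ts
validateQuery('SELECT * FROM orders WHERE id = 1')
// -> { isValid: true }

validateQuery('DROP DATABASE app')
// -> { isValid: false, error: 'Query contains potentially dangerous operation: drop\s+database' }

validateQuery('TRUNCATE TABLE orders')
// -> { isValid: false } (TRUNCATE is not in the allowed statement list)
```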
|
||||
|
||||
export function buildInsertQuery(table: string, data: Record<string, unknown>) {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const columns = Object.keys(data)
|
||||
const values = Object.values(data)
|
||||
const placeholders = columns.map(() => '?').join(', ')
|
||||
|
||||
const query = `INSERT INTO ${sanitizedTable} (${columns.map(sanitizeIdentifier).join(', ')}) VALUES (${placeholders})`
|
||||
|
||||
return { query, values }
|
||||
}
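
For example, `buildInsertQuery` produces a parameterized statement with backtick-quoted identifiers:

```ts
const { query, values } = buildInsertQuery('app.users', { name: 'Ada', active: true })
// query  -> INSERT INTO `app`.`users` (`name`, `active`) VALUES (?, ?)
// values -> ['Ada', true]
```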
|
||||
|
||||
export function buildUpdateQuery(table: string, data: Record<string, unknown>, where: string) {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const columns = Object.keys(data)
|
||||
const values = Object.values(data)
|
||||
|
||||
const setClause = columns.map((col) => `${sanitizeIdentifier(col)} = ?`).join(', ')
|
||||
const query = `UPDATE ${sanitizedTable} SET ${setClause} WHERE ${where}`
|
||||
|
||||
return { query, values }
|
||||
}
|
||||
|
||||
export function buildDeleteQuery(table: string, where: string) {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const query = `DELETE FROM ${sanitizedTable} WHERE ${where}`
|
||||
|
||||
return { query, values: [] }
|
||||
}
|
||||
|
||||
export function sanitizeIdentifier(identifier: string): string {
|
||||
// Handle schema.table format
|
||||
if (identifier.includes('.')) {
|
||||
const parts = identifier.split('.')
|
||||
return parts.map((part) => sanitizeSingleIdentifier(part)).join('.')
|
||||
}
|
||||
|
||||
return sanitizeSingleIdentifier(identifier)
|
||||
}
|
||||
|
||||
function sanitizeSingleIdentifier(identifier: string): string {
|
||||
// Remove any existing backticks to prevent double-escaping
|
||||
const cleaned = identifier.replace(/`/g, '')
|
||||
|
||||
// Validate identifier contains only safe characters
|
||||
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(cleaned)) {
|
||||
throw new Error(
|
||||
`Invalid identifier: ${identifier}. Identifiers must start with a letter or underscore and contain only letters, numbers, and underscores.`
|
||||
)
|
||||
}
|
||||
|
||||
// Wrap in backticks for MySQL
|
||||
return `\`${cleaned}\``
|
||||
}
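
Illustrative behaviour of the identifier helpers above:

```ts
sanitizeIdentifier('users')            // -> `users`
sanitizeIdentifier('analytics.events') // -> `analytics`.`events`
sanitizeIdentifier('users; DROP TABLE users')
// -> throws: identifiers may only contain letters, numbers, and underscores
```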
|
||||
@@ -1,74 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
buildDeleteQuery,
|
||||
createPostgresConnection,
|
||||
executeQuery,
|
||||
} from '@/app/api/tools/postgresql/utils'
|
||||
|
||||
const logger = createLogger('PostgreSQLDeleteAPI')
|
||||
|
||||
const DeleteSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
table: z.string().min(1, 'Table name is required'),
|
||||
where: z.string().min(1, 'WHERE clause is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = DeleteSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Deleting data from ${params.table} on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const client = await createPostgresConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const { query, values } = buildDeleteQuery(params.table, params.where)
|
||||
const result = await executeQuery(client, query, values)
|
||||
|
||||
logger.info(`[${requestId}] Delete executed successfully, ${result.rowCount} row(s) deleted`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Data deleted successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] PostgreSQL delete failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `PostgreSQL delete failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,82 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
createPostgresConnection,
|
||||
executeQuery,
|
||||
validateQuery,
|
||||
} from '@/app/api/tools/postgresql/utils'
|
||||
|
||||
const logger = createLogger('PostgreSQLExecuteAPI')
|
||||
|
||||
const ExecuteSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
query: z.string().min(1, 'Query is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = ExecuteSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Executing raw SQL on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
// Validate query before execution
|
||||
const validation = validateQuery(params.query)
|
||||
if (!validation.isValid) {
|
||||
logger.warn(`[${requestId}] Query validation failed: ${validation.error}`)
|
||||
return NextResponse.json(
|
||||
{ error: `Query validation failed: ${validation.error}` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const client = await createPostgresConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await executeQuery(client, params.query)
|
||||
|
||||
logger.info(`[${requestId}] SQL executed successfully, ${result.rowCount} row(s) affected`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `SQL executed successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] PostgreSQL execute failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `PostgreSQL execute failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,99 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
buildInsertQuery,
|
||||
createPostgresConnection,
|
||||
executeQuery,
|
||||
} from '@/app/api/tools/postgresql/utils'
|
||||
|
||||
const logger = createLogger('PostgreSQLInsertAPI')
|
||||
|
||||
const InsertSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
table: z.string().min(1, 'Table name is required'),
|
||||
data: z.union([
|
||||
z
|
||||
.record(z.unknown())
|
||||
.refine((obj) => Object.keys(obj).length > 0, 'Data object cannot be empty'),
|
||||
z
|
||||
.string()
|
||||
.min(1)
|
||||
.transform((str) => {
|
||||
try {
|
||||
const parsed = JSON.parse(str)
|
||||
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
|
||||
throw new Error('Data must be a JSON object')
|
||||
}
|
||||
return parsed
|
||||
} catch (e) {
|
||||
const errorMsg = e instanceof Error ? e.message : 'Unknown error'
|
||||
throw new Error(
|
||||
`Invalid JSON format in data field: ${errorMsg}. Received: ${str.substring(0, 100)}...`
|
||||
)
|
||||
}
|
||||
}),
|
||||
]),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
|
||||
// Debug: Log the data field to see what we're getting
|
||||
logger.info(`[${requestId}] Received data field type: ${typeof body.data}, value:`, body.data)
|
||||
|
||||
const params = InsertSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Inserting data into ${params.table} on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const client = await createPostgresConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const { query, values } = buildInsertQuery(params.table, params.data)
|
||||
const result = await executeQuery(client, query, values)
|
||||
|
||||
logger.info(`[${requestId}] Insert executed successfully, ${result.rowCount} row(s) inserted`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Data inserted successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] PostgreSQL insert failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `PostgreSQL insert failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,65 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { createPostgresConnection, executeQuery } from '@/app/api/tools/postgresql/utils'
|
||||
|
||||
const logger = createLogger('PostgreSQLQueryAPI')
|
||||
|
||||
const QuerySchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
query: z.string().min(1, 'Query is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = QuerySchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Executing PostgreSQL query on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const client = await createPostgresConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await executeQuery(client, params.query)
|
||||
|
||||
logger.info(`[${requestId}] Query executed successfully, returned ${result.rowCount} rows`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Query executed successfully. ${result.rowCount} row(s) returned.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] PostgreSQL query failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `PostgreSQL query failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,93 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
buildUpdateQuery,
|
||||
createPostgresConnection,
|
||||
executeQuery,
|
||||
} from '@/app/api/tools/postgresql/utils'
|
||||
|
||||
const logger = createLogger('PostgreSQLUpdateAPI')
|
||||
|
||||
const UpdateSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
table: z.string().min(1, 'Table name is required'),
|
||||
data: z.union([
|
||||
z
|
||||
.record(z.unknown())
|
||||
.refine((obj) => Object.keys(obj).length > 0, 'Data object cannot be empty'),
|
||||
z
|
||||
.string()
|
||||
.min(1)
|
||||
.transform((str) => {
|
||||
try {
|
||||
const parsed = JSON.parse(str)
|
||||
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
|
||||
throw new Error('Data must be a JSON object')
|
||||
}
|
||||
return parsed
|
||||
} catch (e) {
|
||||
throw new Error('Invalid JSON format in data field')
|
||||
}
|
||||
}),
|
||||
]),
|
||||
where: z.string().min(1, 'WHERE clause is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = UpdateSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Updating data in ${params.table} on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const client = await createPostgresConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const { query, values } = buildUpdateQuery(params.table, params.data, params.where)
|
||||
const result = await executeQuery(client, query, values)
|
||||
|
||||
logger.info(`[${requestId}] Update executed successfully, ${result.rowCount} row(s) updated`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Data updated successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] PostgreSQL update failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `PostgreSQL update failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,173 +0,0 @@
|
||||
import { Client } from 'pg'
|
||||
import type { PostgresConnectionConfig } from '@/tools/postgresql/types'
|
||||
|
||||
export async function createPostgresConnection(config: PostgresConnectionConfig): Promise<Client> {
|
||||
const client = new Client({
|
||||
host: config.host,
|
||||
port: config.port,
|
||||
database: config.database,
|
||||
user: config.username,
|
||||
password: config.password,
|
||||
ssl:
|
||||
config.ssl === 'disabled'
|
||||
? false
|
||||
: config.ssl === 'required'
|
||||
? true
|
||||
: config.ssl === 'preferred'
|
||||
? { rejectUnauthorized: false }
|
||||
: false,
|
||||
connectionTimeoutMillis: 10000, // 10 seconds
|
||||
query_timeout: 30000, // 30 seconds
|
||||
})
|
||||
|
||||
try {
|
||||
await client.connect()
|
||||
return client
|
||||
} catch (error) {
|
||||
await client.end()
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeQuery(
|
||||
client: Client,
|
||||
query: string,
|
||||
params: unknown[] = []
|
||||
): Promise<{ rows: unknown[]; rowCount: number }> {
|
||||
const result = await client.query(query, params)
|
||||
return {
|
||||
rows: result.rows || [],
|
||||
rowCount: result.rowCount || 0,
|
||||
}
|
||||
}
|
||||
|
||||
export function validateQuery(query: string): { isValid: boolean; error?: string } {
|
||||
const trimmedQuery = query.trim().toLowerCase()
|
||||
|
||||
// Block dangerous SQL operations
|
||||
const dangerousPatterns = [
|
||||
/drop\s+database/i,
|
||||
/drop\s+schema/i,
|
||||
/drop\s+user/i,
|
||||
/create\s+user/i,
|
||||
/create\s+role/i,
|
||||
/grant\s+/i,
|
||||
/revoke\s+/i,
|
||||
/alter\s+user/i,
|
||||
/alter\s+role/i,
|
||||
/set\s+role/i,
|
||||
/reset\s+role/i,
|
||||
/copy\s+.*from/i,
|
||||
/copy\s+.*to/i,
|
||||
/lo_import/i,
|
||||
/lo_export/i,
|
||||
/pg_read_file/i,
|
||||
/pg_write_file/i,
|
||||
/pg_ls_dir/i,
|
||||
/information_schema\.tables/i,
|
||||
/pg_catalog/i,
|
||||
/pg_user/i,
|
||||
/pg_shadow/i,
|
||||
/pg_roles/i,
|
||||
/pg_authid/i,
|
||||
/pg_stat_activity/i,
|
||||
/dblink/i,
|
||||
/\\\\copy/i,
|
||||
]
|
||||
|
||||
for (const pattern of dangerousPatterns) {
|
||||
if (pattern.test(query)) {
|
||||
return {
|
||||
isValid: false,
|
||||
error: `Query contains potentially dangerous operation: ${pattern.source}`,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Only allow specific statement types for execute endpoint
|
||||
const allowedStatements = /^(select|insert|update|delete|with|explain|analyze|show)\s+/i
|
||||
if (!allowedStatements.test(trimmedQuery)) {
|
||||
return {
|
||||
isValid: false,
|
||||
error:
|
||||
'Only SELECT, INSERT, UPDATE, DELETE, WITH, EXPLAIN, ANALYZE, and SHOW statements are allowed',
|
||||
}
|
||||
}
|
||||
|
||||
return { isValid: true }
|
||||
}
|
||||
|
||||
export function sanitizeIdentifier(identifier: string): string {
|
||||
// Handle schema.table format
|
||||
if (identifier.includes('.')) {
|
||||
const parts = identifier.split('.')
|
||||
return parts.map((part) => sanitizeSingleIdentifier(part)).join('.')
|
||||
}
|
||||
|
||||
return sanitizeSingleIdentifier(identifier)
|
||||
}
|
||||
|
||||
function sanitizeSingleIdentifier(identifier: string): string {
|
||||
// Remove any existing double quotes to prevent double-escaping
|
||||
const cleaned = identifier.replace(/"/g, '')
|
||||
|
||||
// Validate identifier contains only safe characters
|
||||
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(cleaned)) {
|
||||
throw new Error(
|
||||
`Invalid identifier: ${identifier}. Identifiers must start with a letter or underscore and contain only letters, numbers, and underscores.`
|
||||
)
|
||||
}
|
||||
|
||||
// Wrap in double quotes for PostgreSQL
|
||||
return `"${cleaned}"`
|
||||
}
|
||||
|
||||
export function buildInsertQuery(
|
||||
table: string,
|
||||
data: Record<string, unknown>
|
||||
): {
|
||||
query: string
|
||||
values: unknown[]
|
||||
} {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const columns = Object.keys(data)
|
||||
const sanitizedColumns = columns.map((col) => sanitizeIdentifier(col))
|
||||
const placeholders = columns.map((_, index) => `$${index + 1}`)
|
||||
const values = columns.map((col) => data[col])
|
||||
|
||||
const query = `INSERT INTO ${sanitizedTable} (${sanitizedColumns.join(', ')}) VALUES (${placeholders.join(', ')}) RETURNING *`
|
||||
|
||||
return { query, values }
|
||||
}
|
||||
|
||||
export function buildUpdateQuery(
|
||||
table: string,
|
||||
data: Record<string, unknown>,
|
||||
where: string
|
||||
): {
|
||||
query: string
|
||||
values: unknown[]
|
||||
} {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const columns = Object.keys(data)
|
||||
const sanitizedColumns = columns.map((col) => sanitizeIdentifier(col))
|
||||
const setClause = sanitizedColumns.map((col, index) => `${col} = $${index + 1}`).join(', ')
|
||||
const values = columns.map((col) => data[col])
|
||||
|
||||
const query = `UPDATE ${sanitizedTable} SET ${setClause} WHERE ${where} RETURNING *`
|
||||
|
||||
return { query, values }
|
||||
}
|
||||
|
||||
export function buildDeleteQuery(
|
||||
table: string,
|
||||
where: string
|
||||
): {
|
||||
query: string
|
||||
values: unknown[]
|
||||
} {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const query = `DELETE FROM ${sanitizedTable} WHERE ${where} RETURNING *`
|
||||
|
||||
return { query, values: [] }
|
||||
}
|
||||
@@ -4,7 +4,7 @@ import { env } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
-export const runtime = 'edge'
+export const runtime = 'nodejs'
export const maxDuration = 60
|
||||
|
||||
const logger = createLogger('WandGenerateAPI')
|
||||
@@ -49,6 +49,15 @@ interface RequestBody {
|
||||
history?: ChatMessage[]
|
||||
}
|
||||
|
||||
// Helper: safe stringify for error payloads that may include circular structures
|
||||
function safeStringify(value: unknown): string {
|
||||
try {
|
||||
return JSON.stringify(value)
|
||||
} catch {
|
||||
return '[unserializable]'
|
||||
}
|
||||
}
|
||||
|
||||
export async function POST(req: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
logger.info(`[${requestId}] Received wand generation request`)
|
||||
@@ -110,124 +119,172 @@ export async function POST(req: NextRequest) {
|
||||
`[${requestId}] About to create stream with model: ${useWandAzure ? wandModelName : 'gpt-4o'}`
|
||||
)
|
||||
|
||||
// Add AbortController with timeout
|
||||
const abortController = new AbortController()
|
||||
const timeoutId = setTimeout(() => {
|
||||
abortController.abort('Stream timeout after 30 seconds')
|
||||
}, 30000)
|
||||
// Use native fetch for streaming to avoid OpenAI SDK issues with Node.js runtime
|
||||
const apiUrl = useWandAzure
|
||||
? `${azureEndpoint}/openai/deployments/${wandModelName}/chat/completions?api-version=${azureApiVersion}`
|
||||
: 'https://api.openai.com/v1/chat/completions'
|
||||
|
||||
// Forward request abort signal if available
|
||||
req.signal?.addEventListener('abort', () => {
|
||||
abortController.abort('Request cancelled by client')
|
||||
})
|
||||
const headers: Record<string, string> = {
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
|
||||
-const streamCompletion = await client.chat.completions.create(
-{
if (useWandAzure) {
|
||||
headers['api-key'] = azureApiKey!
|
||||
} else {
|
||||
headers.Authorization = `Bearer ${openaiApiKey}`
|
||||
}
|
||||
|
||||
logger.debug(`[${requestId}] Making streaming request to: ${apiUrl}`)
|
||||
|
||||
const response = await fetch(apiUrl, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: JSON.stringify({
|
||||
model: useWandAzure ? wandModelName : 'gpt-4o',
|
||||
messages: messages,
|
||||
temperature: 0.3,
|
||||
max_tokens: 10000,
|
||||
stream: true,
|
||||
stream_options: { include_usage: true },
|
||||
},
|
||||
{
|
||||
signal: abortController.signal, // Add AbortSignal
|
||||
}
|
||||
)
|
||||
|
||||
clearTimeout(timeoutId) // Clear timeout after successful creation
|
||||
logger.info(`[${requestId}] Stream created successfully, starting reader pattern`)
|
||||
|
||||
logger.debug(`[${requestId}] Stream connection established successfully`)
|
||||
|
||||
return new Response(
|
||||
new ReadableStream({
|
||||
async start(controller) {
|
||||
const encoder = new TextEncoder()
|
||||
|
||||
try {
|
||||
logger.info(`[${requestId}] Starting streaming with timeout protection`)
|
||||
let chunkCount = 0
|
||||
let hasUsageData = false
|
||||
|
||||
// Use for await with AbortController timeout protection
|
||||
for await (const chunk of streamCompletion) {
|
||||
chunkCount++
|
||||
|
||||
if (chunkCount === 1) {
|
||||
logger.info(`[${requestId}] Received first chunk via for await`)
|
||||
}
|
||||
|
||||
// Process the chunk
|
||||
const content = chunk.choices?.[0]?.delta?.content || ''
|
||||
if (content) {
|
||||
// Use SSE format identical to chat streaming
|
||||
controller.enqueue(
|
||||
encoder.encode(`data: ${JSON.stringify({ chunk: content })}\n\n`)
|
||||
)
|
||||
}
|
||||
|
||||
// Check for usage data
|
||||
if (chunk.usage) {
|
||||
hasUsageData = true
|
||||
logger.info(
|
||||
`[${requestId}] Received usage data: ${JSON.stringify(chunk.usage)}`
|
||||
)
|
||||
}
|
||||
|
||||
// Log every 5th chunk to avoid spam
|
||||
if (chunkCount % 5 === 0) {
|
||||
logger.debug(`[${requestId}] Processed ${chunkCount} chunks so far`)
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Reader pattern completed. Total chunks: ${chunkCount}, Usage data received: ${hasUsageData}`
|
||||
)
|
||||
|
||||
// Send completion signal in SSE format
|
||||
logger.info(`[${requestId}] Sending completion signal`)
|
||||
controller.enqueue(encoder.encode(`data: ${JSON.stringify({ done: true })}\n\n`))
|
||||
|
||||
logger.info(`[${requestId}] Closing controller`)
|
||||
controller.close()
|
||||
|
||||
logger.info(`[${requestId}] Wand generation streaming completed successfully`)
|
||||
} catch (streamError: any) {
|
||||
if (streamError.name === 'AbortError') {
|
||||
logger.info(
|
||||
`[${requestId}] Stream was aborted (timeout or cancel): ${streamError.message}`
|
||||
)
|
||||
controller.enqueue(
|
||||
encoder.encode(
|
||||
`data: ${JSON.stringify({ error: 'Stream cancelled', done: true })}\n\n`
|
||||
)
|
||||
)
|
||||
} else {
|
||||
logger.error(`[${requestId}] Streaming error`, { error: streamError.message })
|
||||
controller.enqueue(
|
||||
encoder.encode(
|
||||
`data: ${JSON.stringify({ error: 'Streaming failed', done: true })}\n\n`
|
||||
)
|
||||
)
|
||||
}
|
||||
controller.close()
|
||||
}
|
||||
},
|
||||
}),
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache',
|
||||
Connection: 'keep-alive',
|
||||
'X-Accel-Buffering': 'no',
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
logger.error(`[${requestId}] API request failed`, {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: errorText,
|
||||
})
|
||||
throw new Error(`API request failed: ${response.status} ${response.statusText}`)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Stream response received, starting processing`)
|
||||
|
||||
// Create a TransformStream to process the SSE data
|
||||
const encoder = new TextEncoder()
|
||||
const decoder = new TextDecoder()
|
||||
|
||||
const readable = new ReadableStream({
|
||||
async start(controller) {
|
||||
const reader = response.body?.getReader()
|
||||
if (!reader) {
|
||||
controller.close()
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
let buffer = ''
|
||||
let chunkCount = 0
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
|
||||
if (done) {
|
||||
logger.info(`[${requestId}] Stream completed. Total chunks: ${chunkCount}`)
|
||||
controller.enqueue(encoder.encode(`data: ${JSON.stringify({ done: true })}\n\n`))
|
||||
controller.close()
|
||||
break
|
||||
}
|
||||
|
||||
// Decode the chunk
|
||||
buffer += decoder.decode(value, { stream: true })
|
||||
|
||||
// Process complete SSE messages
|
||||
const lines = buffer.split('\n')
|
||||
buffer = lines.pop() || '' // Keep incomplete line in buffer
|
||||
|
||||
for (const line of lines) {
|
||||
if (line.startsWith('data: ')) {
|
||||
const data = line.slice(6).trim()
|
||||
|
||||
if (data === '[DONE]') {
|
||||
logger.info(`[${requestId}] Received [DONE] signal`)
|
||||
controller.enqueue(
|
||||
encoder.encode(`data: ${JSON.stringify({ done: true })}\n\n`)
|
||||
)
|
||||
controller.close()
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
const parsed = JSON.parse(data)
|
||||
const content = parsed.choices?.[0]?.delta?.content
|
||||
|
||||
if (content) {
|
||||
chunkCount++
|
||||
if (chunkCount === 1) {
|
||||
logger.info(`[${requestId}] Received first content chunk`)
|
||||
}
|
||||
|
||||
// Forward the content
|
||||
controller.enqueue(
|
||||
encoder.encode(`data: ${JSON.stringify({ chunk: content })}\n\n`)
|
||||
)
|
||||
}
|
||||
|
||||
// Log usage if present
|
||||
if (parsed.usage) {
|
||||
logger.info(
|
||||
`[${requestId}] Received usage data: ${JSON.stringify(parsed.usage)}`
|
||||
)
|
||||
}
|
||||
|
||||
// Log progress periodically
|
||||
if (chunkCount % 10 === 0) {
|
||||
logger.debug(`[${requestId}] Processed ${chunkCount} chunks`)
|
||||
}
|
||||
} catch (parseError) {
|
||||
// Skip invalid JSON lines
|
||||
logger.debug(
|
||||
`[${requestId}] Skipped non-JSON line: ${data.substring(0, 100)}`
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Wand generation streaming completed successfully`)
|
||||
} catch (streamError: any) {
|
||||
logger.error(`[${requestId}] Streaming error`, {
|
||||
name: streamError?.name,
|
||||
message: streamError?.message || 'Unknown error',
|
||||
stack: streamError?.stack,
|
||||
})
|
||||
|
||||
// Send error to client
|
||||
const errorData = `data: ${JSON.stringify({ error: 'Streaming failed', done: true })}\n\n`
|
||||
controller.enqueue(encoder.encode(errorData))
|
||||
controller.close()
|
||||
} finally {
|
||||
reader.releaseLock()
|
||||
}
|
||||
},
|
||||
})
|
||||
|
||||
// Return Response with proper headers for Node.js runtime
|
||||
return new Response(readable, {
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache, no-transform',
|
||||
Connection: 'keep-alive',
|
||||
'X-Accel-Buffering': 'no', // Disable Nginx buffering
|
||||
'Transfer-Encoding': 'chunked', // Important for Node.js runtime
|
||||
},
|
||||
})
|
||||
} catch (error: any) {
|
||||
-logger.error(`[${requestId}] Streaming error`, {
-error: error.message || 'Unknown error',
-stack: error.stack,
+logger.error(`[${requestId}] Failed to create stream`, {
name: error?.name,
|
||||
message: error?.message || 'Unknown error',
|
||||
code: error?.code,
|
||||
status: error?.status,
|
||||
responseStatus: error?.response?.status,
|
||||
responseData: error?.response?.data ? safeStringify(error.response.data) : undefined,
|
||||
stack: error?.stack,
|
||||
useWandAzure,
|
||||
model: useWandAzure ? wandModelName : 'gpt-4o',
|
||||
endpoint: useWandAzure ? azureEndpoint : 'api.openai.com',
|
||||
apiVersion: useWandAzure ? azureApiVersion : 'N/A',
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
@@ -261,8 +318,19 @@ export async function POST(req: NextRequest) {
|
||||
return NextResponse.json({ success: true, content: generatedContent })
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Wand generation failed`, {
|
||||
-error: error.message || 'Unknown error',
-stack: error.stack,
name: error?.name,
|
||||
message: error?.message || 'Unknown error',
|
||||
code: error?.code,
|
||||
status: error?.status,
|
||||
responseStatus: error instanceof OpenAI.APIError ? error.status : error?.response?.status,
|
||||
responseData: (error as any)?.response?.data
|
||||
? safeStringify((error as any).response.data)
|
||||
: undefined,
|
||||
stack: error?.stack,
|
||||
useWandAzure,
|
||||
model: useWandAzure ? wandModelName : 'gpt-4o',
|
||||
endpoint: useWandAzure ? azureEndpoint : 'api.openai.com',
|
||||
apiVersion: useWandAzure ? azureApiVersion : 'N/A',
|
||||
})
|
||||
|
||||
let clientErrorMessage = 'Wand generation failed. Please try again later.'
|
||||
|
||||
@@ -153,6 +153,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
edges: [],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
whiles: {},
|
||||
isFromNormalizedTables: true,
|
||||
}),
|
||||
}))
|
||||
|
||||
@@ -11,7 +11,11 @@ import type { BlockConfig } from '@/blocks/types'
|
||||
import { resolveOutputType } from '@/blocks/utils'
|
||||
import { db } from '@/db'
|
||||
import { workflow as workflowTable } from '@/db/schema'
|
||||
-import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
+import {
+generateLoopBlocks,
+generateParallelBlocks,
+generateWhileBlocks,
+} from '@/stores/workflows/workflow/utils'
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
@@ -125,6 +129,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
edges: currentWorkflowData.edges,
|
||||
loops: currentWorkflowData.loops || {},
|
||||
parallels: currentWorkflowData.parallels || {},
|
||||
whiles: currentWorkflowData.whiles || {},
|
||||
}
|
||||
|
||||
const autoLayoutOptions = {
|
||||
@@ -166,6 +171,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
utilities: {
|
||||
generateLoopBlocks: generateLoopBlocks.toString(),
|
||||
generateParallelBlocks: generateParallelBlocks.toString(),
|
||||
generateWhileBlocks: generateWhileBlocks.toString(),
|
||||
resolveOutputType: resolveOutputType.toString(),
|
||||
},
|
||||
},
|
||||
|
||||
@@ -69,6 +69,7 @@ describe('Workflow Deployment API Route', () => {
|
||||
edges: [],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
whiles: {},
|
||||
isFromNormalizedTables: true,
|
||||
}),
|
||||
}))
|
||||
|
||||
@@ -109,6 +109,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
edges: normalizedData.edges,
|
||||
loops: normalizedData.loops,
|
||||
parallels: normalizedData.parallels,
|
||||
whiles: normalizedData.whiles,
|
||||
}
|
||||
|
||||
const { hasWorkflowChanged } = await import('@/lib/workflows/utils')
|
||||
@@ -192,6 +193,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
const blocksMap: Record<string, any> = {}
|
||||
const loops: Record<string, any> = {}
|
||||
const parallels: Record<string, any> = {}
|
||||
const whiles: Record<string, any> = {}
|
||||
|
||||
// Process blocks
|
||||
blocks.forEach((block) => {
|
||||
@@ -206,7 +208,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
}
|
||||
})
|
||||
|
||||
-// Process subflows (loops and parallels)
+// Process subflows (loops, parallels, and whiles)
subflows.forEach((subflow) => {
|
||||
const config = (subflow.config as any) || {}
|
||||
if (subflow.type === 'loop') {
|
||||
@@ -225,6 +227,13 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
distribution: config.distribution || '',
|
||||
parallelType: config.parallelType || 'count',
|
||||
}
|
||||
} else if (subflow.type === 'while') {
|
||||
whiles[subflow.id] = {
|
||||
id: subflow.id,
|
||||
nodes: config.nodes || [],
|
||||
iterations: config.iterations || 1,
|
||||
whileType: config.whileType || 'while',
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
@@ -244,6 +253,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
edges: edgesArray,
|
||||
loops,
|
||||
parallels,
|
||||
whiles,
|
||||
lastSaved: Date.now(),
|
||||
}
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getUserEntityPermissions } from '@/lib/permissions/utils'
|
||||
import { db } from '@/db'
|
||||
import { workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@/db/schema'
|
||||
-import type { LoopConfig, ParallelConfig } from '@/stores/workflows/workflow/types'
+import type { LoopConfig, ParallelConfig, WhileConfig } from '@/stores/workflows/workflow/types'
|
||||
const logger = createLogger('WorkflowDuplicateAPI')
|
||||
|
||||
@@ -209,16 +209,16 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
})
|
||||
|
||||
// Update block references in subflow config
|
||||
-let updatedConfig: LoopConfig | ParallelConfig = subflow.config as
+let updatedConfig: LoopConfig | ParallelConfig | WhileConfig = subflow.config as
| LoopConfig
| ParallelConfig
+| WhileConfig
if (subflow.config && typeof subflow.config === 'object') {
updatedConfig = JSON.parse(JSON.stringify(subflow.config)) as
| LoopConfig
| ParallelConfig
+| WhileConfig
// Update the config ID to match the new subflow ID
|
||||
|
||||
;(updatedConfig as any).id = newSubflowId
|
||||
|
||||
// Update node references in config if they exist
|
||||
|
||||
@@ -121,6 +121,7 @@ describe('Workflow Execution API Route', () => {
|
||||
],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
whiles: {},
|
||||
isFromNormalizedTables: false, // Changed to false since it's from deployed state
|
||||
}),
|
||||
}))
|
||||
@@ -559,6 +560,7 @@ describe('Workflow Execution API Route', () => {
|
||||
],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
whiles: {},
|
||||
isFromNormalizedTables: false, // Changed to false since it's from deployed state
|
||||
}),
|
||||
}))
|
||||
|
||||
@@ -115,13 +115,14 @@ async function executeWorkflow(workflow: any, requestId: string, input?: any): P
|
||||
const deployedData = await loadDeployedWorkflowState(workflowId)
|
||||
|
||||
// Use deployed data as primary source for API executions
|
||||
-const { blocks, edges, loops, parallels } = deployedData
+const { blocks, edges, loops, parallels, whiles } = deployedData
logger.info(`[${requestId}] Using deployed state for workflow execution: ${workflowId}`)
|
||||
logger.debug(`[${requestId}] Deployed data loaded:`, {
|
||||
blocksCount: Object.keys(blocks || {}).length,
|
||||
edgesCount: (edges || []).length,
|
||||
loopsCount: Object.keys(loops || {}).length,
|
||||
parallelsCount: Object.keys(parallels || {}).length,
|
||||
whilesCount: Object.keys(whiles || {}).length,
|
||||
})
|
||||
|
||||
// Use the same execution flow as in scheduled executions
|
||||
@@ -275,6 +276,7 @@ async function executeWorkflow(workflow: any, requestId: string, input?: any): P
|
||||
edges,
|
||||
loops,
|
||||
parallels,
|
||||
whiles,
|
||||
true // Enable validation during execution
|
||||
)
|
||||
|
||||
|
||||
@@ -52,6 +52,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
edgesCount: deployedState.edges.length,
|
||||
loopsCount: Object.keys(deployedState.loops || {}).length,
|
||||
parallelsCount: Object.keys(deployedState.parallels || {}).length,
|
||||
whilesCount: Object.keys(deployedState.whiles || {}).length,
|
||||
})
|
||||
|
||||
// Save deployed state to normalized tables
|
||||
@@ -60,6 +61,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
edges: deployedState.edges,
|
||||
loops: deployedState.loops || {},
|
||||
parallels: deployedState.parallels || {},
|
||||
whiles: deployedState.whiles || {},
|
||||
lastSaved: Date.now(),
|
||||
isDeployed: workflowData.isDeployed,
|
||||
deployedAt: workflowData.deployedAt,
|
||||
|
||||
@@ -96,6 +96,7 @@ describe('Workflow By ID API Route', () => {
|
||||
edges: [],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
whiles: {},
|
||||
isFromNormalizedTables: true,
|
||||
}
|
||||
|
||||
@@ -145,6 +146,7 @@ describe('Workflow By ID API Route', () => {
|
||||
edges: [],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
whiles: {},
|
||||
isFromNormalizedTables: true,
|
||||
}
|
||||
|
||||
@@ -241,6 +243,7 @@ describe('Workflow By ID API Route', () => {
|
||||
edges: [{ id: 'edge-1', source: 'block-1', target: 'block-2' }],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
whiles: {},
|
||||
isFromNormalizedTables: true,
|
||||
}
|
||||
|
||||
|
||||
@@ -126,6 +126,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
edgesCount: normalizedData.edges.length,
|
||||
loopsCount: Object.keys(normalizedData.loops).length,
|
||||
parallelsCount: Object.keys(normalizedData.parallels).length,
|
||||
whilesCount: Object.keys(normalizedData.whiles).length,
|
||||
loops: normalizedData.loops,
|
||||
})
|
||||
|
||||
@@ -141,6 +142,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
edges: normalizedData.edges,
|
||||
loops: normalizedData.loops,
|
||||
parallels: normalizedData.parallels,
|
||||
whiles: normalizedData.whiles,
|
||||
lastSaved: Date.now(),
|
||||
isDeployed: workflowData.isDeployed || false,
|
||||
deployedAt: workflowData.deployedAt,
|
||||
|
||||
@@ -24,6 +24,7 @@ const BlockDataSchema = z.object({
|
||||
count: z.number().optional(),
|
||||
loopType: z.enum(['for', 'forEach']).optional(),
|
||||
parallelType: z.enum(['collection', 'count']).optional(),
|
||||
whileType: z.enum(['while', 'doWhile']).optional(),
|
||||
type: z.string().optional(),
|
||||
})
|
||||
|
||||
@@ -87,6 +88,13 @@ const ParallelSchema = z.object({
|
||||
parallelType: z.enum(['count', 'collection']).optional(),
|
||||
})
|
||||
|
||||
const WhileSchema = z.object({
|
||||
id: z.string(),
|
||||
nodes: z.array(z.string()),
|
||||
iterations: z.number(),
|
||||
whileType: z.enum(['while', 'doWhile']),
|
||||
})
|
||||
|
||||
const DeploymentStatusSchema = z.object({
|
||||
id: z.string(),
|
||||
status: z.enum(['deploying', 'deployed', 'failed', 'stopping', 'stopped']),
|
||||
@@ -99,6 +107,7 @@ const WorkflowStateSchema = z.object({
|
||||
edges: z.array(EdgeSchema),
|
||||
loops: z.record(LoopSchema).optional(),
|
||||
parallels: z.record(ParallelSchema).optional(),
|
||||
whiles: z.record(WhileSchema).optional(),
|
||||
lastSaved: z.number().optional(),
|
||||
isDeployed: z.boolean().optional(),
|
||||
deployedAt: z.date().optional(),
|
||||
@@ -197,6 +206,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
edges: state.edges,
|
||||
loops: state.loops || {},
|
||||
parallels: state.parallels || {},
|
||||
whiles: state.whiles || {},
|
||||
lastSaved: state.lastSaved || Date.now(),
|
||||
isDeployed: state.isDeployed || false,
|
||||
deployedAt: state.deployedAt,
|
||||
@@ -231,6 +241,9 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
success: true,
|
||||
blocksCount: Object.keys(filteredBlocks).length,
|
||||
edgesCount: state.edges.length,
|
||||
loopsCount: Object.keys(state.loops || {}).length,
|
||||
parallelsCount: Object.keys(state.parallels || {}).length,
|
||||
whilesCount: Object.keys(state.whiles || {}).length,
|
||||
},
|
||||
{ status: 200 }
|
||||
)
|
||||
|
||||
@@ -38,7 +38,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
const blocksMap: Record<string, any> = {}
|
||||
const loops: Record<string, any> = {}
|
||||
const parallels: Record<string, any> = {}
|
||||
|
||||
const whiles: Record<string, any> = {}
|
||||
// Process blocks
|
||||
blocks.forEach((block) => {
|
||||
blocksMap[block.id] = {
|
||||
@@ -71,6 +71,13 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
distribution: config.distribution || '',
|
||||
parallelType: config.parallelType || 'count',
|
||||
}
|
||||
} else if (subflow.type === 'while') {
|
||||
whiles[subflow.id] = {
|
||||
id: subflow.id,
|
||||
nodes: config.nodes || [],
|
||||
iterations: config.iterations || 1,
|
||||
whileType: config.whileType || 'while',
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
@@ -90,6 +97,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
edges: edgesArray,
|
||||
loops,
|
||||
parallels,
|
||||
whiles,
|
||||
lastSaved: Date.now(),
|
||||
}
|
||||
|
||||
|
||||
@@ -16,7 +16,11 @@ import type { BlockConfig } from '@/blocks/types'
|
||||
import { resolveOutputType } from '@/blocks/utils'
|
||||
import { db } from '@/db'
|
||||
import { workflowCheckpoints, workflow as workflowTable } from '@/db/schema'
|
||||
-import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
+import {
+generateLoopBlocks,
+generateParallelBlocks,
+generateWhileBlocks,
+} from '@/stores/workflows/workflow/utils'
|
||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
||||
|
||||
@@ -80,6 +84,7 @@ async function createWorkflowCheckpoint(
|
||||
generateLoopBlocks: generateLoopBlocks.toString(),
|
||||
generateParallelBlocks: generateParallelBlocks.toString(),
|
||||
resolveOutputType: resolveOutputType.toString(),
|
||||
generateWhileBlocks: generateWhileBlocks.toString(),
|
||||
},
|
||||
}),
|
||||
})
|
||||
@@ -293,6 +298,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
generateLoopBlocks: generateLoopBlocks.toString(),
|
||||
generateParallelBlocks: generateParallelBlocks.toString(),
|
||||
resolveOutputType: resolveOutputType.toString(),
|
||||
generateWhileBlocks: generateWhileBlocks.toString(),
|
||||
},
|
||||
options: {
|
||||
generateNewIds: false, // We'll handle ID generation manually for now
|
||||
@@ -373,6 +379,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
edges: [] as any[],
|
||||
loops: {} as Record<string, any>,
|
||||
parallels: {} as Record<string, any>,
|
||||
whiles: {} as Record<string, any>,
|
||||
lastSaved: Date.now(),
|
||||
isDeployed: false,
|
||||
deployedAt: undefined,
|
||||
@@ -391,7 +398,10 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
// Get block configuration for proper setup
|
||||
const blockConfig = getBlock(block.type)
|
||||
|
||||
-if (!blockConfig && (block.type === 'loop' || block.type === 'parallel')) {
+if (
+!blockConfig &&
+(block.type === 'loop' || block.type === 'parallel' || block.type === 'while')
+) {
// Handle loop/parallel blocks (they don't have regular block configs)
|
||||
// Preserve parentId if it exists (though loop/parallel shouldn't have parents)
|
||||
const containerData = block.data || {}
|
||||
@@ -414,7 +424,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
height: 0,
|
||||
data: containerData,
|
||||
}
|
||||
-logger.debug(`[${requestId}] Processed loop/parallel block: ${block.id} -> ${newId}`)
+logger.debug(`[${requestId}] Processed loop/parallel/while block: ${block.id} -> ${newId}`)
} else if (blockConfig) {
|
||||
// Handle regular blocks with proper configuration
|
||||
const subBlocks: Record<string, any> = {}
|
||||
@@ -545,14 +555,17 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
// Generate loop and parallel configurations
|
||||
const loops = generateLoopBlocks(newWorkflowState.blocks)
|
||||
const parallels = generateParallelBlocks(newWorkflowState.blocks)
|
||||
const whiles = generateWhileBlocks(newWorkflowState.blocks)
|
||||
newWorkflowState.loops = loops
|
||||
newWorkflowState.parallels = parallels
|
||||
newWorkflowState.whiles = whiles
|
||||
|
||||
logger.info(`[${requestId}] Generated workflow state`, {
|
||||
blocksCount: Object.keys(newWorkflowState.blocks).length,
|
||||
edgesCount: newWorkflowState.edges.length,
|
||||
loopsCount: Object.keys(loops).length,
|
||||
parallelsCount: Object.keys(parallels).length,
|
||||
whilesCount: Object.keys(whiles).length,
|
||||
})
|
||||
|
||||
// Apply intelligent autolayout if requested
|
||||
@@ -566,6 +579,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
edges: newWorkflowState.edges,
|
||||
loops: newWorkflowState.loops || {},
|
||||
parallels: newWorkflowState.parallels || {},
|
||||
whiles: newWorkflowState.whiles || {},
|
||||
}
|
||||
|
||||
const autoLayoutOptions = {
|
||||
@@ -608,6 +622,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
generateLoopBlocks: generateLoopBlocks.toString(),
|
||||
generateParallelBlocks: generateParallelBlocks.toString(),
|
||||
resolveOutputType: resolveOutputType.toString(),
|
||||
generateWhileBlocks: generateWhileBlocks.toString(),
|
||||
},
|
||||
},
|
||||
})
|
||||
@@ -685,6 +700,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
edgesCount: newWorkflowState.edges.length,
|
||||
loopsCount: Object.keys(loops).length,
|
||||
parallelsCount: Object.keys(parallels).length,
|
||||
whilesCount: Object.keys(whiles).length,
|
||||
},
|
||||
errors: [],
|
||||
warnings,
|
||||
|
||||
@@ -4,7 +4,11 @@ import { simAgentClient } from '@/lib/sim-agent'
|
||||
import { getAllBlocks } from '@/blocks/registry'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { resolveOutputType } from '@/blocks/utils'
|
||||
-import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
+import {
+generateLoopBlocks,
+generateParallelBlocks,
+generateWhileBlocks,
+} from '@/stores/workflows/workflow/utils'
|
||||
const logger = createLogger('WorkflowYamlAPI')
|
||||
|
||||
@@ -50,6 +54,7 @@ export async function POST(request: NextRequest) {
|
||||
generateLoopBlocks: generateLoopBlocks.toString(),
|
||||
generateParallelBlocks: generateParallelBlocks.toString(),
|
||||
resolveOutputType: resolveOutputType.toString(),
|
||||
generateWhileBlocks: generateWhileBlocks.toString(),
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
@@ -10,7 +10,11 @@ import type { BlockConfig } from '@/blocks/types'
|
||||
import { resolveOutputType } from '@/blocks/utils'
|
||||
import { db } from '@/db'
|
||||
import { workflow } from '@/db/schema'
|
||||
-import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
+import {
+generateLoopBlocks,
+generateParallelBlocks,
+generateWhileBlocks,
+} from '@/stores/workflows/workflow/utils'
|
||||
const logger = createLogger('WorkflowYamlExportAPI')
|
||||
|
||||
@@ -144,6 +148,7 @@ export async function GET(request: NextRequest) {
|
||||
generateLoopBlocks: generateLoopBlocks.toString(),
|
||||
generateParallelBlocks: generateParallelBlocks.toString(),
|
||||
resolveOutputType: resolveOutputType.toString(),
|
||||
generateWhileBlocks: generateWhileBlocks.toString(),
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
@@ -9,10 +9,12 @@ import { resolveOutputType } from '@/blocks/utils'
|
||||
import {
|
||||
convertLoopBlockToLoop,
|
||||
convertParallelBlockToParallel,
|
||||
convertWhileBlockToWhile,
|
||||
findAllDescendantNodes,
|
||||
findChildNodes,
|
||||
generateLoopBlocks,
|
||||
generateParallelBlocks,
|
||||
generateWhileBlocks,
|
||||
} from '@/stores/workflows/workflow/utils'
|
||||
|
||||
const logger = createLogger('YamlAutoLayoutAPI')
|
||||
@@ -26,6 +28,7 @@ const AutoLayoutRequestSchema = z.object({
|
||||
edges: z.array(z.any()),
|
||||
loops: z.record(z.any()).optional().default({}),
|
||||
parallels: z.record(z.any()).optional().default({}),
|
||||
whiles: z.record(z.any()).optional().default({}),
|
||||
}),
|
||||
options: z
|
||||
.object({
|
||||
@@ -36,6 +39,7 @@ const AutoLayoutRequestSchema = z.object({
|
||||
horizontal: z.number().optional(),
|
||||
vertical: z.number().optional(),
|
||||
layer: z.number().optional(),
|
||||
while: z.number().optional(),
|
||||
})
|
||||
.optional(),
|
||||
alignment: z.enum(['start', 'center', 'end']).optional(),
|
||||
@@ -45,6 +49,12 @@ const AutoLayoutRequestSchema = z.object({
|
||||
y: z.number().optional(),
|
||||
})
|
||||
.optional(),
|
||||
while: z
|
||||
.object({
|
||||
x: z.number().optional(),
|
||||
y: z.number().optional(),
|
||||
})
|
||||
.optional(),
|
||||
})
|
||||
.optional(),
|
||||
})
|
||||
@@ -133,8 +143,10 @@ export async function POST(request: NextRequest) {
|
||||
resolveOutputType: resolveOutputType.toString(),
|
||||
convertLoopBlockToLoop: convertLoopBlockToLoop.toString(),
|
||||
convertParallelBlockToParallel: convertParallelBlockToParallel.toString(),
|
||||
convertWhileBlockToWhile: convertWhileBlockToWhile.toString(),
|
||||
findChildNodes: findChildNodes.toString(),
|
||||
findAllDescendantNodes: findAllDescendantNodes.toString(),
|
||||
generateWhileBlocks: generateWhileBlocks.toString(),
|
||||
},
|
||||
}),
|
||||
})
|
||||
@@ -192,6 +204,7 @@ export async function POST(request: NextRequest) {
|
||||
edges: workflowState.edges || [],
|
||||
loops: workflowState.loops || {},
|
||||
parallels: workflowState.parallels || {},
|
||||
whiles: workflowState.whiles || {},
|
||||
},
|
||||
errors: result.errors,
|
||||
}
|
||||
|
||||
@@ -9,10 +9,12 @@ import { resolveOutputType } from '@/blocks/utils'
|
||||
import {
|
||||
convertLoopBlockToLoop,
|
||||
convertParallelBlockToParallel,
|
||||
convertWhileBlockToWhile,
|
||||
findAllDescendantNodes,
|
||||
findChildNodes,
|
||||
generateLoopBlocks,
|
||||
generateParallelBlocks,
|
||||
generateWhileBlocks,
|
||||
} from '@/stores/workflows/workflow/utils'
|
||||
|
||||
const logger = createLogger('YamlDiffCreateAPI')
|
||||
@@ -130,8 +132,10 @@ export async function POST(request: NextRequest) {
|
||||
resolveOutputType: resolveOutputType.toString(),
|
||||
convertLoopBlockToLoop: convertLoopBlockToLoop.toString(),
|
||||
convertParallelBlockToParallel: convertParallelBlockToParallel.toString(),
|
||||
convertWhileBlockToWhile: convertWhileBlockToWhile.toString(),
|
||||
findChildNodes: findChildNodes.toString(),
|
||||
findAllDescendantNodes: findAllDescendantNodes.toString(),
|
||||
generateWhileBlocks: generateWhileBlocks.toString(),
|
||||
},
|
||||
options,
|
||||
}),
|
||||
@@ -168,7 +172,7 @@ export async function POST(request: NextRequest) {
|
||||
dataKeys: block.data ? Object.keys(block.data) : [],
|
||||
})
|
||||
}
|
||||
-if (block.type === 'loop' || block.type === 'parallel') {
+if (block.type === 'loop' || block.type === 'parallel' || block.type === 'while') {
logger.info(`[${requestId}] Container block ${blockId} (${block.name}):`, {
|
||||
type: block.type,
|
||||
hasData: !!block.data,
|
||||
@@ -180,8 +184,10 @@ export async function POST(request: NextRequest) {
|
||||
// Log existing loops/parallels from sim-agent
|
||||
const loops = result.diff?.proposedState?.loops || result.loops || {}
|
||||
const parallels = result.diff?.proposedState?.parallels || result.parallels || {}
|
||||
const whiles = result.diff?.proposedState?.whiles || result.whiles || {}
|
||||
logger.info(`[${requestId}] Sim agent loops:`, loops)
|
||||
logger.info(`[${requestId}] Sim agent parallels:`, parallels)
|
||||
logger.info(`[${requestId}] Sim agent whiles:`, whiles)
|
||||
}
|
||||
|
||||
// Log diff analysis specifically
|
||||
@@ -207,7 +213,7 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
// Find all loop and parallel blocks
|
||||
const containerBlocks = Object.values(blocks).filter(
|
||||
-(block: any) => block.type === 'loop' || block.type === 'parallel'
+(block: any) => block.type === 'loop' || block.type === 'parallel' || block.type === 'while'
)
|
||||
|
||||
// For each container, find its children based on loop-start edges
|
||||
@@ -251,17 +257,23 @@ export async function POST(request: NextRequest) {
|
||||
// Now regenerate loops and parallels with the fixed relationships
|
||||
const loops = generateLoopBlocks(result.diff.proposedState.blocks)
|
||||
const parallels = generateParallelBlocks(result.diff.proposedState.blocks)
|
||||
|
||||
const whiles = generateWhileBlocks(result.diff.proposedState.blocks)
|
||||
result.diff.proposedState.loops = loops
|
||||
result.diff.proposedState.parallels = parallels
|
||||
result.diff.proposedState.whiles = whiles
|
||||
|
||||
logger.info(`[${requestId}] Regenerated loops and parallels after fixing parent-child:`, {
|
||||
loopsCount: Object.keys(loops).length,
|
||||
parallelsCount: Object.keys(parallels).length,
|
||||
whilesCount: Object.keys(whiles).length,
|
||||
loops: Object.keys(loops).map((id) => ({
|
||||
id,
|
||||
nodes: loops[id].nodes,
|
||||
})),
|
||||
whiles: Object.keys(whiles).map((id) => ({
|
||||
id,
|
||||
nodes: whiles[id].nodes,
|
||||
})),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -309,7 +321,7 @@ export async function POST(request: NextRequest) {
|
||||
// Generate loops and parallels for the blocks with fixed relationships
|
||||
const loops = generateLoopBlocks(result.blocks)
|
||||
const parallels = generateParallelBlocks(result.blocks)
|
||||
|
||||
const whiles = generateWhileBlocks(result.blocks)
|
||||
const transformedResult = {
|
||||
success: result.success,
|
||||
diff: {
|
||||
@@ -318,6 +330,7 @@ export async function POST(request: NextRequest) {
|
||||
edges: result.edges || [],
|
||||
loops: loops,
|
||||
parallels: parallels,
|
||||
whiles: whiles,
|
||||
},
|
||||
diffAnalysis: diffAnalysis,
|
||||
metadata: result.metadata || {
|
||||
|
||||
@@ -9,10 +9,12 @@ import { resolveOutputType } from '@/blocks/utils'
|
||||
import {
|
||||
convertLoopBlockToLoop,
|
||||
convertParallelBlockToParallel,
|
||||
convertWhileBlockToWhile,
|
||||
findAllDescendantNodes,
|
||||
findChildNodes,
|
||||
generateLoopBlocks,
|
||||
generateParallelBlocks,
|
||||
generateWhileBlocks,
|
||||
} from '@/stores/workflows/workflow/utils'
|
||||
|
||||
const logger = createLogger('YamlDiffMergeAPI')
|
||||
@@ -27,6 +29,7 @@ const MergeDiffRequestSchema = z.object({
|
||||
edges: z.array(z.any()),
|
||||
loops: z.record(z.any()).optional(),
|
||||
parallels: z.record(z.any()).optional(),
|
||||
whiles: z.record(z.any()).optional(),
|
||||
}),
|
||||
diffAnalysis: z.any().optional(),
|
||||
metadata: z.object({
|
||||
@@ -103,6 +106,8 @@ export async function POST(request: NextRequest) {
|
||||
convertParallelBlockToParallel: convertParallelBlockToParallel.toString(),
|
||||
findChildNodes: findChildNodes.toString(),
|
||||
findAllDescendantNodes: findAllDescendantNodes.toString(),
|
||||
generateWhileBlocks: generateWhileBlocks.toString(),
|
||||
convertWhileBlockToWhile: convertWhileBlockToWhile.toString(),
|
||||
},
|
||||
options,
|
||||
}),
|
||||
@@ -139,7 +144,7 @@ export async function POST(request: NextRequest) {
|
||||
dataKeys: block.data ? Object.keys(block.data) : [],
|
||||
})
|
||||
}
|
||||
-if (block.type === 'loop' || block.type === 'parallel') {
+if (block.type === 'loop' || block.type === 'parallel' || block.type === 'while') {
logger.info(`[${requestId}] Container block ${blockId} (${block.name}):`, {
|
||||
type: block.type,
|
||||
hasData: !!block.data,
|
||||
@@ -151,8 +156,10 @@ export async function POST(request: NextRequest) {
|
||||
// Log existing loops/parallels from sim-agent
|
||||
const loops = result.diff?.proposedState?.loops || result.loops || {}
|
||||
const parallels = result.diff?.proposedState?.parallels || result.parallels || {}
|
||||
const whiles = result.diff?.proposedState?.whiles || result.whiles || {}
|
||||
logger.info(`[${requestId}] Sim agent loops:`, loops)
|
||||
logger.info(`[${requestId}] Sim agent parallels:`, parallels)
|
||||
logger.info(`[${requestId}] Sim agent whiles:`, whiles)
|
||||
}
|
||||
|
||||
// Post-process the result to ensure loops and parallels are properly generated
|
||||
@@ -165,13 +172,16 @@ export async function POST(request: NextRequest) {

// Find all loop and parallel blocks
const containerBlocks = Object.values(blocks).filter(
  (block: any) => block.type === 'loop' || block.type === 'parallel'
  (block: any) => block.type === 'loop' || block.type === 'parallel' || block.type === 'while'
)

// For each container, find its children based on loop-start edges
containerBlocks.forEach((container: any) => {
  const childEdges = edges.filter(
    (edge: any) => edge.source === container.id && edge.sourceHandle === 'loop-start-source'
    (edge: any) =>
      edge.source === container.id &&
      (edge.sourceHandle === 'loop-start-source' ||
        edge.sourceHandle === 'while-start-source')
  )

  childEdges.forEach((edge: any) => {
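The widened edge filter above is how container children are discovered: anything wired to the container's loop-start or while-start handle is treated as a child. A standalone sketch of the same idea (the edge shape is assumed):

// Sketch: collect IDs of blocks connected to a container's start handle.
function findContainerChildIdsSketch(
  containerId: string,
  edges: Array<{ source: string; target: string; sourceHandle?: string }>
): string[] {
  return edges
    .filter(
      (e) =>
        e.source === containerId &&
        (e.sourceHandle === 'loop-start-source' || e.sourceHandle === 'while-start-source')
    )
    .map((e) => e.target)
}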
@@ -198,17 +208,23 @@ export async function POST(request: NextRequest) {
|
||||
// Now regenerate loops and parallels with the fixed relationships
|
||||
const loops = generateLoopBlocks(result.diff.proposedState.blocks)
|
||||
const parallels = generateParallelBlocks(result.diff.proposedState.blocks)
|
||||
|
||||
const whiles = generateWhileBlocks(result.diff.proposedState.blocks)
|
||||
result.diff.proposedState.loops = loops
|
||||
result.diff.proposedState.parallels = parallels
|
||||
result.diff.proposedState.whiles = whiles
|
||||
|
||||
logger.info(`[${requestId}] Regenerated loops and parallels after fixing parent-child:`, {
|
||||
loopsCount: Object.keys(loops).length,
|
||||
parallelsCount: Object.keys(parallels).length,
|
||||
whilesCount: Object.keys(whiles).length,
|
||||
loops: Object.keys(loops).map((id) => ({
|
||||
id,
|
||||
nodes: loops[id].nodes,
|
||||
})),
|
||||
whiles: Object.keys(whiles).map((id) => ({
|
||||
id,
|
||||
nodes: whiles[id].nodes,
|
||||
})),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -223,13 +239,16 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
// Find all loop and parallel blocks
|
||||
const containerBlocks = Object.values(blocks).filter(
|
||||
(block: any) => block.type === 'loop' || block.type === 'parallel'
|
||||
(block: any) => block.type === 'loop' || block.type === 'parallel' || block.type === 'while'
|
||||
)
|
||||
|
||||
// For each container, find its children based on loop-start edges
|
||||
containerBlocks.forEach((container: any) => {
|
||||
const childEdges = edges.filter(
|
||||
(edge: any) => edge.source === container.id && edge.sourceHandle === 'loop-start-source'
|
||||
(edge: any) =>
|
||||
edge.source === container.id &&
|
||||
(edge.sourceHandle === 'loop-start-source' ||
|
||||
edge.sourceHandle === 'while-start-source')
|
||||
)
|
||||
|
||||
childEdges.forEach((edge: any) => {
|
||||
@@ -256,7 +275,7 @@ export async function POST(request: NextRequest) {
|
||||
// Generate loops and parallels for the blocks with fixed relationships
|
||||
const loops = generateLoopBlocks(result.blocks)
|
||||
const parallels = generateParallelBlocks(result.blocks)
|
||||
|
||||
const whiles = generateWhileBlocks(result.blocks)
|
||||
const transformedResult = {
|
||||
success: result.success,
|
||||
diff: {
|
||||
@@ -265,6 +284,7 @@ export async function POST(request: NextRequest) {
|
||||
edges: result.edges || existingDiff.proposedState.edges || [],
|
||||
loops: loops,
|
||||
parallels: parallels,
|
||||
whiles: whiles,
|
||||
},
|
||||
diffAnalysis: diffAnalysis,
|
||||
metadata: result.metadata || {
|
||||
|
||||
@@ -6,7 +6,11 @@ import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
import {
  generateLoopBlocks,
  generateParallelBlocks,
  generateWhileBlocks,
} from '@/stores/workflows/workflow/utils'

const logger = createLogger('YamlGenerateAPI')

@@ -60,6 +64,7 @@ export async function POST(request: NextRequest) {
  generateLoopBlocks: generateLoopBlocks.toString(),
  generateParallelBlocks: generateParallelBlocks.toString(),
  resolveOutputType: resolveOutputType.toString(),
  generateWhileBlocks: generateWhileBlocks.toString(),
},
}),
})

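This route, and the parse and to-workflow routes below, all ship the store helpers to the sim agent by serializing them with .toString(). A hedged sketch of that request body; only the utility fields are taken from the diff, the surrounding field names are assumptions:

// Sketch of the payload these routes are assumed to assemble.
const payloadSketch = {
  blocks: getAllBlocks().map((b: BlockConfig) => ({ type: b.type, name: b.name })), // assumed field
  utilities: {
    generateLoopBlocks: generateLoopBlocks.toString(),
    generateParallelBlocks: generateParallelBlocks.toString(),
    generateWhileBlocks: generateWhileBlocks.toString(),
    resolveOutputType: resolveOutputType.toString(),
  },
}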
@@ -6,7 +6,11 @@ import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
|
||||
import { getAllBlocks } from '@/blocks/registry'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { resolveOutputType } from '@/blocks/utils'
|
||||
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
|
||||
import {
|
||||
generateLoopBlocks,
|
||||
generateParallelBlocks,
|
||||
generateWhileBlocks,
|
||||
} from '@/stores/workflows/workflow/utils'
|
||||
|
||||
const logger = createLogger('YamlParseAPI')
|
||||
|
||||
@@ -57,6 +61,7 @@ export async function POST(request: NextRequest) {
|
||||
generateLoopBlocks: generateLoopBlocks.toString(),
|
||||
generateParallelBlocks: generateParallelBlocks.toString(),
|
||||
resolveOutputType: resolveOutputType.toString(),
|
||||
generateWhileBlocks: generateWhileBlocks.toString(),
|
||||
},
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -6,7 +6,11 @@ import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
|
||||
import { getAllBlocks } from '@/blocks/registry'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { resolveOutputType } from '@/blocks/utils'
|
||||
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
|
||||
import {
|
||||
generateLoopBlocks,
|
||||
generateParallelBlocks,
|
||||
generateWhileBlocks,
|
||||
} from '@/stores/workflows/workflow/utils'
|
||||
|
||||
const logger = createLogger('YamlToWorkflowAPI')
|
||||
|
||||
@@ -65,6 +69,7 @@ export async function POST(request: NextRequest) {
|
||||
generateLoopBlocks: generateLoopBlocks.toString(),
|
||||
generateParallelBlocks: generateParallelBlocks.toString(),
|
||||
resolveOutputType: resolveOutputType.toString(),
|
||||
generateWhileBlocks: generateWhileBlocks.toString(),
|
||||
},
|
||||
options,
|
||||
}),
|
||||
|
||||
@@ -15,6 +15,7 @@

.workflow-container .react-flow__node-loopNode,
.workflow-container .react-flow__node-parallelNode,
.workflow-container .react-flow__node-whileNode,
.workflow-container .react-flow__node-subflowNode {
  z-index: -1 !important;
}

@@ -46,6 +46,7 @@ export function DeployedWorkflowModal({
  edges: state.edges,
  loops: state.loops,
  parallels: state.parallels,
  whiles: state.whiles,
}))

const handleRevert = () => {

@@ -83,6 +83,7 @@ export function DiffControls() {
|
||||
edges: rawState.edges || [],
|
||||
loops: rawState.loops || {},
|
||||
parallels: rawState.parallels || {},
|
||||
whiles: rawState.whiles || {},
|
||||
lastSaved: rawState.lastSaved || Date.now(),
|
||||
isDeployed: rawState.isDeployed || false,
|
||||
deploymentStatuses: rawState.deploymentStatuses || {},
|
||||
@@ -98,6 +99,7 @@ export function DiffControls() {
|
||||
edgesCount: workflowState.edges.length,
|
||||
loopsCount: Object.keys(workflowState.loops).length,
|
||||
parallelsCount: Object.keys(workflowState.parallels).length,
|
||||
whilesCount: Object.keys(workflowState.whiles).length,
|
||||
hasRequiredFields: Object.values(workflowState.blocks).every(
|
||||
(block) => block.id && block.type && block.name && block.position
|
||||
),
|
||||
@@ -146,6 +148,7 @@ export function DiffControls() {
|
||||
workflowId: activeWorkflowId,
|
||||
chatId: currentChat.id,
|
||||
messageId,
|
||||
whiles: workflowState.whiles,
|
||||
workflowState: JSON.stringify(workflowState),
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -12,9 +12,10 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import 'prismjs/components/prism-javascript'
import 'prismjs/themes/prism.css'

type IterationType = 'loop' | 'parallel'
type IterationType = 'loop' | 'parallel' | 'while'
type LoopType = 'for' | 'forEach'
type ParallelType = 'count' | 'collection'
type WhileType = 'while' | 'doWhile'

interface IterationNodeData {
  width?: number
@@ -25,9 +26,11 @@ interface IterationNodeData {
  extent?: 'parent'
  loopType?: LoopType
  parallelType?: ParallelType
  whileType?: WhileType
  // Common
  count?: number
  collection?: string | any[] | Record<string, any>
  condition?: string
  isPreview?: boolean
  executionState?: {
    currentIteration?: number
@@ -65,6 +68,12 @@ const CONFIG = {
      items: 'distribution' as const,
    },
  },
  while: {
    typeLabels: { while: 'While Loop', doWhile: 'Do While' },
    typeKey: 'whileType' as const,
    storeKey: 'whiles' as const,
    maxIterations: 100,
  },
} as const

export function IterationBadges({ nodeId, data, iterationType }: IterationBadgesProps) {
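CONFIG is keyed by the iteration kind, so the while entry added above slots in next to loop and parallel. A small sketch of a lookup against it; the loop/parallel typeLabels are assumed to exist with the same shape as the while entry:

// Sketch only — relies on the CONFIG.while entry from the diff; other entries assumed.
function getIterationTypeLabelSketch(kind: 'loop' | 'parallel' | 'while', subtype: string): string {
  const entry = CONFIG[kind] as { typeLabels: Record<string, string> }
  // e.g. getIterationTypeLabelSketch('while', 'doWhile') === 'Do While'
  return entry.typeLabels[subtype] ?? subtype
}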
@@ -77,9 +86,21 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
|
||||
|
||||
// Determine current type and values
|
||||
const currentType = (data?.[config.typeKey] ||
|
||||
(iterationType === 'loop' ? 'for' : 'count')) as any
|
||||
const configIterations = (nodeConfig as any)?.[config.configKeys.iterations] ?? data?.count ?? 5
|
||||
const configCollection = (nodeConfig as any)?.[config.configKeys.items] ?? data?.collection ?? ''
|
||||
(iterationType === 'loop' ? 'for' : iterationType === 'parallel' ? 'count' : 'while')) as any
|
||||
|
||||
const configIterations =
|
||||
iterationType === 'loop'
|
||||
? ((nodeConfig as any)?.[CONFIG.loop.configKeys.iterations] ?? data?.count ?? 5)
|
||||
: iterationType === 'parallel'
|
||||
? ((nodeConfig as any)?.[CONFIG.parallel.configKeys.iterations] ?? data?.count ?? 5)
|
||||
: ((nodeConfig as any)?.iterations ?? data?.count ?? 5)
|
||||
|
||||
const configCollection =
|
||||
iterationType === 'loop'
|
||||
? ((nodeConfig as any)?.[CONFIG.loop.configKeys.items] ?? data?.collection ?? '')
|
||||
: iterationType === 'parallel'
|
||||
? ((nodeConfig as any)?.[CONFIG.parallel.configKeys.items] ?? data?.collection ?? '')
|
||||
: ''
|
||||
|
||||
const iterations = configIterations
|
||||
const collectionString =
|
||||
@@ -87,8 +108,10 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
|
||||
|
||||
// State management
|
||||
const [tempInputValue, setTempInputValue] = useState<string | null>(null)
|
||||
const isWhile = iterationType === 'while'
|
||||
const [whileValue, setWhileValue] = useState<string>(data?.condition || '')
|
||||
const inputValue = tempInputValue ?? iterations.toString()
|
||||
const editorValue = collectionString
|
||||
const editorValue = isWhile ? whileValue : collectionString
|
||||
const [typePopoverOpen, setTypePopoverOpen] = useState(false)
|
||||
const [configPopoverOpen, setConfigPopoverOpen] = useState(false)
|
||||
const [showTagDropdown, setShowTagDropdown] = useState(false)
|
||||
@@ -100,6 +123,7 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
|
||||
const {
|
||||
collaborativeUpdateLoopType,
|
||||
collaborativeUpdateParallelType,
|
||||
collaborativeUpdateWhileType,
|
||||
collaborativeUpdateIterationCount,
|
||||
collaborativeUpdateIterationCollection,
|
||||
} = useCollaborativeWorkflow()
|
||||
@@ -110,12 +134,21 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
|
||||
if (isPreview) return
|
||||
if (iterationType === 'loop') {
|
||||
collaborativeUpdateLoopType(nodeId, newType)
|
||||
} else {
|
||||
} else if (iterationType === 'parallel') {
|
||||
collaborativeUpdateParallelType(nodeId, newType)
|
||||
} else {
|
||||
collaborativeUpdateWhileType(nodeId, newType)
|
||||
}
|
||||
setTypePopoverOpen(false)
|
||||
},
|
||||
[nodeId, iterationType, collaborativeUpdateLoopType, collaborativeUpdateParallelType, isPreview]
|
||||
[
|
||||
nodeId,
|
||||
iterationType,
|
||||
collaborativeUpdateLoopType,
|
||||
collaborativeUpdateParallelType,
|
||||
collaborativeUpdateWhileType,
|
||||
isPreview,
|
||||
]
|
||||
)
|
||||
|
||||
// Handle iterations input change
|
||||
@@ -141,7 +174,9 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
|
||||
|
||||
if (!Number.isNaN(value)) {
|
||||
const newValue = Math.min(config.maxIterations, Math.max(1, value))
|
||||
collaborativeUpdateIterationCount(nodeId, iterationType, newValue)
|
||||
if (iterationType === 'loop' || iterationType === 'parallel') {
|
||||
collaborativeUpdateIterationCount(nodeId, iterationType, newValue)
|
||||
}
|
||||
}
|
||||
setTempInputValue(null)
|
||||
setConfigPopoverOpen(false)
|
||||
@@ -158,7 +193,11 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
|
||||
const handleEditorChange = useCallback(
|
||||
(value: string) => {
|
||||
if (isPreview) return
|
||||
collaborativeUpdateIterationCollection(nodeId, iterationType, value)
|
||||
if (iterationType === 'loop' || iterationType === 'parallel') {
|
||||
collaborativeUpdateIterationCollection(nodeId, iterationType, value)
|
||||
} else if (isWhile) {
|
||||
setWhileValue(value)
|
||||
}
|
||||
|
||||
const textarea = editorContainerRef.current?.querySelector('textarea')
|
||||
if (textarea) {
|
||||
@@ -170,14 +209,18 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
|
||||
setShowTagDropdown(triggerCheck.show)
|
||||
}
|
||||
},
|
||||
[nodeId, iterationType, collaborativeUpdateIterationCollection, isPreview]
|
||||
[nodeId, iterationType, collaborativeUpdateIterationCollection, isPreview, isWhile]
|
||||
)
|
||||
|
||||
// Handle tag selection
|
||||
const handleTagSelect = useCallback(
|
||||
(newValue: string) => {
|
||||
if (isPreview) return
|
||||
collaborativeUpdateIterationCollection(nodeId, iterationType, newValue)
|
||||
if (iterationType === 'loop' || iterationType === 'parallel') {
|
||||
collaborativeUpdateIterationCollection(nodeId, iterationType, newValue)
|
||||
} else if (isWhile) {
|
||||
setWhileValue(newValue)
|
||||
}
|
||||
setShowTagDropdown(false)
|
||||
|
||||
setTimeout(() => {
|
||||
@@ -187,7 +230,7 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
|
||||
}
|
||||
}, 0)
|
||||
},
|
||||
[nodeId, iterationType, collaborativeUpdateIterationCollection, isPreview]
|
||||
[nodeId, iterationType, collaborativeUpdateIterationCollection, isPreview, isWhile]
|
||||
)
|
||||
|
||||
// Determine if we're in count mode or collection mode
|
||||
@@ -223,7 +266,11 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
|
||||
<PopoverContent className='w-48 p-3' align='center' onClick={(e) => e.stopPropagation()}>
|
||||
<div className='space-y-2'>
|
||||
<div className='font-medium text-muted-foreground text-xs'>
|
||||
{iterationType === 'loop' ? 'Loop Type' : 'Parallel Type'}
|
||||
{iterationType === 'loop'
|
||||
? 'Loop Type'
|
||||
: iterationType === 'parallel'
|
||||
? 'Parallel Type'
|
||||
: 'While Type'}
|
||||
</div>
|
||||
<div className='space-y-1'>
|
||||
{typeOptions.map(([typeValue, typeLabel]) => (
|
||||
@@ -259,24 +306,63 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
|
||||
)}
|
||||
style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
|
||||
>
|
||||
{isCountMode ? `Iterations: ${iterations}` : 'Items'}
|
||||
{isWhile ? 'Condition' : isCountMode ? `Iterations: ${iterations}` : 'Items'}
|
||||
{!isPreview && <ChevronDown className='h-3 w-3 text-muted-foreground' />}
|
||||
</Badge>
|
||||
</PopoverTrigger>
|
||||
{!isPreview && (
|
||||
<PopoverContent
|
||||
className={cn('p-3', !isCountMode ? 'w-72' : 'w-48')}
|
||||
className={cn('p-3', isWhile || !isCountMode ? 'w-72' : 'w-48')}
|
||||
align='center'
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
>
|
||||
<div className='space-y-2'>
|
||||
<div className='font-medium text-muted-foreground text-xs'>
|
||||
{isCountMode
|
||||
? `${iterationType === 'loop' ? 'Loop' : 'Parallel'} Iterations`
|
||||
: `${iterationType === 'loop' ? 'Collection' : 'Parallel'} Items`}
|
||||
{isWhile
|
||||
? 'While Condition'
|
||||
: isCountMode
|
||||
? `${iterationType === 'loop' ? 'Loop' : 'Parallel'} Iterations`
|
||||
: `${iterationType === 'loop' ? 'Collection' : 'Parallel'} Items`}
|
||||
</div>
|
||||
|
||||
{isCountMode ? (
|
||||
{isWhile ? (
|
||||
// Code editor for while condition
|
||||
<div ref={editorContainerRef} className='relative'>
|
||||
<div className='relative min-h-[80px] rounded-md border border-input bg-background px-3 pt-2 pb-3 font-mono text-sm'>
|
||||
{editorValue === '' && (
|
||||
<div className='pointer-events-none absolute top-[8.5px] left-3 select-none text-muted-foreground/50'>
|
||||
condition === true
|
||||
</div>
|
||||
)}
|
||||
<Editor
|
||||
value={editorValue}
|
||||
onValueChange={handleEditorChange}
|
||||
highlight={(code) => highlight(code, languages.javascript, 'javascript')}
|
||||
padding={0}
|
||||
style={{
|
||||
fontFamily: 'monospace',
|
||||
lineHeight: '21px',
|
||||
}}
|
||||
className='w-full focus:outline-none'
|
||||
textareaClassName='focus:outline-none focus:ring-0 bg-transparent resize-none w-full overflow-hidden whitespace-pre-wrap'
|
||||
/>
|
||||
</div>
|
||||
<div className='mt-2 text-[10px] text-muted-foreground'>
|
||||
Enter a boolean expression.
|
||||
</div>
|
||||
{showTagDropdown && (
|
||||
<TagDropdown
|
||||
visible={showTagDropdown}
|
||||
onSelect={handleTagSelect}
|
||||
blockId={nodeId}
|
||||
activeSourceBlockId={null}
|
||||
inputValue={editorValue}
|
||||
cursorPosition={cursorPosition}
|
||||
onClose={() => setShowTagDropdown(false)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
) : isCountMode ? (
|
||||
// Number input for count-based mode
|
||||
<div className='flex items-center gap-2'>
|
||||
<Input
|
||||
|
||||
@@ -136,12 +136,40 @@ describe('SubflowNodeComponent', () => {
|
||||
}).not.toThrow()
|
||||
})
|
||||
|
||||
it.concurrent('should accept while kind in NodeProps data', () => {
|
||||
const validProps = {
|
||||
id: 'test-id-while',
|
||||
type: 'subflowNode' as const,
|
||||
data: {
|
||||
width: 400,
|
||||
height: 300,
|
||||
isPreview: false,
|
||||
kind: 'while' as const,
|
||||
},
|
||||
selected: false,
|
||||
zIndex: 1,
|
||||
isConnectable: true,
|
||||
xPos: 0,
|
||||
yPos: 0,
|
||||
dragging: false,
|
||||
}
|
||||
|
||||
expect(() => {
|
||||
const _component: typeof SubflowNodeComponent = SubflowNodeComponent
|
||||
expect(_component).toBeDefined()
|
||||
expect(validProps.type).toBe('subflowNode')
|
||||
}).not.toThrow()
|
||||
})
|
||||
|
||||
it.concurrent('should handle different data configurations', () => {
|
||||
const configurations = [
|
||||
{ width: 500, height: 300, isPreview: false, kind: 'loop' as const },
|
||||
{ width: 800, height: 600, isPreview: true, kind: 'parallel' as const },
|
||||
{ width: 500, height: 300, isPreview: false, kind: 'while' as const },
|
||||
{ width: 0, height: 0, isPreview: false, kind: 'loop' as const },
|
||||
{ width: 0, height: 0, isPreview: false, kind: 'while' as const },
|
||||
{ kind: 'loop' as const },
|
||||
{ kind: 'while' as const },
|
||||
]
|
||||
|
||||
configurations.forEach((data) => {
|
||||
@@ -306,10 +334,20 @@ describe('SubflowNodeComponent', () => {
|
||||
})
|
||||
|
||||
it.concurrent('should generate correct handle IDs for parallel kind', () => {
|
||||
type SubflowKind = 'loop' | 'parallel'
|
||||
type SubflowKind = 'loop' | 'parallel' | 'while'
|
||||
const testHandleGeneration = (kind: SubflowKind) => {
|
||||
const startHandleId = kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
|
||||
const endHandleId = kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
|
||||
const startHandleId =
|
||||
kind === 'loop'
|
||||
? 'loop-start-source'
|
||||
: kind === 'parallel'
|
||||
? 'parallel-start-source'
|
||||
: 'while-start-source'
|
||||
const endHandleId =
|
||||
kind === 'loop'
|
||||
? 'loop-end-source'
|
||||
: kind === 'parallel'
|
||||
? 'parallel-end-source'
|
||||
: 'while-end-source'
|
||||
return { startHandleId, endHandleId }
|
||||
}
|
||||
|
||||
@@ -318,6 +356,29 @@ describe('SubflowNodeComponent', () => {
|
||||
expect(result.endHandleId).toBe('parallel-end-source')
|
||||
})
|
||||
|
||||
it.concurrent('should generate correct handle IDs for while kind', () => {
|
||||
type SubflowKind = 'loop' | 'parallel' | 'while'
|
||||
const testHandleGeneration = (kind: SubflowKind) => {
|
||||
const startHandleId =
|
||||
kind === 'loop'
|
||||
? 'loop-start-source'
|
||||
: kind === 'parallel'
|
||||
? 'parallel-start-source'
|
||||
: 'while-start-source'
|
||||
const endHandleId =
|
||||
kind === 'loop'
|
||||
? 'loop-end-source'
|
||||
: kind === 'parallel'
|
||||
? 'parallel-end-source'
|
||||
: 'while-end-source'
|
||||
return { startHandleId, endHandleId }
|
||||
}
|
||||
|
||||
const result = testHandleGeneration('while')
|
||||
expect(result.startHandleId).toBe('while-start-source')
|
||||
expect(result.endHandleId).toBe('while-end-source')
|
||||
})
|
||||
|
||||
it.concurrent('should generate correct background colors for loop kind', () => {
|
||||
const loopData = { ...defaultProps.data, kind: 'loop' as const }
|
||||
const startBg = loopData.kind === 'loop' ? '#2FB3FF' : '#FEE12B'
|
||||
@@ -326,21 +387,41 @@ describe('SubflowNodeComponent', () => {
|
||||
})
|
||||
|
||||
it.concurrent('should generate correct background colors for parallel kind', () => {
|
||||
type SubflowKind = 'loop' | 'parallel'
|
||||
type SubflowKind = 'loop' | 'parallel' | 'while'
|
||||
const testBgGeneration = (kind: SubflowKind) => {
|
||||
return kind === 'loop' ? '#2FB3FF' : '#FEE12B'
|
||||
return kind === 'loop' ? '#2FB3FF' : kind === 'parallel' ? '#FEE12B' : '#57D9A3'
|
||||
}
|
||||
|
||||
const startBg = testBgGeneration('parallel')
|
||||
expect(startBg).toBe('#FEE12B')
|
||||
})
|
||||
|
||||
it.concurrent('should generate correct background colors for while kind', () => {
|
||||
type SubflowKind = 'loop' | 'parallel' | 'while'
|
||||
const testBgGeneration = (kind: SubflowKind) => {
|
||||
return kind === 'loop' ? '#2FB3FF' : kind === 'parallel' ? '#FEE12B' : '#57D9A3'
|
||||
}
|
||||
|
||||
const startBg = testBgGeneration('while')
|
||||
expect(startBg).toBe('#57D9A3')
|
||||
})
|
||||
|
||||
it.concurrent('should demonstrate handle ID generation for any kind', () => {
|
||||
type SubflowKind = 'loop' | 'parallel'
|
||||
type SubflowKind = 'loop' | 'parallel' | 'while'
|
||||
const testKind = (kind: SubflowKind) => {
|
||||
const data = { kind }
|
||||
const startHandleId = data.kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
|
||||
const endHandleId = data.kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
|
||||
const startHandleId =
|
||||
data.kind === 'loop'
|
||||
? 'loop-start-source'
|
||||
: data.kind === 'parallel'
|
||||
? 'parallel-start-source'
|
||||
: 'while-start-source'
|
||||
const endHandleId =
|
||||
data.kind === 'loop'
|
||||
? 'loop-end-source'
|
||||
: data.kind === 'parallel'
|
||||
? 'parallel-end-source'
|
||||
: 'while-end-source'
|
||||
return { startHandleId, endHandleId }
|
||||
}
|
||||
|
||||
@@ -351,6 +432,10 @@ describe('SubflowNodeComponent', () => {
|
||||
const parallelResult = testKind('parallel')
|
||||
expect(parallelResult.startHandleId).toBe('parallel-start-source')
|
||||
expect(parallelResult.endHandleId).toBe('parallel-end-source')
|
||||
|
||||
const whileResult = testKind('while')
|
||||
expect(whileResult.startHandleId).toBe('while-start-source')
|
||||
expect(whileResult.endHandleId).toBe('while-end-source')
|
||||
})
|
||||
|
||||
it.concurrent('should pass correct iterationType to IterationBadges for loop', () => {
|
||||
@@ -368,25 +453,49 @@ describe('SubflowNodeComponent', () => {
|
||||
expect(parallelProps.data.kind).toBe('parallel')
|
||||
})
|
||||
|
||||
it.concurrent('should handle both kinds in configuration arrays', () => {
|
||||
const bothKinds = ['loop', 'parallel'] as const
|
||||
bothKinds.forEach((kind) => {
|
||||
it.concurrent('should pass correct iterationType to IterationBadges for while', () => {
|
||||
const whileProps = {
|
||||
...defaultProps,
|
||||
data: { ...defaultProps.data, kind: 'while' as const },
|
||||
}
|
||||
// Mock IterationBadges should receive the kind as iterationType
|
||||
expect(whileProps.data.kind).toBe('while')
|
||||
})
|
||||
|
||||
it.concurrent('should handle loop, parallel, and while kinds in configuration arrays', () => {
|
||||
const allKinds = ['loop', 'parallel', 'while'] as const
|
||||
allKinds.forEach((kind) => {
|
||||
const data = { ...defaultProps.data, kind }
|
||||
expect(['loop', 'parallel']).toContain(data.kind)
|
||||
expect(['loop', 'parallel', 'while']).toContain(data.kind)
|
||||
|
||||
// Test handle ID generation for both kinds
|
||||
const startHandleId = data.kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
|
||||
const endHandleId = data.kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
|
||||
const startBg = data.kind === 'loop' ? '#2FB3FF' : '#FEE12B'
|
||||
const startHandleId =
|
||||
data.kind === 'loop'
|
||||
? 'loop-start-source'
|
||||
: data.kind === 'parallel'
|
||||
? 'parallel-start-source'
|
||||
: 'while-start-source'
|
||||
const endHandleId =
|
||||
data.kind === 'loop'
|
||||
? 'loop-end-source'
|
||||
: data.kind === 'parallel'
|
||||
? 'parallel-end-source'
|
||||
: 'while-end-source'
|
||||
const startBg =
|
||||
data.kind === 'loop' ? '#2FB3FF' : data.kind === 'parallel' ? '#FEE12B' : '#57D9A3'
|
||||
|
||||
if (kind === 'loop') {
|
||||
expect(startHandleId).toBe('loop-start-source')
|
||||
expect(endHandleId).toBe('loop-end-source')
|
||||
expect(startBg).toBe('#2FB3FF')
|
||||
} else {
|
||||
} else if (kind === 'parallel') {
|
||||
expect(startHandleId).toBe('parallel-start-source')
|
||||
expect(endHandleId).toBe('parallel-end-source')
|
||||
expect(startBg).toBe('#FEE12B')
|
||||
} else {
|
||||
expect(startHandleId).toBe('while-start-source')
|
||||
expect(endHandleId).toBe('while-end-source')
|
||||
expect(startBg).toBe('#57D9A3')
|
||||
}
|
||||
})
|
||||
})
|
||||
@@ -433,10 +542,15 @@ describe('SubflowNodeComponent', () => {
|
||||
...defaultProps,
|
||||
data: { ...defaultProps.data, kind: 'parallel' as const },
|
||||
}
|
||||
const whileProps = {
|
||||
...defaultProps,
|
||||
data: { ...defaultProps.data, kind: 'while' as const },
|
||||
}
|
||||
|
||||
// The iterationType should match the kind
|
||||
expect(loopProps.data.kind).toBe('loop')
|
||||
expect(parallelProps.data.kind).toBe('parallel')
|
||||
expect(whileProps.data.kind).toBe('while')
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
@@ -26,6 +26,12 @@ const SubflowNodeStyles: React.FC = () => {
|
||||
100% { box-shadow: 0 0 0 0 rgba(139, 195, 74, 0); }
|
||||
}
|
||||
|
||||
@keyframes while-node-pulse {
|
||||
0% { box-shadow: 0 0 0 0 rgba(255, 159, 67, 0.3); }
|
||||
70% { box-shadow: 0 0 0 6px rgba(255, 159, 67, 0); }
|
||||
100% { box-shadow: 0 0 0 0 rgba(255, 159, 67, 0); }
|
||||
}
|
||||
|
||||
.loop-node-drag-over {
|
||||
animation: loop-node-pulse 1.2s cubic-bezier(0.4, 0, 0.6, 1) infinite;
|
||||
border-style: solid !important;
|
||||
@@ -40,6 +46,13 @@ const SubflowNodeStyles: React.FC = () => {
|
||||
box-shadow: 0 0 0 8px rgba(139, 195, 74, 0.1);
|
||||
}
|
||||
|
||||
.while-node-drag-over {
|
||||
animation: while-node-pulse 1.2s cubic-bezier(0.4, 0, 0.6, 1) infinite;
|
||||
border-style: solid !important;
|
||||
background-color: rgba(255, 159, 67, 0.08) !important;
|
||||
box-shadow: 0 0 0 8px rgba(255, 159, 67, 0.1);
|
||||
}
|
||||
|
||||
.react-flow__node-group:hover,
|
||||
.hover-highlight {
|
||||
border-color: #1e293b !important;
|
||||
@@ -69,7 +82,7 @@ export interface SubflowNodeData {
  extent?: 'parent'
  hasNestedError?: boolean
  isPreview?: boolean
  kind: 'loop' | 'parallel'
  kind: 'loop' | 'parallel' | 'while'
}

export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeData>) => {
@@ -114,9 +127,26 @@ export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeDat

const nestedStyles = getNestedStyles()

const startHandleId = data.kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
const endHandleId = data.kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
const startBg = data.kind === 'loop' ? '#2FB3FF' : '#FEE12B'
const startHandleId =
  data.kind === 'loop'
    ? 'loop-start-source'
    : data.kind === 'parallel'
      ? 'parallel-start-source'
      : 'while-start-source'
const endHandleId =
  data.kind === 'loop'
    ? 'loop-end-source'
    : data.kind === 'parallel'
      ? 'parallel-end-source'
      : 'while-end-source'
const startBg =
  data.kind === 'loop'
    ? '#2FB3FF'
    : data.kind === 'parallel'
      ? '#FEE12B'
      : data.kind === 'while'
        ? '#FF9F43'
        : '#2FB3FF'

return (
  <>

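The same loop/parallel/while ternaries recur in this component and its tests; a hedged sketch of a helper that would centralize the mapping (not part of the diff):

// Sketch only. Note the colours are not consistent in this changeset: the component
// above uses '#FF9F43' for while, while the SubflowNodeComponent tests use '#57D9A3'.
type SubflowKind = 'loop' | 'parallel' | 'while'

function getSubflowVisualsSketch(kind: SubflowKind) {
  const startHandleId = `${kind}-start-source` // 'loop-start-source' | 'parallel-start-source' | 'while-start-source'
  const endHandleId = `${kind}-end-source`
  const startBg = kind === 'loop' ? '#2FB3FF' : kind === 'parallel' ? '#FEE12B' : '#FF9F43'
  return { startHandleId, endHandleId, startBg }
}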
@@ -0,0 +1,29 @@
import { RefreshCwIcon } from 'lucide-react'

export const WhileTool = {
  id: 'while',
  type: 'while',
  name: 'While',
  description: 'While Loop',
  icon: RefreshCwIcon,
  bgColor: '#CC5500',
  data: {
    label: 'While',
    whileType: 'while' as 'while' | 'doWhile',
    condition: '',
    width: 500,
    height: 300,
    extent: 'parent',
    executionState: {
      currentIteration: 0,
      isExecuting: false,
      startTime: null,
      endTime: null,
    },
  },
  style: {
    width: 500,
    height: 300,
  },
  isResizable: true,
}
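WhileTool supplies the defaults for a freshly added while container. A hedged sketch of consuming it when a block is created; addBlock and existingWhileCount are assumed names, only WhileTool's fields come from the file above:

// Sketch only — illustrates seeding a new while container from WhileTool's defaults.
const id = crypto.randomUUID()
addBlock({
  id,
  type: WhileTool.type, // 'while'
  name: `While ${existingWhileCount + 1}`,
  position: { x: 0, y: 0 },
  data: { ...WhileTool.data }, // whileType, condition, width/height, extent, executionState
})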
@@ -106,32 +106,34 @@ export function ActionBar({ blockId, blockType, disabled = false }: ActionBarPro
|
||||
</Tooltip>
|
||||
)}
|
||||
|
||||
{/* Remove from subflow - only show when inside loop/parallel */}
|
||||
{!isStarterBlock && parentId && (parentType === 'loop' || parentType === 'parallel') && (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={() => {
|
||||
if (!disabled && userPermissions.canEdit) {
|
||||
window.dispatchEvent(
|
||||
new CustomEvent('remove-from-subflow', { detail: { blockId } })
|
||||
)
|
||||
}
|
||||
}}
|
||||
className={cn(
|
||||
'text-gray-500',
|
||||
(disabled || !userPermissions.canEdit) && 'cursor-not-allowed opacity-50'
|
||||
)}
|
||||
disabled={disabled || !userPermissions.canEdit}
|
||||
>
|
||||
<LogOut className='h-4 w-4' />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='right'>{getTooltipMessage('Remove From Subflow')}</TooltipContent>
|
||||
</Tooltip>
|
||||
)}
|
||||
{/* Remove from subflow - only show when inside loop/parallel/while */}
|
||||
{!isStarterBlock &&
|
||||
parentId &&
|
||||
(parentType === 'loop' || parentType === 'parallel' || parentType === 'while') && (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={() => {
|
||||
if (!disabled && userPermissions.canEdit) {
|
||||
window.dispatchEvent(
|
||||
new CustomEvent('remove-from-subflow', { detail: { blockId } })
|
||||
)
|
||||
}
|
||||
}}
|
||||
className={cn(
|
||||
'text-gray-500',
|
||||
(disabled || !userPermissions.canEdit) && 'cursor-not-allowed opacity-50'
|
||||
)}
|
||||
disabled={disabled || !userPermissions.canEdit}
|
||||
>
|
||||
<LogOut className='h-4 w-4' />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='right'>{getTooltipMessage('Remove From Subflow')}</TooltipContent>
|
||||
</Tooltip>
|
||||
)}
|
||||
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
|
||||
@@ -79,7 +79,7 @@ export function ConnectionBlocks({
|
||||
const blockConfig = getBlock(connection.type)
|
||||
const displayName = connection.name // Use the actual block name instead of transforming it
|
||||
|
||||
// Handle special blocks that aren't in the registry (loop and parallel)
|
||||
// Handle special blocks that aren't in the registry (loop, parallel, while)
|
||||
let Icon = blockConfig?.icon
|
||||
let bgColor = blockConfig?.bgColor || '#6B7280' // Fallback to gray
|
||||
|
||||
@@ -90,6 +90,9 @@ export function ConnectionBlocks({
|
||||
} else if (connection.type === 'parallel') {
|
||||
Icon = SplitIcon as typeof Icon
|
||||
bgColor = '#FEE12B' // Yellow color for parallel blocks
|
||||
} else if (connection.type === 'while') {
|
||||
Icon = RepeatIcon as typeof Icon
|
||||
bgColor = '#FF9F43' // Orange color for while blocks
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -58,6 +58,7 @@ export function generateFullWorkflowData() {
|
||||
edges: workflowState.edges,
|
||||
loops: workflowState.loops,
|
||||
parallels: workflowState.parallels,
|
||||
whiles: workflowState.whiles,
|
||||
},
|
||||
subBlockValues,
|
||||
exportedAt: new Date().toISOString(),
|
||||
|
||||
@@ -3,7 +3,13 @@ import type { Edge } from 'reactflow'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
||||
import type { DeploymentStatus } from '@/stores/workflows/registry/types'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
import type { BlockState, Loop, Parallel, WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
import type {
|
||||
BlockState,
|
||||
Loop,
|
||||
Parallel,
|
||||
While,
|
||||
WorkflowState,
|
||||
} from '@/stores/workflows/workflow/types'
|
||||
|
||||
/**
|
||||
* Interface for the current workflow abstraction
|
||||
@@ -14,6 +20,7 @@ export interface CurrentWorkflow {
|
||||
edges: Edge[]
|
||||
loops: Record<string, Loop>
|
||||
parallels: Record<string, Parallel>
|
||||
whiles: Record<string, While>
|
||||
lastSaved?: number
|
||||
isDeployed?: boolean
|
||||
deployedAt?: Date
|
||||
@@ -61,6 +68,7 @@ export function useCurrentWorkflow(): CurrentWorkflow {
|
||||
edges: activeWorkflow.edges,
|
||||
loops: activeWorkflow.loops || {},
|
||||
parallels: activeWorkflow.parallels || {},
|
||||
whiles: activeWorkflow.whiles || {},
|
||||
lastSaved: activeWorkflow.lastSaved,
|
||||
isDeployed: activeWorkflow.isDeployed,
|
||||
deployedAt: activeWorkflow.deployedAt,
|
||||
|
||||
@@ -522,6 +522,7 @@ export function useWorkflowExecution() {
|
||||
edges: workflowEdges,
|
||||
loops: workflowLoops,
|
||||
parallels: workflowParallels,
|
||||
whiles: workflowWhiles,
|
||||
} = currentWorkflow
|
||||
|
||||
// Filter out blocks without type (these are layout-only blocks)
|
||||
@@ -633,7 +634,8 @@ export function useWorkflowExecution() {
|
||||
filteredStates,
|
||||
filteredEdges,
|
||||
workflowLoops,
|
||||
workflowParallels
|
||||
workflowParallels,
|
||||
workflowWhiles
|
||||
)
|
||||
|
||||
// If this is a chat execution, get the selected outputs
|
||||
|
||||
@@ -104,6 +104,7 @@ export async function executeWorkflowWithLogging(
|
||||
edges: workflowEdges,
|
||||
loops: workflowLoops,
|
||||
parallels: workflowParallels,
|
||||
whiles: workflowWhiles,
|
||||
} = currentWorkflow
|
||||
|
||||
// Filter out blocks without type (these are layout-only blocks)
|
||||
@@ -201,7 +202,8 @@ export async function executeWorkflowWithLogging(
|
||||
filteredStates,
|
||||
filteredEdges,
|
||||
workflowLoops,
|
||||
workflowParallels
|
||||
workflowParallels,
|
||||
workflowWhiles
|
||||
)
|
||||
|
||||
// If this is a chat execution, get the selected outputs
|
||||
|
||||
@@ -15,7 +15,8 @@ const isContainerType = (blockType: string): boolean => {
    blockType === 'parallel' ||
    blockType === 'loopNode' ||
    blockType === 'parallelNode' ||
    blockType === 'subflowNode'
    blockType === 'subflowNode' ||
    blockType === 'while'
  )
}

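A quick check of the extended predicate, using only values visible in the change:

isContainerType('while')    // true — newly added branch
isContainerType('loop')     // true
isContainerType('agent')    // false — ordinary blocks are not containers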
@@ -302,7 +303,7 @@ export const calculateRelativePosition = (
|
||||
* @param getNodes Function to retrieve all nodes from ReactFlow
|
||||
* @param updateBlockPosition Function to update the position of a block
|
||||
* @param updateParentId Function to update the parent ID of a block
|
||||
* @param resizeLoopNodes Function to resize loop nodes after parent update
|
||||
* @param resizeLoopNodes Function to resize loop or parallel or while nodes after parent update
|
||||
*/
|
||||
export const updateNodeParent = (
|
||||
nodeId: string,
|
||||
@@ -336,7 +337,7 @@ export const updateNodeParent = (
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a point is inside a loop or parallel node
|
||||
* Checks if a point is inside a loop or parallel or while node
|
||||
* @param position Position coordinates to check
|
||||
* @param getNodes Function to retrieve all nodes from ReactFlow
|
||||
* @returns The smallest container node containing the point, or null if none
|
||||
@@ -390,7 +391,7 @@ export const isPointInLoopNode = (
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates appropriate dimensions for a loop or parallel node based on its children
|
||||
* Calculates appropriate dimensions for a loop or parallel or while node based on its children
|
||||
* @param nodeId ID of the container node
|
||||
* @param getNodes Function to retrieve all nodes from ReactFlow
|
||||
* @param blocks Block states from workflow store
|
||||
|
||||
@@ -47,6 +47,7 @@ export async function applyAutoLayoutToWorkflow(
  edges: any[],
  loops: Record<string, any> = {},
  parallels: Record<string, any> = {},
  whiles: Record<string, any> = {},
  options: AutoLayoutOptions = {}
): Promise<{
  success: boolean
@@ -152,7 +153,7 @@ export async function applyAutoLayoutAndUpdateStore(
const { useWorkflowStore } = await import('@/stores/workflows/workflow/store')

const workflowStore = useWorkflowStore.getState()
const { blocks, edges, loops = {}, parallels = {} } = workflowStore
const { blocks, edges, loops = {}, parallels = {}, whiles = {} } = workflowStore

logger.info('Auto layout store data:', {
  workflowId,
@@ -160,6 +161,7 @@ export async function applyAutoLayoutAndUpdateStore(
  edgeCount: edges.length,
  loopCount: Object.keys(loops).length,
  parallelCount: Object.keys(parallels).length,
  whileCount: Object.keys(whiles).length,
})

if (Object.keys(blocks).length === 0) {
@@ -174,6 +176,7 @@ export async function applyAutoLayoutAndUpdateStore(
  edges,
  loops,
  parallels,
  whiles,
  options
)

@@ -265,5 +268,5 @@ export async function applyAutoLayoutToBlocks(
  layoutedBlocks?: Record<string, any>
  error?: string
}> {
  return applyAutoLayoutToWorkflow('preview', blocks, edges, {}, {}, options)
  return applyAutoLayoutToWorkflow('preview', blocks, edges, {}, {}, {}, options)
}

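Because whiles now sits between parallels and options in the signature, every positional caller needs the extra argument, which is why applyAutoLayoutToBlocks above passes a third empty record. A call-site sketch using the variables already in scope in applyAutoLayoutAndUpdateStore:

// Positional args follow the new signature:
// (workflowId, blocks, edges, loops, parallels, whiles, options)
const layoutResult = await applyAutoLayoutToWorkflow(
  workflowId,
  blocks,
  edges,
  loops,
  parallels,
  whiles, // new: while containers participate in layout
  options
)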
@@ -98,7 +98,7 @@ const WorkflowContent = React.memo(() => {
|
||||
useStreamCleanup(copilotCleanup)
|
||||
|
||||
// Extract workflow data from the abstraction
|
||||
const { blocks, edges, loops, parallels, isDiffMode } = currentWorkflow
|
||||
const { blocks, edges, isDiffMode } = currentWorkflow
|
||||
|
||||
// Get diff analysis for edge reconstruction
|
||||
const { diffAnalysis, isShowingDiff, isDiffReady } = useWorkflowDiffStore()
|
||||
@@ -462,6 +462,8 @@ const WorkflowContent = React.memo(() => {
|
||||
sourceHandle = 'loop-end-source'
|
||||
} else if (block.type === 'parallel') {
|
||||
sourceHandle = 'parallel-end-source'
|
||||
} else if (block.type === 'while') {
|
||||
sourceHandle = 'while-end-source'
|
||||
}
|
||||
|
||||
return sourceHandle
|
||||
@@ -481,14 +483,19 @@ const WorkflowContent = React.memo(() => {
|
||||
if (type === 'connectionBlock') return
|
||||
|
||||
// Special handling for container nodes (loop or parallel)
|
||||
if (type === 'loop' || type === 'parallel') {
|
||||
if (type === 'loop' || type === 'parallel' || type === 'while') {
|
||||
// Create a unique ID and name for the container
|
||||
const id = crypto.randomUUID()
|
||||
|
||||
// Auto-number the blocks based on existing blocks of the same type
|
||||
const existingBlocksOfType = Object.values(blocks).filter((b) => b.type === type)
|
||||
const blockNumber = existingBlocksOfType.length + 1
|
||||
const name = type === 'loop' ? `Loop ${blockNumber}` : `Parallel ${blockNumber}`
|
||||
const name =
|
||||
type === 'loop'
|
||||
? `Loop ${blockNumber}`
|
||||
: type === 'parallel'
|
||||
? `Parallel ${blockNumber}`
|
||||
: `While ${blockNumber}`
|
||||
|
||||
// Calculate the center position of the viewport
|
||||
const centerPosition = project({
|
||||
@@ -615,21 +622,30 @@ const WorkflowContent = React.memo(() => {
|
||||
|
||||
// Clear any drag-over styling
|
||||
document
|
||||
.querySelectorAll('.loop-node-drag-over, .parallel-node-drag-over')
|
||||
.querySelectorAll('.loop-node-drag-over, .parallel-node-drag-over, .while-node-drag-over')
|
||||
.forEach((el) => {
|
||||
el.classList.remove('loop-node-drag-over', 'parallel-node-drag-over')
|
||||
el.classList.remove(
|
||||
'loop-node-drag-over',
|
||||
'parallel-node-drag-over',
|
||||
'while-node-drag-over'
|
||||
)
|
||||
})
|
||||
document.body.style.cursor = ''
|
||||
|
||||
// Special handling for container nodes (loop or parallel)
|
||||
if (data.type === 'loop' || data.type === 'parallel') {
|
||||
if (data.type === 'loop' || data.type === 'parallel' || data.type === 'while') {
|
||||
// Create a unique ID and name for the container
|
||||
const id = crypto.randomUUID()
|
||||
|
||||
// Auto-number the blocks based on existing blocks of the same type
|
||||
const existingBlocksOfType = Object.values(blocks).filter((b) => b.type === data.type)
|
||||
const blockNumber = existingBlocksOfType.length + 1
|
||||
const name = data.type === 'loop' ? `Loop ${blockNumber}` : `Parallel ${blockNumber}`
|
||||
const name =
|
||||
data.type === 'loop'
|
||||
? `Loop ${blockNumber}`
|
||||
: data.type === 'parallel'
|
||||
? `Parallel ${blockNumber}`
|
||||
: `While ${blockNumber}`
|
||||
|
||||
// Check if we're dropping inside another container
|
||||
if (containerInfo) {
|
||||
@@ -691,7 +707,12 @@ const WorkflowContent = React.memo(() => {
|
||||
}
|
||||
|
||||
const blockConfig = getBlock(data.type)
|
||||
if (!blockConfig && data.type !== 'loop' && data.type !== 'parallel') {
|
||||
if (
|
||||
!blockConfig &&
|
||||
data.type !== 'loop' &&
|
||||
data.type !== 'parallel' &&
|
||||
data.type !== 'while'
|
||||
) {
|
||||
logger.error('Invalid block type:', { data })
|
||||
return
|
||||
}
|
||||
@@ -703,7 +724,9 @@ const WorkflowContent = React.memo(() => {
|
||||
? `Loop ${Object.values(blocks).filter((b) => b.type === 'loop').length + 1}`
|
||||
: data.type === 'parallel'
|
||||
? `Parallel ${Object.values(blocks).filter((b) => b.type === 'parallel').length + 1}`
|
||||
: `${blockConfig!.name} ${Object.values(blocks).filter((b) => b.type === data.type).length + 1}`
|
||||
: data.type === 'while'
|
||||
? `While ${Object.values(blocks).filter((b) => b.type === 'while').length + 1}`
|
||||
: `${blockConfig!.name} ${Object.values(blocks).filter((b) => b.type === data.type).length + 1}`
|
||||
|
||||
if (containerInfo) {
|
||||
// Calculate position relative to the container node
|
||||
@@ -762,7 +785,9 @@ const WorkflowContent = React.memo(() => {
|
||||
const startSourceHandle =
|
||||
(containerNode?.data as any)?.kind === 'loop'
|
||||
? 'loop-start-source'
|
||||
: 'parallel-start-source'
|
||||
: data.type === 'parallel'
|
||||
? 'parallel-start-source'
|
||||
: 'while-start-source'
|
||||
|
||||
addEdge({
|
||||
id: crypto.randomUUID(),
|
||||
@@ -833,9 +858,13 @@ const WorkflowContent = React.memo(() => {
|
||||
|
||||
// Clear any previous highlighting
|
||||
document
|
||||
.querySelectorAll('.loop-node-drag-over, .parallel-node-drag-over')
|
||||
.querySelectorAll('.loop-node-drag-over, .parallel-node-drag-over, .while-node-drag-over')
|
||||
.forEach((el) => {
|
||||
el.classList.remove('loop-node-drag-over', 'parallel-node-drag-over')
|
||||
el.classList.remove(
|
||||
'loop-node-drag-over',
|
||||
'parallel-node-drag-over',
|
||||
'while-node-drag-over'
|
||||
)
|
||||
})
|
||||
|
||||
// If hovering over a container node, highlight it
|
||||
@@ -854,6 +883,11 @@ const WorkflowContent = React.memo(() => {
|
||||
(containerNode.data as any)?.kind === 'parallel'
|
||||
) {
|
||||
containerElement.classList.add('parallel-node-drag-over')
|
||||
} else if (
|
||||
containerNode?.type === 'subflowNode' &&
|
||||
(containerNode.data as any)?.kind === 'while'
|
||||
) {
|
||||
containerElement.classList.add('while-node-drag-over')
|
||||
}
|
||||
document.body.style.cursor = 'copy'
|
||||
}
|
||||
@@ -983,7 +1017,7 @@ const WorkflowContent = React.memo(() => {
|
||||
}
|
||||
|
||||
// Handle container nodes differently
|
||||
if (block.type === 'loop' || block.type === 'parallel') {
|
||||
if (block.type === 'loop' || block.type === 'parallel' || block.type === 'while') {
|
||||
const hasNestedError = nestedSubflowErrors.has(block.id)
|
||||
nodeArray.push({
|
||||
id: block.id,
|
||||
@@ -997,7 +1031,7 @@ const WorkflowContent = React.memo(() => {
|
||||
width: block.data?.width || 500,
|
||||
height: block.data?.height || 300,
|
||||
hasNestedError,
|
||||
kind: block.type === 'loop' ? 'loop' : 'parallel',
|
||||
kind: block.type === 'loop' ? 'loop' : block.type === 'parallel' ? 'parallel' : 'while',
|
||||
},
|
||||
})
|
||||
return
|
||||
@@ -1144,7 +1178,8 @@ const WorkflowContent = React.memo(() => {
|
||||
const sourceParentId =
|
||||
sourceNode.parentId ||
|
||||
(connection.sourceHandle === 'loop-start-source' ||
|
||||
connection.sourceHandle === 'parallel-start-source'
|
||||
connection.sourceHandle === 'parallel-start-source' ||
|
||||
connection.sourceHandle === 'while-start-source'
|
||||
? connection.source
|
||||
: undefined)
|
||||
const targetParentId = targetNode.parentId
|
||||
@@ -1155,7 +1190,8 @@ const WorkflowContent = React.memo(() => {
|
||||
// Special case for container start source: Always allow connections to nodes within the same container
|
||||
if (
|
||||
(connection.sourceHandle === 'loop-start-source' ||
|
||||
connection.sourceHandle === 'parallel-start-source') &&
|
||||
connection.sourceHandle === 'parallel-start-source' ||
|
||||
connection.sourceHandle === 'while-start-source') &&
|
||||
targetNode.parentId === sourceNode.id
|
||||
) {
|
||||
// This is a connection from container start to a node inside the container - always allow
|
||||
@@ -1222,7 +1258,11 @@ const WorkflowContent = React.memo(() => {
|
||||
if (potentialParentId) {
|
||||
const prevElement = document.querySelector(`[data-id="${potentialParentId}"]`)
|
||||
if (prevElement) {
|
||||
prevElement.classList.remove('loop-node-drag-over', 'parallel-node-drag-over')
|
||||
prevElement.classList.remove(
|
||||
'loop-node-drag-over',
|
||||
'parallel-node-drag-over',
|
||||
'while-node-drag-over'
|
||||
)
|
||||
}
|
||||
setPotentialParentId(null)
|
||||
document.body.style.cursor = ''
|
||||
@@ -1342,6 +1382,11 @@ const WorkflowContent = React.memo(() => {
|
||||
(bestContainerMatch.container.data as any)?.kind === 'parallel'
|
||||
) {
|
||||
containerElement.classList.add('parallel-node-drag-over')
|
||||
} else if (
|
||||
bestContainerMatch.container.type === 'subflowNode' &&
|
||||
(bestContainerMatch.container.data as any)?.kind === 'while'
|
||||
) {
|
||||
containerElement.classList.add('while-node-drag-over')
|
||||
}
|
||||
document.body.style.cursor = 'copy'
|
||||
}
|
||||
@@ -1350,7 +1395,11 @@ const WorkflowContent = React.memo(() => {
|
||||
if (potentialParentId) {
|
||||
const prevElement = document.querySelector(`[data-id="${potentialParentId}"]`)
|
||||
if (prevElement) {
|
||||
prevElement.classList.remove('loop-node-drag-over', 'parallel-node-drag-over')
|
||||
prevElement.classList.remove(
|
||||
'loop-node-drag-over',
|
||||
'parallel-node-drag-over',
|
||||
'while-node-drag-over'
|
||||
)
|
||||
}
|
||||
setPotentialParentId(null)
|
||||
document.body.style.cursor = ''
|
||||
@@ -1382,9 +1431,15 @@ const WorkflowContent = React.memo(() => {
|
||||
const onNodeDragStop = useCallback(
|
||||
(_event: React.MouseEvent, node: any) => {
|
||||
// Clear UI effects
|
||||
document.querySelectorAll('.loop-node-drag-over, .parallel-node-drag-over').forEach((el) => {
|
||||
el.classList.remove('loop-node-drag-over', 'parallel-node-drag-over')
|
||||
})
|
||||
document
|
||||
.querySelectorAll('.loop-node-drag-over, .parallel-node-drag-over, .while-node-drag-over')
|
||||
.forEach((el) => {
|
||||
el.classList.remove(
|
||||
'loop-node-drag-over',
|
||||
'parallel-node-drag-over',
|
||||
'while-node-drag-over'
|
||||
)
|
||||
})
|
||||
document.body.style.cursor = ''
|
||||
|
||||
// Emit collaborative position update for the final position
|
||||
@@ -1477,7 +1532,9 @@ const WorkflowContent = React.memo(() => {
|
||||
const startSourceHandle =
|
||||
(containerNode?.data as any)?.kind === 'loop'
|
||||
? 'loop-start-source'
|
||||
: 'parallel-start-source'
|
||||
: (containerNode?.data as any)?.kind === 'parallel'
|
||||
? 'parallel-start-source'
|
||||
: 'while-start-source'
|
||||
|
||||
addEdge({
|
||||
id: crypto.randomUUID(),
|
||||
|
||||
@@ -148,7 +148,7 @@ export function SearchModal({
|
||||
})
|
||||
)
|
||||
|
||||
// Add special blocks (loop and parallel)
|
||||
// Add special blocks (loop, parallel, and while)
|
||||
const specialBlocks: BlockItem[] = [
|
||||
{
|
||||
id: 'loop',
|
||||
@@ -166,6 +166,14 @@ export function SearchModal({
|
||||
bgColor: '#FEE12B',
|
||||
type: 'parallel',
|
||||
},
|
||||
{
|
||||
id: 'while',
|
||||
name: 'While',
|
||||
description: 'While Loop',
|
||||
icon: RepeatIcon,
|
||||
bgColor: '#FF9F43',
|
||||
type: 'while',
|
||||
},
|
||||
]
|
||||
|
||||
return [...regularBlocks, ...specialBlocks].sort((a, b) => a.name.localeCompare(b.name))
|
||||
|
||||
@@ -0,0 +1,87 @@
|
||||
import { useCallback } from 'react'
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { WhileTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/while/while-config'
|
||||
|
||||
type WhileToolbarItemProps = {
|
||||
disabled?: boolean
|
||||
}
|
||||
|
||||
// Custom component for the While Tool
|
||||
export default function WhileToolbarItem({ disabled = false }: WhileToolbarItemProps) {
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
const handleDragStart = (e: React.DragEvent) => {
|
||||
if (disabled) {
|
||||
e.preventDefault()
|
||||
return
|
||||
}
|
||||
// Only send the essential data for the while node
|
||||
const simplifiedData = {
|
||||
type: 'while',
|
||||
}
|
||||
e.dataTransfer.setData('application/json', JSON.stringify(simplifiedData))
|
||||
e.dataTransfer.effectAllowed = 'move'
|
||||
}
|
||||
|
||||
// Handle click to add while block
|
||||
const handleClick = useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
if (disabled) return
|
||||
|
||||
// Dispatch a custom event to be caught by the workflow component
|
||||
const event = new CustomEvent('add-block-from-toolbar', {
|
||||
detail: {
|
||||
type: 'while',
|
||||
clientX: e.clientX,
|
||||
clientY: e.clientY,
|
||||
},
|
||||
})
|
||||
window.dispatchEvent(event)
|
||||
},
|
||||
[disabled]
|
||||
)
|
||||
|
||||
const blockContent = (
|
||||
<div
|
||||
draggable={!disabled}
|
||||
onDragStart={handleDragStart}
|
||||
onClick={handleClick}
|
||||
className={cn(
|
||||
'group flex h-8 items-center gap-[10px] rounded-[8px] p-2 transition-colors',
|
||||
disabled
|
||||
? 'cursor-not-allowed opacity-60'
|
||||
: 'cursor-pointer hover:bg-muted active:cursor-grabbing'
|
||||
)}
|
||||
>
|
||||
<div
|
||||
className='relative flex h-6 w-6 shrink-0 items-center justify-center overflow-hidden rounded-[6px]'
|
||||
style={{ backgroundColor: WhileTool.bgColor }}
|
||||
>
|
||||
<WhileTool.icon
|
||||
className={cn(
|
||||
'h-[14px] w-[14px] text-white transition-transform duration-200',
|
||||
!disabled && 'group-hover:scale-110'
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
<span className='font-medium text-sm leading-none'>{WhileTool.name}</span>
|
||||
</div>
|
||||
)
|
||||
|
||||
if (disabled) {
|
||||
return (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>{blockContent}</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{userPermissions.isOfflineMode
|
||||
? 'Connection lost - please refresh'
|
||||
: 'Edit permissions required to add blocks'}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
)
|
||||
}
|
||||
|
||||
return blockContent
|
||||
}
|
||||
@@ -7,6 +7,7 @@ import { ScrollArea } from '@/components/ui/scroll-area'
|
||||
import { ToolbarBlock } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/toolbar/components/toolbar-block/toolbar-block'
|
||||
import LoopToolbarItem from '@/app/workspace/[workspaceId]/w/components/sidebar/components/toolbar/components/toolbar-loop-block/toolbar-loop-block'
|
||||
import ParallelToolbarItem from '@/app/workspace/[workspaceId]/w/components/sidebar/components/toolbar/components/toolbar-parallel-block/toolbar-parallel-block'
|
||||
import WhileToolbarItem from '@/app/workspace/[workspaceId]/w/components/sidebar/components/toolbar/components/toolbar-while-block/toolbar-while-block'
|
||||
import { getAllBlocks } from '@/blocks'
|
||||
import type { WorkspaceUserPermissions } from '@/hooks/use-user-permissions'
|
||||
|
||||
@@ -54,7 +55,7 @@ export function Toolbar({ userPermissions, isWorkspaceSelectorVisible = false }:
|
||||
}))
|
||||
.sort((a, b) => a.name.localeCompare(b.name))
|
||||
|
||||
// Create special blocks (loop and parallel) if they match search
|
||||
// Create special blocks (loop, parallel, and while) if they match search
|
||||
const specialBlockItems: BlockItem[] = []
|
||||
|
||||
if (!searchQuery.trim() || 'loop'.toLowerCase().includes(searchQuery.toLowerCase())) {
|
||||
@@ -73,6 +74,14 @@ export function Toolbar({ userPermissions, isWorkspaceSelectorVisible = false }:
|
||||
})
|
||||
}
|
||||
|
||||
if (!searchQuery.trim() || 'while'.toLowerCase().includes(searchQuery.toLowerCase())) {
|
||||
specialBlockItems.push({
|
||||
name: 'While',
|
||||
type: 'while',
|
||||
isCustom: true,
|
||||
})
|
||||
}
|
||||
|
||||
// Sort special blocks alphabetically
|
||||
specialBlockItems.sort((a, b) => a.name.localeCompare(b.name))
|
||||
|
||||
@@ -128,7 +137,7 @@ export function Toolbar({ userPermissions, isWorkspaceSelectorVisible = false }:
|
||||
/>
|
||||
))}
|
||||
|
||||
{/* Special Blocks Section (Loop & Parallel) */}
|
||||
{/* Special Blocks Section (Loop, Parallel, and While) */}
|
||||
{specialBlocks.map((block) => {
|
||||
if (block.type === 'loop') {
|
||||
return <LoopToolbarItem key={block.type} disabled={!userPermissions.canEdit} />
|
||||
@@ -136,6 +145,9 @@ export function Toolbar({ userPermissions, isWorkspaceSelectorVisible = false }:
|
||||
if (block.type === 'parallel') {
|
||||
return <ParallelToolbarItem key={block.type} disabled={!userPermissions.canEdit} />
|
||||
}
|
||||
if (block.type === 'while') {
|
||||
return <WhileToolbarItem key={block.type} disabled={!userPermissions.canEdit} />
|
||||
}
|
||||
return null
|
||||
})}
|
||||
|
||||
|
||||
@@ -1007,8 +1007,11 @@ export function Sidebar() {
|
||||
>
|
||||
<UsageIndicator
|
||||
onClick={() => {
|
||||
const isBlocked = useSubscriptionStore.getState().getBillingStatus() === 'blocked'
|
||||
if (isBlocked) {
|
||||
const subscriptionStore = useSubscriptionStore.getState()
|
||||
const isBlocked = subscriptionStore.getBillingStatus() === 'blocked'
|
||||
const canUpgrade = subscriptionStore.canUpgrade()
|
||||
|
||||
if (isBlocked || !canUpgrade) {
|
||||
if (typeof window !== 'undefined') {
|
||||
window.dispatchEvent(
|
||||
new CustomEvent('open-settings', { detail: { tab: 'subscription' } })
|
||||
|
||||
@@ -83,6 +83,14 @@ export function WorkflowPreview({
}
}, [workflowState.parallels, isValidWorkflowState])

const whilesStructure = useMemo(() => {
if (!isValidWorkflowState) return { count: 0, ids: '' }
return {
count: Object.keys(workflowState.whiles || {}).length,
ids: Object.keys(workflowState.whiles || {}).join(','),
}
}, [workflowState.whiles, isValidWorkflowState])

const edgesStructure = useMemo(() => {
if (!isValidWorkflowState) return { count: 0, ids: '' }
return {
@@ -166,6 +174,26 @@ export function WorkflowPreview({
return
}

if (block.type === 'while') {
nodeArray.push({
id: block.id,
type: 'subflowNode',
position: absolutePosition,
parentId: block.data?.parentId,
extent: block.data?.extent || undefined,
draggable: false,
data: {
...block.data,
width: block.data?.width || 500,
height: block.data?.height || 300,
state: 'valid',
isPreview: true,
kind: 'while',
},
})
return
}

const blockConfig = getBlock(block.type)
if (!blockConfig) {
logger.error(`No configuration found for block type: ${block.type}`, { blockId })
@@ -229,6 +257,7 @@ export function WorkflowPreview({
blocksStructure,
loopsStructure,
parallelsStructure,
whilesStructure,
showSubBlocks,
workflowState.blocks,
isValidWorkflowState,
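The `whilesStructure` memo above reuses the cheap change-detection signature the preview already applies to loops and parallels: instead of depending on the whole object, it derives a count plus a joined id string. A minimal standalone sketch of that pattern, with illustrative names that are not taken from the codebase:

```typescript
import { useMemo } from 'react'

// Derives a small, stable signature from a record so downstream memos only
// recompute when containers are actually added or removed.
function useStructureSignature(
  record: Record<string, unknown> | undefined,
  valid: boolean
) {
  return useMemo(() => {
    if (!valid) return { count: 0, ids: '' }
    const keys = Object.keys(record || {})
    return { count: keys.length, ids: keys.join(',') }
  }, [record, valid])
}
```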
@@ -129,7 +129,7 @@ export async function executeWebhookJob(payload: WebhookExecutionPayload) {
edges,
loops || {},
parallels || {},
true // Enable validation during execution
{} // Enable validation during execution
)

// Handle special Airtable case

@@ -118,7 +118,7 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
edges,
loops || {},
parallels || {},
true // Enable validation during execution
{} // Enable validation during execution
)

// Create executor and execute
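Both trigger paths above now pass an extra map in the position previously occupied by the validation flag, matching the five-argument `serializeWorkflow(blocks, edges, loops, parallels, whiles)` call used elsewhere in this changeset. A minimal sketch of that call shape; the import path and parameter names are assumptions, only the five-argument call is taken from the diff:

```typescript
// Sketch only - '@/serializer' and the wrapper name are assumed for illustration.
import { Serializer } from '@/serializer'

function serializeForExecution(
  blocks: Record<string, any>,
  edges: any[],
  loops?: Record<string, any>,
  parallels?: Record<string, any>,
  whiles?: Record<string, any>
) {
  const serializer = new Serializer()
  // Each container map defaults to {} so workflows without that container
  // type keep serializing exactly as before.
  return serializer.serializeWorkflow(
    blocks,
    edges,
    loops || {},
    parallels || {},
    whiles || {}
  )
}
```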
@@ -1,255 +0,0 @@
|
||||
import { MySQLIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import type { MySQLResponse } from '@/tools/mysql/types'
|
||||
|
||||
export const MySQLBlock: BlockConfig<MySQLResponse> = {
|
||||
type: 'mysql',
|
||||
name: 'MySQL',
|
||||
description: 'Connect to MySQL database',
|
||||
longDescription:
|
||||
'Connect to any MySQL database to execute queries, manage data, and perform database operations. Supports SELECT, INSERT, UPDATE, DELETE operations with secure connection handling.',
|
||||
docsLink: 'https://docs.sim.ai/tools/mysql',
|
||||
category: 'tools',
|
||||
bgColor: '#E0E0E0',
|
||||
icon: MySQLIcon,
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
layout: 'full',
|
||||
options: [
|
||||
{ label: 'Query (SELECT)', id: 'query' },
|
||||
{ label: 'Insert Data', id: 'insert' },
|
||||
{ label: 'Update Data', id: 'update' },
|
||||
{ label: 'Delete Data', id: 'delete' },
|
||||
{ label: 'Execute Raw SQL', id: 'execute' },
|
||||
],
|
||||
value: () => 'query',
|
||||
},
|
||||
{
|
||||
id: 'host',
|
||||
title: 'Host',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'localhost or your.database.host',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'port',
|
||||
title: 'Port',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: '3306',
|
||||
value: () => '3306',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'database',
|
||||
title: 'Database Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'your_database',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'username',
|
||||
title: 'Username',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'root',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'password',
|
||||
title: 'Password',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
password: true,
|
||||
placeholder: 'Your database password',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'ssl',
|
||||
title: 'SSL Mode',
|
||||
type: 'dropdown',
|
||||
layout: 'full',
|
||||
options: [
|
||||
{ label: 'Disabled', id: 'disabled' },
|
||||
{ label: 'Required', id: 'required' },
|
||||
{ label: 'Preferred', id: 'preferred' },
|
||||
],
|
||||
value: () => 'preferred',
|
||||
},
|
||||
// Table field for insert/update/delete operations
|
||||
{
|
||||
id: 'table',
|
||||
title: 'Table Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'users',
|
||||
condition: { field: 'operation', value: 'insert' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'table',
|
||||
title: 'Table Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'users',
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'table',
|
||||
title: 'Table Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'users',
|
||||
condition: { field: 'operation', value: 'delete' },
|
||||
required: true,
|
||||
},
|
||||
// SQL Query field
|
||||
{
|
||||
id: 'query',
|
||||
title: 'SQL Query',
|
||||
type: 'code',
|
||||
layout: 'full',
|
||||
placeholder: 'SELECT * FROM users WHERE active = true',
|
||||
condition: { field: 'operation', value: 'query' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'query',
|
||||
title: 'SQL Query',
|
||||
type: 'code',
|
||||
layout: 'full',
|
||||
placeholder: 'SELECT * FROM table_name',
|
||||
condition: { field: 'operation', value: 'execute' },
|
||||
required: true,
|
||||
},
|
||||
// Data for insert operations
|
||||
{
|
||||
id: 'data',
|
||||
title: 'Data (JSON)',
|
||||
type: 'code',
|
||||
layout: 'full',
|
||||
placeholder: '{\n "name": "John Doe",\n "email": "john@example.com",\n "active": true\n}',
|
||||
condition: { field: 'operation', value: 'insert' },
|
||||
required: true,
|
||||
},
|
||||
// Set clause for updates
|
||||
{
|
||||
id: 'data',
|
||||
title: 'Update Data (JSON)',
|
||||
type: 'code',
|
||||
layout: 'full',
|
||||
placeholder: '{\n "name": "Jane Doe",\n "email": "jane@example.com"\n}',
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
required: true,
|
||||
},
|
||||
// Where clause for update/delete
|
||||
{
|
||||
id: 'where',
|
||||
title: 'WHERE Condition',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'id = 1',
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'where',
|
||||
title: 'WHERE Condition',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'id = 1',
|
||||
condition: { field: 'operation', value: 'delete' },
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: ['mysql_query', 'mysql_insert', 'mysql_update', 'mysql_delete', 'mysql_execute'],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
switch (params.operation) {
|
||||
case 'query':
|
||||
return 'mysql_query'
|
||||
case 'insert':
|
||||
return 'mysql_insert'
|
||||
case 'update':
|
||||
return 'mysql_update'
|
||||
case 'delete':
|
||||
return 'mysql_delete'
|
||||
case 'execute':
|
||||
return 'mysql_execute'
|
||||
default:
|
||||
throw new Error(`Invalid MySQL operation: ${params.operation}`)
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const { operation, data, ...rest } = params
|
||||
|
||||
// Parse JSON data if it's a string
|
||||
let parsedData
|
||||
if (data && typeof data === 'string' && data.trim()) {
|
||||
try {
|
||||
parsedData = JSON.parse(data)
|
||||
} catch (parseError) {
|
||||
const errorMsg = parseError instanceof Error ? parseError.message : 'Unknown JSON error'
|
||||
throw new Error(`Invalid JSON data format: ${errorMsg}. Please check your JSON syntax.`)
|
||||
}
|
||||
} else if (data && typeof data === 'object') {
|
||||
parsedData = data
|
||||
}
|
||||
|
||||
// Build connection config
|
||||
const connectionConfig = {
|
||||
host: rest.host,
|
||||
port: typeof rest.port === 'string' ? Number.parseInt(rest.port, 10) : rest.port || 3306,
|
||||
database: rest.database,
|
||||
username: rest.username,
|
||||
password: rest.password,
|
||||
ssl: rest.ssl || 'preferred',
|
||||
}
|
||||
|
||||
// Build params object
|
||||
const result: any = { ...connectionConfig }
|
||||
|
||||
if (rest.table) result.table = rest.table
|
||||
if (rest.query) result.query = rest.query
|
||||
if (rest.where) result.where = rest.where
|
||||
if (parsedData !== undefined) result.data = parsedData
|
||||
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Database operation to perform' },
|
||||
host: { type: 'string', description: 'Database host' },
|
||||
port: { type: 'string', description: 'Database port' },
|
||||
database: { type: 'string', description: 'Database name' },
|
||||
username: { type: 'string', description: 'Database username' },
|
||||
password: { type: 'string', description: 'Database password' },
|
||||
ssl: { type: 'string', description: 'SSL mode' },
|
||||
table: { type: 'string', description: 'Table name' },
|
||||
query: { type: 'string', description: 'SQL query to execute' },
|
||||
data: { type: 'json', description: 'Data for insert/update operations' },
|
||||
where: { type: 'string', description: 'WHERE clause for update/delete' },
|
||||
},
|
||||
outputs: {
|
||||
message: {
|
||||
type: 'string',
|
||||
description: 'Success or error message describing the operation outcome',
|
||||
},
|
||||
rows: {
|
||||
type: 'array',
|
||||
description: 'Array of rows returned from the query',
|
||||
},
|
||||
rowCount: {
|
||||
type: 'number',
|
||||
description: 'Number of rows affected by the operation',
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -1,109 +0,0 @@
|
||||
import { ParallelIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import type { ToolResponse } from '@/tools/types'
|
||||
|
||||
export const ParallelBlock: BlockConfig<ToolResponse> = {
|
||||
type: 'parallel_ai',
|
||||
name: 'Parallel AI',
|
||||
description: 'Search with Parallel AI',
|
||||
longDescription:
|
||||
"Search the web using Parallel AI's advanced search capabilities. Get comprehensive results with intelligent processing and content extraction.",
|
||||
docsLink: 'https://docs.parallel.ai/search-api/search-quickstart',
|
||||
category: 'tools',
|
||||
bgColor: '#E0E0E0',
|
||||
icon: ParallelIcon,
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'objective',
|
||||
title: 'Search Objective',
|
||||
type: 'long-input',
|
||||
layout: 'full',
|
||||
placeholder: "When was the United Nations established? Prefer UN's websites.",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'search_queries',
|
||||
title: 'Search Queries',
|
||||
type: 'long-input',
|
||||
layout: 'full',
|
||||
placeholder:
|
||||
'Enter search queries separated by commas (e.g., "Founding year UN", "Year of founding United Nations")',
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
id: 'processor',
|
||||
title: 'Processor',
|
||||
type: 'dropdown',
|
||||
layout: 'full',
|
||||
options: [
|
||||
{ label: 'Base', id: 'base' },
|
||||
{ label: 'Pro', id: 'pro' },
|
||||
],
|
||||
value: () => 'base',
|
||||
},
|
||||
{
|
||||
id: 'max_results',
|
||||
title: 'Max Results',
|
||||
type: 'short-input',
|
||||
layout: 'half',
|
||||
placeholder: '5',
|
||||
},
|
||||
{
|
||||
id: 'max_chars_per_result',
|
||||
title: 'Max Chars',
|
||||
type: 'short-input',
|
||||
layout: 'half',
|
||||
placeholder: '1500',
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'Enter your Parallel AI API key',
|
||||
password: true,
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: ['parallel_search'],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
// Convert search_queries from comma-separated string to array (if provided)
|
||||
if (params.search_queries && typeof params.search_queries === 'string') {
|
||||
const queries = params.search_queries
|
||||
.split(',')
|
||||
.map((query: string) => query.trim())
|
||||
.filter((query: string) => query.length > 0)
|
||||
// Only set if we have actual queries
|
||||
if (queries.length > 0) {
|
||||
params.search_queries = queries
|
||||
} else {
|
||||
params.search_queries = undefined
|
||||
}
|
||||
}
|
||||
|
||||
// Convert numeric parameters
|
||||
if (params.max_results) {
|
||||
params.max_results = Number(params.max_results)
|
||||
}
|
||||
if (params.max_chars_per_result) {
|
||||
params.max_chars_per_result = Number(params.max_chars_per_result)
|
||||
}
|
||||
|
||||
return 'parallel_search'
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
objective: { type: 'string', description: 'Search objective or question' },
|
||||
search_queries: { type: 'string', description: 'Comma-separated search queries' },
|
||||
processor: { type: 'string', description: 'Processing method' },
|
||||
max_results: { type: 'number', description: 'Maximum number of results' },
|
||||
max_chars_per_result: { type: 'number', description: 'Maximum characters per result' },
|
||||
apiKey: { type: 'string', description: 'Parallel AI API key' },
|
||||
},
|
||||
outputs: {
|
||||
results: { type: 'array', description: 'Search results with excerpts from relevant pages' },
|
||||
},
|
||||
}
|
||||
@@ -1,261 +0,0 @@
|
||||
import { PostgresIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import type { PostgresResponse } from '@/tools/postgresql/types'
|
||||
|
||||
export const PostgreSQLBlock: BlockConfig<PostgresResponse> = {
|
||||
type: 'postgresql',
|
||||
name: 'PostgreSQL',
|
||||
description: 'Connect to PostgreSQL database',
|
||||
longDescription:
|
||||
'Connect to any PostgreSQL database to execute queries, manage data, and perform database operations. Supports SELECT, INSERT, UPDATE, DELETE operations with secure connection handling.',
|
||||
docsLink: 'https://docs.sim.ai/tools/postgresql',
|
||||
category: 'tools',
|
||||
bgColor: '#336791',
|
||||
icon: PostgresIcon,
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
layout: 'full',
|
||||
options: [
|
||||
{ label: 'Query (SELECT)', id: 'query' },
|
||||
{ label: 'Insert Data', id: 'insert' },
|
||||
{ label: 'Update Data', id: 'update' },
|
||||
{ label: 'Delete Data', id: 'delete' },
|
||||
{ label: 'Execute Raw SQL', id: 'execute' },
|
||||
],
|
||||
value: () => 'query',
|
||||
},
|
||||
{
|
||||
id: 'host',
|
||||
title: 'Host',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'localhost or your.database.host',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'port',
|
||||
title: 'Port',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: '5432',
|
||||
value: () => '5432',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'database',
|
||||
title: 'Database Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'your_database',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'username',
|
||||
title: 'Username',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'postgres',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'password',
|
||||
title: 'Password',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
password: true,
|
||||
placeholder: 'Your database password',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'ssl',
|
||||
title: 'SSL Mode',
|
||||
type: 'dropdown',
|
||||
layout: 'full',
|
||||
options: [
|
||||
{ label: 'Disabled', id: 'disabled' },
|
||||
{ label: 'Required', id: 'required' },
|
||||
{ label: 'Preferred', id: 'preferred' },
|
||||
],
|
||||
value: () => 'preferred',
|
||||
},
|
||||
// Table field for insert/update/delete operations
|
||||
{
|
||||
id: 'table',
|
||||
title: 'Table Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'users',
|
||||
condition: { field: 'operation', value: 'insert' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'table',
|
||||
title: 'Table Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'users',
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'table',
|
||||
title: 'Table Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'users',
|
||||
condition: { field: 'operation', value: 'delete' },
|
||||
required: true,
|
||||
},
|
||||
// SQL Query field
|
||||
{
|
||||
id: 'query',
|
||||
title: 'SQL Query',
|
||||
type: 'code',
|
||||
layout: 'full',
|
||||
placeholder: 'SELECT * FROM users WHERE active = true',
|
||||
condition: { field: 'operation', value: 'query' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'query',
|
||||
title: 'SQL Query',
|
||||
type: 'code',
|
||||
layout: 'full',
|
||||
placeholder: 'SELECT * FROM table_name',
|
||||
condition: { field: 'operation', value: 'execute' },
|
||||
required: true,
|
||||
},
|
||||
// Data for insert operations
|
||||
{
|
||||
id: 'data',
|
||||
title: 'Data (JSON)',
|
||||
type: 'code',
|
||||
layout: 'full',
|
||||
placeholder: '{\n "name": "John Doe",\n "email": "john@example.com",\n "active": true\n}',
|
||||
condition: { field: 'operation', value: 'insert' },
|
||||
required: true,
|
||||
},
|
||||
// Set clause for updates
|
||||
{
|
||||
id: 'data',
|
||||
title: 'Update Data (JSON)',
|
||||
type: 'code',
|
||||
layout: 'full',
|
||||
placeholder: '{\n "name": "Jane Doe",\n "email": "jane@example.com"\n}',
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
required: true,
|
||||
},
|
||||
// Where clause for update/delete
|
||||
{
|
||||
id: 'where',
|
||||
title: 'WHERE Condition',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'id = 1',
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'where',
|
||||
title: 'WHERE Condition',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'id = 1',
|
||||
condition: { field: 'operation', value: 'delete' },
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: [
|
||||
'postgresql_query',
|
||||
'postgresql_insert',
|
||||
'postgresql_update',
|
||||
'postgresql_delete',
|
||||
'postgresql_execute',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
switch (params.operation) {
|
||||
case 'query':
|
||||
return 'postgresql_query'
|
||||
case 'insert':
|
||||
return 'postgresql_insert'
|
||||
case 'update':
|
||||
return 'postgresql_update'
|
||||
case 'delete':
|
||||
return 'postgresql_delete'
|
||||
case 'execute':
|
||||
return 'postgresql_execute'
|
||||
default:
|
||||
throw new Error(`Invalid PostgreSQL operation: ${params.operation}`)
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const { operation, data, ...rest } = params
|
||||
|
||||
// Parse JSON data if it's a string
|
||||
let parsedData
|
||||
if (data && typeof data === 'string' && data.trim()) {
|
||||
try {
|
||||
parsedData = JSON.parse(data)
|
||||
} catch (parseError) {
|
||||
const errorMsg = parseError instanceof Error ? parseError.message : 'Unknown JSON error'
|
||||
throw new Error(`Invalid JSON data format: ${errorMsg}. Please check your JSON syntax.`)
|
||||
}
|
||||
} else if (data && typeof data === 'object') {
|
||||
parsedData = data
|
||||
}
|
||||
|
||||
// Build connection config
|
||||
const connectionConfig = {
|
||||
host: rest.host,
|
||||
port: typeof rest.port === 'string' ? Number.parseInt(rest.port, 10) : rest.port || 5432,
|
||||
database: rest.database,
|
||||
username: rest.username,
|
||||
password: rest.password,
|
||||
ssl: rest.ssl || 'preferred',
|
||||
}
|
||||
|
||||
// Build params object
|
||||
const result: any = { ...connectionConfig }
|
||||
|
||||
if (rest.table) result.table = rest.table
|
||||
if (rest.query) result.query = rest.query
|
||||
if (rest.where) result.where = rest.where
|
||||
if (parsedData !== undefined) result.data = parsedData
|
||||
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Database operation to perform' },
|
||||
host: { type: 'string', description: 'Database host' },
|
||||
port: { type: 'string', description: 'Database port' },
|
||||
database: { type: 'string', description: 'Database name' },
|
||||
username: { type: 'string', description: 'Database username' },
|
||||
password: { type: 'string', description: 'Database password' },
|
||||
ssl: { type: 'string', description: 'SSL mode' },
|
||||
table: { type: 'string', description: 'Table name' },
|
||||
query: { type: 'string', description: 'SQL query to execute' },
|
||||
data: { type: 'json', description: 'Data for insert/update operations' },
|
||||
where: { type: 'string', description: 'WHERE clause for update/delete' },
|
||||
},
|
||||
outputs: {
|
||||
message: {
|
||||
type: 'string',
|
||||
description: 'Success or error message describing the operation outcome',
|
||||
},
|
||||
rows: {
|
||||
type: 'array',
|
||||
description: 'Array of rows returned from the query',
|
||||
},
|
||||
rowCount: {
|
||||
type: 'number',
|
||||
description: 'Number of rows affected by the operation',
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -40,15 +40,12 @@ import { MicrosoftExcelBlock } from '@/blocks/blocks/microsoft_excel'
import { MicrosoftPlannerBlock } from '@/blocks/blocks/microsoft_planner'
import { MicrosoftTeamsBlock } from '@/blocks/blocks/microsoft_teams'
import { MistralParseBlock } from '@/blocks/blocks/mistral_parse'
import { MySQLBlock } from '@/blocks/blocks/mysql'
import { NotionBlock } from '@/blocks/blocks/notion'
import { OneDriveBlock } from '@/blocks/blocks/onedrive'
import { OpenAIBlock } from '@/blocks/blocks/openai'
import { OutlookBlock } from '@/blocks/blocks/outlook'
import { ParallelBlock } from '@/blocks/blocks/parallel'
import { PerplexityBlock } from '@/blocks/blocks/perplexity'
import { PineconeBlock } from '@/blocks/blocks/pinecone'
import { PostgreSQLBlock } from '@/blocks/blocks/postgresql'
import { QdrantBlock } from '@/blocks/blocks/qdrant'
import { RedditBlock } from '@/blocks/blocks/reddit'
import { ResponseBlock } from '@/blocks/blocks/response'
@@ -116,15 +113,12 @@ export const registry: Record<string, BlockConfig> = {
microsoft_planner: MicrosoftPlannerBlock,
microsoft_teams: MicrosoftTeamsBlock,
mistral_parse: MistralParseBlock,
mysql: MySQLBlock,
notion: NotionBlock,
openai: OpenAIBlock,
outlook: OutlookBlock,
onedrive: OneDriveBlock,
parallel_ai: ParallelBlock,
perplexity: PerplexityBlock,
pinecone: PineconeBlock,
postgresql: PostgreSQLBlock,
qdrant: QdrantBlock,
memory: MemoryBlock,
reddit: RedditBlock,
@@ -4,7 +4,7 @@ import type { ToolResponse } from '@/tools/types'
// Basic types
export type BlockIcon = (props: SVGProps<SVGSVGElement>) => JSX.Element
export type ParamType = 'string' | 'number' | 'boolean' | 'json'
export type PrimitiveValueType = 'string' | 'number' | 'boolean' | 'json' | 'array' | 'any'
export type PrimitiveValueType = 'string' | 'number' | 'boolean' | 'json' | 'any'

// Block classification
export type BlockCategory = 'blocks' | 'tools' | 'triggers'
@@ -3345,98 +3345,6 @@ export function MicrosoftPlannerIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function ParallelIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
fill='currentColor'
|
||||
width='271'
|
||||
height='270'
|
||||
viewBox='0 0 271 270'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
>
|
||||
<path
|
||||
d='M267.804 105.65H193.828C194.026 106.814 194.187 107.996 194.349 109.178H76.6703C76.4546 110.736 76.2388 112.312 76.0591 113.87H1.63342C1.27387 116.198 0.950289 118.543 0.698608 120.925H75.3759C75.2501 122.483 75.1602 124.059 75.0703 125.617H195.949C196.003 126.781 196.057 127.962 196.093 129.144H270.68V125.384C270.195 118.651 269.242 112.061 267.804 105.65Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M195.949 144.401H75.0703C75.1422 145.977 75.2501 147.535 75.3759 149.093H0.698608C0.950289 151.457 1.2559 153.802 1.63342 156.148H76.0591C76.2388 157.724 76.4366 159.282 76.6703 160.84H194.349C194.187 162.022 194.008 163.186 193.828 164.367H267.804C269.242 157.957 270.195 151.367 270.68 144.634V140.874H196.093C196.057 142.055 196.003 143.219 195.949 144.401Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M190.628 179.642H80.3559C80.7514 181.218 81.1828 182.776 81.6143 184.334H9.30994C10.2448 186.715 11.2515 189.061 12.3121 191.389H83.7536C84.2749 192.965 84.7962 194.523 85.3535 196.08H185.594C185.163 197.262 184.732 198.426 184.282 199.608H254.519C258.6 192.177 261.98 184.316 264.604 176.114H191.455C191.185 177.296 190.898 178.46 190.61 179.642H190.628Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M177.666 214.883H93.3352C94.1082 216.458 94.9172 218.034 95.7441 219.574H29.8756C31.8351 221.992 33.8666 224.337 35.9699 226.63H99.6632C100.598 228.205 101.551 229.781 102.522 231.321H168.498C167.761 232.503 167.006 233.685 166.233 234.849H226.762C234.474 227.847 241.36 219.95 247.292 211.355H179.356C178.799 212.537 178.26 213.719 177.684 214.883H177.666Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M154.943 250.106H116.058C117.371 251.699 118.701 253.257 120.067 254.797H73.021C91.6094 264.431 112.715 269.946 135.096 270C135.24 270 135.366 270 135.492 270C135.618 270 135.761 270 135.887 270C164.04 269.911 190.178 261.28 211.805 246.56H157.748C156.813 247.742 155.878 248.924 154.925 250.088L154.943 250.106Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M116.059 19.9124H154.943C155.896 21.0764 156.831 22.2582 157.766 23.4401H211.823C190.179 8.72065 164.058 0.0895344 135.906 0C135.762 0 135.636 0 135.51 0C135.384 0 135.24 0 135.115 0C112.715 0.0716275 91.6277 5.56904 73.0393 15.2029H120.086C118.719 16.7429 117.389 18.3187 116.077 19.8945L116.059 19.9124Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M93.3356 55.1532H177.667C178.242 56.3171 178.799 57.499 179.339 58.6808H247.274C241.342 50.0855 234.457 42.1886 226.744 35.187H166.215C166.988 36.351 167.743 37.5328 168.48 38.7147H102.504C101.533 40.2726 100.58 41.8305 99.6456 43.4063H35.9523C33.831 45.6804 31.7996 48.0262 29.858 50.4616H95.7265C94.8996 52.0195 94.1086 53.5774 93.3176 55.1532H93.3356Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M80.3736 90.3758H190.646C190.933 91.5398 191.221 92.7216 191.491 93.9035H264.64C262.015 85.7021 258.636 77.841 254.555 70.4097H184.318C184.767 71.5736 185.199 72.7555 185.63 73.9373H85.3893C84.832 75.4952 84.2927 77.0531 83.7893 78.6289H12.3479C11.2872 80.9389 10.2805 83.2847 9.3457 85.6842H81.65C81.2186 87.2421 80.7871 88.8 80.3916 90.3758H80.3736Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function PostgresIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
width='800px'
|
||||
height='800px'
|
||||
viewBox='-4 0 264 264'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
preserveAspectRatio='xMinYMin meet'
|
||||
>
|
||||
<path d='M255.008 158.086c-1.535-4.649-5.556-7.887-10.756-8.664-2.452-.366-5.26-.21-8.583.475-5.792 1.195-10.089 1.65-13.225 1.738 11.837-19.985 21.462-42.775 27.003-64.228 8.96-34.689 4.172-50.492-1.423-57.64C233.217 10.847 211.614.683 185.552.372c-13.903-.17-26.108 2.575-32.475 4.549-5.928-1.046-12.302-1.63-18.99-1.738-12.537-.2-23.614 2.533-33.079 8.15-5.24-1.772-13.65-4.27-23.362-5.864-22.842-3.75-41.252-.828-54.718 8.685C6.622 25.672-.937 45.684.461 73.634c.444 8.874 5.408 35.874 13.224 61.48 4.492 14.718 9.282 26.94 14.237 36.33 7.027 13.315 14.546 21.156 22.987 23.972 4.731 1.576 13.327 2.68 22.368-4.85 1.146 1.388 2.675 2.767 4.704 4.048 2.577 1.625 5.728 2.953 8.875 3.74 11.341 2.835 21.964 2.126 31.027-1.848.056 1.612.099 3.152.135 4.482.06 2.157.12 4.272.199 6.25.537 13.374 1.447 23.773 4.143 31.049.148.4.347 1.01.557 1.657 1.345 4.118 3.594 11.012 9.316 16.411 5.925 5.593 13.092 7.308 19.656 7.308 3.292 0 6.433-.432 9.188-1.022 9.82-2.105 20.973-5.311 29.041-16.799 7.628-10.86 11.336-27.217 12.007-52.99.087-.729.167-1.425.244-2.088l.16-1.362 1.797.158.463.031c10.002.456 22.232-1.665 29.743-5.154 5.935-2.754 24.954-12.795 20.476-26.351' />
|
||||
<path
|
||||
d='M237.906 160.722c-29.74 6.135-31.785-3.934-31.785-3.934 31.4-46.593 44.527-105.736 33.2-120.211-30.904-39.485-84.399-20.811-85.292-20.327l-.287.052c-5.876-1.22-12.451-1.946-19.842-2.067-13.456-.22-23.664 3.528-31.41 9.402 0 0-95.43-39.314-90.991 49.444.944 18.882 27.064 142.873 58.218 105.422 11.387-13.695 22.39-25.274 22.39-25.274 5.464 3.63 12.006 5.482 18.864 4.817l.533-.452c-.166 1.7-.09 3.363.213 5.332-8.026 8.967-5.667 10.541-21.711 13.844-16.235 3.346-6.698 9.302-.471 10.86 7.549 1.887 25.013 4.561 36.813-11.958l-.47 1.885c3.144 2.519 5.352 16.383 4.982 28.952-.37 12.568-.617 21.197 1.86 27.937 2.479 6.74 4.948 21.905 26.04 17.386 17.623-3.777 26.756-13.564 28.027-29.89.901-11.606 2.942-9.89 3.07-20.267l1.637-4.912c1.887-15.733.3-20.809 11.157-18.448l2.64.232c7.99.363 18.45-1.286 24.589-4.139 13.218-6.134 21.058-16.377 8.024-13.686h.002'
|
||||
fill='#336791'
|
||||
/>
|
||||
<path
|
||||
d='M108.076 81.525c-2.68-.373-5.107-.028-6.335.902-.69.523-.904 1.129-.962 1.546-.154 1.105.62 2.327 1.096 2.957 1.346 1.784 3.312 3.01 5.258 3.28.282.04.563.058.842.058 3.245 0 6.196-2.527 6.456-4.392.325-2.336-3.066-3.893-6.355-4.35M196.86 81.599c-.256-1.831-3.514-2.353-6.606-1.923-3.088.43-6.082 1.824-5.832 3.659.2 1.427 2.777 3.863 5.827 3.863.258 0 .518-.017.78-.054 2.036-.282 3.53-1.575 4.24-2.32 1.08-1.136 1.706-2.402 1.591-3.225'
|
||||
fill='#FFF'
|
||||
/>
|
||||
<path
|
||||
d='M247.802 160.025c-1.134-3.429-4.784-4.532-10.848-3.28-18.005 3.716-24.453 1.142-26.57-.417 13.995-21.32 25.508-47.092 31.719-71.137 2.942-11.39 4.567-21.968 4.7-30.59.147-9.463-1.465-16.417-4.789-20.665-13.402-17.125-33.072-26.311-56.882-26.563-16.369-.184-30.199 4.005-32.88 5.183-5.646-1.404-11.801-2.266-18.502-2.376-12.288-.199-22.91 2.743-31.704 8.74-3.82-1.422-13.692-4.811-25.765-6.756-20.872-3.36-37.458-.814-49.294 7.571-14.123 10.006-20.643 27.892-19.38 53.16.425 8.501 5.269 34.653 12.913 59.698 10.062 32.964 21 51.625 32.508 55.464 1.347.449 2.9.763 4.613.763 4.198 0 9.345-1.892 14.7-8.33a529.832 529.832 0 0 1 20.261-22.926c4.524 2.428 9.494 3.784 14.577 3.92.01.133.023.266.035.398a117.66 117.66 0 0 0-2.57 3.175c-3.522 4.471-4.255 5.402-15.592 7.736-3.225.666-11.79 2.431-11.916 8.435-.136 6.56 10.125 9.315 11.294 9.607 4.074 1.02 7.999 1.523 11.742 1.523 9.103 0 17.114-2.992 23.516-8.781-.197 23.386.778 46.43 3.586 53.451 2.3 5.748 7.918 19.795 25.664 19.794 2.604 0 5.47-.303 8.623-.979 18.521-3.97 26.564-12.156 29.675-30.203 1.665-9.645 4.522-32.676 5.866-45.03 2.836.885 6.487 1.29 10.434 1.289 8.232 0 17.731-1.749 23.688-4.514 6.692-3.108 18.768-10.734 16.578-17.36zm-44.106-83.48c-.061 3.647-.563 6.958-1.095 10.414-.573 3.717-1.165 7.56-1.314 12.225-.147 4.54.42 9.26.968 13.825 1.108 9.22 2.245 18.712-2.156 28.078a36.508 36.508 0 0 1-1.95-4.009c-.547-1.326-1.735-3.456-3.38-6.404-6.399-11.476-21.384-38.35-13.713-49.316 2.285-3.264 8.084-6.62 22.64-4.813zm-17.644-61.787c21.334.471 38.21 8.452 50.158 23.72 9.164 11.711-.927 64.998-30.14 110.969a171.33 171.33 0 0 0-.886-1.117l-.37-.462c7.549-12.467 6.073-24.802 4.759-35.738-.54-4.488-1.05-8.727-.92-12.709.134-4.22.692-7.84 1.232-11.34.663-4.313 1.338-8.776 1.152-14.037.139-.552.195-1.204.122-1.978-.475-5.045-6.235-20.144-17.975-33.81-6.422-7.475-15.787-15.84-28.574-21.482 5.5-1.14 13.021-2.203 21.442-2.016zM66.674 175.778c-5.9 7.094-9.974 5.734-11.314 5.288-8.73-2.912-18.86-21.364-27.791-50.624-7.728-25.318-12.244-50.777-12.602-57.916-1.128-22.578 4.345-38.313 16.268-46.769 19.404-13.76 51.306-5.524 64.125-1.347-.184.182-.376.352-.558.537-21.036 21.244-20.537 57.54-20.485 59.759-.002.856.07 2.068.168 3.735.362 6.105 1.036 17.467-.764 30.334-1.672 11.957 2.014 23.66 10.111 32.109a36.275 36.275 0 0 0 2.617 2.468c-3.604 3.86-11.437 12.396-19.775 22.426zm22.479-29.993c-6.526-6.81-9.49-16.282-8.133-25.99 1.9-13.592 1.199-25.43.822-31.79-.053-.89-.1-1.67-.127-2.285 3.073-2.725 17.314-10.355 27.47-8.028 4.634 1.061 7.458 4.217 8.632 9.645 6.076 28.103.804 39.816-3.432 49.229-.873 1.939-1.698 3.772-2.402 5.668l-.546 1.466c-1.382 3.706-2.668 7.152-3.465 10.424-6.938-.02-13.687-2.984-18.819-8.34zm1.065 37.9c-2.026-.506-3.848-1.385-4.917-2.114.893-.42 2.482-.992 5.238-1.56 13.337-2.745 15.397-4.683 19.895-10.394 1.031-1.31 2.2-2.794 3.819-4.602l.002-.002c2.411-2.7 3.514-2.242 5.514-1.412 1.621.67 3.2 2.702 3.84 4.938.303 1.056.643 3.06-.47 4.62-9.396 13.156-23.088 12.987-32.921 10.526zm69.799 64.952c-16.316 3.496-22.093-4.829-25.9-14.346-2.457-6.144-3.665-33.85-2.808-64.447.011-.407-.047-.8-.159-1.17a15.444 15.444 0 0 0-.456-2.162c-1.274-4.452-4.379-8.176-8.104-9.72-1.48-.613-4.196-1.738-7.46-.903.696-2.868 1.903-6.107 3.212-9.614l.549-1.475c.618-1.663 1.394-3.386 2.214-5.21 4.433-9.848 10.504-23.337 3.915-53.81-2.468-11.414-10.71-16.988-23.204-15.693-7.49.775-14.343 3.797-17.761 5.53-.735.372-1.407.732-2.035 1.082.954-11.5 4.558-32.992 18.04-46.59 8.489-8.56 19.794-12.788 33.568-12.56 27.14.444 44.544 14.372 54.366 25.979 8.464 10.001 13.047 
20.076 14.876 25.51-13.755-1.399-23.11 1.316-27.852 8.096-10.317 14.748 5.644 43.372 13.315 57.129 1.407 2.521 2.621 4.7 3.003 5.626 2.498 6.054 5.732 10.096 8.093 13.046.724.904 1.426 1.781 1.96 2.547-4.166 1.201-11.649 3.976-10.967 17.847-.55 6.96-4.461 39.546-6.448 51.059-2.623 15.21-8.22 20.875-23.957 24.25zm68.104-77.936c-4.26 1.977-11.389 3.46-18.161 3.779-7.48.35-11.288-.838-12.184-1.569-.42-8.644 2.797-9.547 6.202-10.503.535-.15 1.057-.297 1.561-.473.313.255.656.508 1.032.756 6.012 3.968 16.735 4.396 31.874 1.271l.166-.033c-2.042 1.909-5.536 4.471-10.49 6.772z'
|
||||
fill='#FFF'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function MySQLIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
width='64'
|
||||
height='64'
|
||||
viewBox='0 0 25.6 25.6'
|
||||
>
|
||||
<path
|
||||
d='M179.076 94.886c-3.568-.1-6.336.268-8.656 1.25-.668.27-1.74.27-1.828 1.116.357.355.4.936.713 1.428.535.893 1.473 2.096 2.32 2.72l2.855 2.053c1.74 1.07 3.703 1.695 5.398 2.766.982.625 1.963 1.428 2.945 2.098.5.357.803.938 1.428 1.16v-.135c-.312-.4-.402-.98-.713-1.428l-1.34-1.293c-1.293-1.74-2.9-3.258-4.64-4.506-1.428-.982-4.55-2.32-5.13-3.97l-.088-.1c.98-.1 2.14-.447 3.078-.715 1.518-.4 2.9-.312 4.46-.713l2.143-.625v-.4c-.803-.803-1.383-1.874-2.23-2.632-2.275-1.963-4.775-3.882-7.363-5.488-1.383-.892-3.168-1.473-4.64-2.23-.537-.268-1.428-.402-1.74-.848-.805-.98-1.25-2.275-1.83-3.436l-3.658-7.763c-.803-1.74-1.295-3.48-2.275-5.086-4.596-7.585-9.594-12.18-17.268-16.687-1.65-.937-3.613-1.34-5.7-1.83l-3.346-.18c-.715-.312-1.428-1.16-2.053-1.562-2.543-1.606-9.102-5.086-10.977-.5-1.205 2.9 1.785 5.755 2.8 7.228.76 1.026 1.74 2.186 2.277 3.346.3.758.4 1.562.713 2.365.713 1.963 1.383 4.15 2.32 5.98.5.937 1.025 1.92 1.65 2.767.357.5.982.714 1.115 1.517-.625.893-.668 2.23-1.025 3.347-1.607 5.042-.982 11.288 1.293 15 .715 1.115 2.4 3.57 4.686 2.632 2.008-.803 1.56-3.346 2.14-5.577.135-.535.045-.892.312-1.25v.1l1.83 3.703c1.383 2.186 3.793 4.462 5.8 5.98 1.07.803 1.918 2.187 3.256 2.677v-.135h-.088c-.268-.4-.67-.58-1.027-.892-.803-.803-1.695-1.785-2.32-2.677-1.873-2.498-3.523-5.265-4.996-8.12-.715-1.383-1.34-2.9-1.918-4.283-.27-.536-.27-1.34-.715-1.606-.67.98-1.65 1.83-2.143 3.034-.848 1.918-.936 4.283-1.248 6.737-.18.045-.1 0-.18.1-1.426-.356-1.918-1.83-2.453-3.078-1.338-3.168-1.562-8.254-.402-11.913.312-.937 1.652-3.882 1.117-4.774-.27-.848-1.16-1.338-1.652-2.008-.58-.848-1.203-1.918-1.605-2.855-1.07-2.5-1.605-5.265-2.766-7.764-.537-1.16-1.473-2.365-2.232-3.435-.848-1.205-1.783-2.053-2.453-3.48-.223-.5-.535-1.294-.178-1.83.088-.357.268-.5.623-.58.58-.5 2.232.134 2.812.4 1.65.67 3.033 1.294 4.416 2.23.625.446 1.295 1.294 2.098 1.518h.938c1.428.312 3.033.1 4.37.5 2.365.76 4.506 1.874 6.426 3.08 5.844 3.703 10.664 8.968 13.92 15.26.535 1.026.758 1.963 1.25 3.034.938 2.187 2.098 4.417 3.033 6.56.938 2.097 1.83 4.24 3.168 5.98.67.937 3.346 1.427 4.55 1.918.893.4 2.275.76 3.08 1.25 1.516.937 3.033 2.008 4.46 3.034.713.534 2.945 1.65 3.078 2.54zm-45.5-38.772a7.09 7.09 0 0 0-1.828.223v.1h.088c.357.714.982 1.205 1.428 1.83l1.027 2.142.088-.1c.625-.446.938-1.16.938-2.23-.268-.312-.312-.625-.535-.937-.268-.446-.848-.67-1.206-1.026z'
|
||||
transform='matrix(.390229 0 0 .38781 -46.300037 -16.856717)'
|
||||
fillRule='evenodd'
|
||||
fill='#00678c'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function OpenRouterIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
|
||||
@@ -55,6 +55,7 @@ const BLOCK_COLORS = {
|
||||
DEFAULT: '#2F55FF',
|
||||
LOOP: '#2FB3FF',
|
||||
PARALLEL: '#FEE12B',
|
||||
WHILE: '#57D9A3',
|
||||
} as const
|
||||
|
||||
const TAG_PREFIXES = {
|
||||
@@ -294,6 +295,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
const blocks = useWorkflowStore((state) => state.blocks)
|
||||
const loops = useWorkflowStore((state) => state.loops)
|
||||
const parallels = useWorkflowStore((state) => state.parallels)
|
||||
const whiles = useWorkflowStore((state) => state.whiles)
|
||||
const edges = useWorkflowStore((state) => state.edges)
|
||||
const workflowId = useWorkflowRegistry((state) => state.activeWorkflowId)
|
||||
|
||||
@@ -321,7 +323,11 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
const blockConfig = getBlock(sourceBlock.type)
|
||||
|
||||
if (!blockConfig) {
|
||||
if (sourceBlock.type === 'loop' || sourceBlock.type === 'parallel') {
|
||||
if (
|
||||
sourceBlock.type === 'loop' ||
|
||||
sourceBlock.type === 'parallel' ||
|
||||
sourceBlock.type === 'while'
|
||||
) {
|
||||
const mockConfig = {
|
||||
outputs: {
|
||||
results: 'array',
|
||||
@@ -467,13 +473,26 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
}
|
||||
|
||||
const serializer = new Serializer()
|
||||
const serializedWorkflow = serializer.serializeWorkflow(blocks, edges, loops, parallels)
|
||||
const serializedWorkflow = serializer.serializeWorkflow(blocks, edges, loops, parallels, whiles)
|
||||
|
||||
const accessibleBlockIds = BlockPathCalculator.findAllPathNodes(
|
||||
serializedWorkflow.connections,
|
||||
blockId
|
||||
)
|
||||
|
||||
// If editing a while block condition, also include children inside the while container
|
||||
const sourceBlock = blocks[blockId]
|
||||
if (sourceBlock && sourceBlock.type === 'while') {
|
||||
const whileCfg = whiles[blockId]
|
||||
if (whileCfg && Array.isArray(whileCfg.nodes)) {
|
||||
whileCfg.nodes.forEach((childId: string) => {
|
||||
if (!accessibleBlockIds.includes(childId)) {
|
||||
accessibleBlockIds.push(childId)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const starterBlock = Object.values(blocks).find((block) => block.type === 'starter')
|
||||
if (starterBlock && !accessibleBlockIds.includes(starterBlock.id)) {
|
||||
accessibleBlockIds.push(starterBlock.id)
|
||||
@@ -551,6 +570,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
}
|
||||
|
||||
let parallelBlockGroup: BlockTagGroup | null = null
|
||||
let whileBlockGroup: BlockTagGroup | null = null
|
||||
const containingParallel = Object.entries(parallels || {}).find(([_, parallel]) =>
|
||||
parallel.nodes.includes(blockId)
|
||||
)
|
||||
@@ -579,6 +599,27 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
}
|
||||
}
|
||||
|
||||
const containingWhile = Object.entries(whiles || {}).find(([_, w]) => w.nodes.includes(blockId))
|
||||
let containingWhileBlockId: string | null = null
|
||||
if (containingWhile) {
|
||||
const [whileId] = containingWhile
|
||||
containingWhileBlockId = whileId
|
||||
const contextualTags: string[] = ['index']
|
||||
|
||||
const containingWhileBlock = blocks[whileId]
|
||||
if (containingWhileBlock) {
|
||||
const whileBlockName = containingWhileBlock.name || containingWhileBlock.type
|
||||
|
||||
whileBlockGroup = {
|
||||
blockName: whileBlockName,
|
||||
blockId: whileId,
|
||||
blockType: 'while',
|
||||
tags: contextualTags,
|
||||
distance: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const blockTagGroups: BlockTagGroup[] = []
|
||||
const allBlockTags: string[] = []
|
||||
|
||||
@@ -589,11 +630,16 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
const blockConfig = getBlock(accessibleBlock.type)
|
||||
|
||||
if (!blockConfig) {
|
||||
if (accessibleBlock.type === 'loop' || accessibleBlock.type === 'parallel') {
|
||||
if (
|
||||
accessibleBlock.type === 'loop' ||
|
||||
accessibleBlock.type === 'parallel' ||
|
||||
accessibleBlock.type === 'while'
|
||||
) {
|
||||
// Skip this block if it's the containing loop/parallel block - we'll handle it with contextual tags
|
||||
if (
|
||||
accessibleBlockId === containingLoopBlockId ||
|
||||
accessibleBlockId === containingParallelBlockId
|
||||
accessibleBlockId === containingParallelBlockId ||
|
||||
accessibleBlockId === containingWhileBlockId
|
||||
) {
|
||||
continue
|
||||
}
|
||||
@@ -729,6 +775,9 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
if (parallelBlockGroup) {
|
||||
finalBlockTagGroups.push(parallelBlockGroup)
|
||||
}
|
||||
if (whileBlockGroup) {
|
||||
finalBlockTagGroups.push(whileBlockGroup)
|
||||
}
|
||||
|
||||
blockTagGroups.sort((a, b) => a.distance - b.distance)
|
||||
finalBlockTagGroups.push(...blockTagGroups)
|
||||
@@ -740,13 +789,16 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
if (parallelBlockGroup) {
|
||||
contextualTags.push(...parallelBlockGroup.tags)
|
||||
}
|
||||
if (whileBlockGroup) {
|
||||
contextualTags.push(...whileBlockGroup.tags)
|
||||
}
|
||||
|
||||
return {
|
||||
tags: [...variableTags, ...contextualTags, ...allBlockTags],
|
||||
variableInfoMap,
|
||||
blockTagGroups: finalBlockTagGroups,
|
||||
}
|
||||
}, [blocks, edges, loops, parallels, blockId, activeSourceBlockId, workflowVariables])
|
||||
}, [blocks, edges, loops, parallels, whiles, blockId, activeSourceBlockId, workflowVariables])
|
||||
|
||||
const filteredTags = useMemo(() => {
|
||||
if (!searchTerm) return tags
|
||||
@@ -806,9 +858,11 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
})
|
||||
} else {
|
||||
const path = tagParts.slice(1).join('.')
|
||||
// Handle contextual tags for loop/parallel blocks (single words like 'index', 'currentItem')
|
||||
// Handle contextual tags for loop/parallel/while blocks (single words like 'index', 'currentItem')
|
||||
if (
|
||||
(group.blockType === 'loop' || group.blockType === 'parallel') &&
|
||||
(group.blockType === 'loop' ||
|
||||
group.blockType === 'parallel' ||
|
||||
group.blockType === 'while') &&
|
||||
tagParts.length === 1
|
||||
) {
|
||||
directTags.push({
|
||||
@@ -912,7 +966,9 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
}
|
||||
} else if (
|
||||
blockGroup &&
|
||||
(blockGroup.blockType === 'loop' || blockGroup.blockType === 'parallel')
|
||||
(blockGroup.blockType === 'loop' ||
|
||||
blockGroup.blockType === 'parallel' ||
|
||||
blockGroup.blockType === 'while')
|
||||
) {
|
||||
if (!tag.includes('.') && ['index', 'currentItem', 'items'].includes(tag)) {
|
||||
processedTag = `${blockGroup.blockType}.${tag}`
|
||||
@@ -1283,6 +1339,8 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
blockColor = BLOCK_COLORS.LOOP
|
||||
} else if (group.blockType === 'parallel') {
|
||||
blockColor = BLOCK_COLORS.PARALLEL
|
||||
} else if (group.blockType === 'while') {
|
||||
blockColor = BLOCK_COLORS.WHILE
|
||||
}
|
||||
|
||||
return (
|
||||
@@ -1305,7 +1363,9 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
let tagIcon = group.blockName.charAt(0).toUpperCase()
|
||||
|
||||
if (
|
||||
(group.blockType === 'loop' || group.blockType === 'parallel') &&
|
||||
(group.blockType === 'loop' ||
|
||||
group.blockType === 'parallel' ||
|
||||
group.blockType === 'while') &&
|
||||
!nestedTag.key.includes('.')
|
||||
) {
|
||||
if (nestedTag.key === 'index') {
|
||||
|
||||
@@ -391,6 +391,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
edges: mergedEdges,
loops: workflowState.loops || existing.loops || {},
parallels: workflowState.parallels || existing.parallels || {},
whiles: workflowState.whiles || existing.whiles || {},
lastSaved: workflowState.lastSaved || existing.lastSaved || Date.now(),
isDeployed: workflowState.isDeployed ?? existing.isDeployed ?? false,
deployedAt: workflowState.deployedAt || existing.deployedAt,
@@ -532,6 +533,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
edges: mergedEdges,
loops: workflowState.loops || existing.loops || {},
parallels: workflowState.parallels || existing.parallels || {},
whiles: workflowState.whiles || existing.whiles || {},
lastSaved: workflowState.lastSaved || existing.lastSaved || Date.now(),
isDeployed: workflowState.isDeployed ?? existing.isDeployed ?? false,
deployedAt: workflowState.deployedAt || existing.deployedAt,
@@ -5,6 +5,7 @@
export enum BlockType {
PARALLEL = 'parallel',
LOOP = 'loop',
WHILE = 'while',
ROUTER = 'router',
CONDITION = 'condition',
FUNCTION = 'function',
@@ -6,6 +6,7 @@ import { FunctionBlockHandler } from '@/executor/handlers/function/function-hand
import { GenericBlockHandler } from '@/executor/handlers/generic/generic-handler'
import { LoopBlockHandler } from '@/executor/handlers/loop/loop-handler'
import { ParallelBlockHandler } from '@/executor/handlers/parallel/parallel-handler'
// import { WhileBlockHandler } from '@/executor/handlers/while/while-handler'
import { ResponseBlockHandler } from '@/executor/handlers/response/response-handler'
import { RouterBlockHandler } from '@/executor/handlers/router/router-handler'
import { TriggerBlockHandler } from '@/executor/handlers/trigger/trigger-handler'
@@ -20,6 +21,7 @@ export {
GenericBlockHandler,
LoopBlockHandler,
ParallelBlockHandler,
// WhileBlockHandler,
ResponseBlockHandler,
RouterBlockHandler,
TriggerBlockHandler,

0
apps/sim/executor/handlers/while/while-handler.ts
Normal file
@@ -750,6 +750,7 @@ describe('Executor', () => {
],
loops: {},
parallels: {},
whiles: {},
}

const executor = new Executor(routerWorkflow)
@@ -1066,6 +1067,7 @@ describe('Executor', () => {
],
loops: {},
parallels: {},
whiles: {},
}

const executor = new Executor(workflow)
@@ -12,6 +12,7 @@ import {
|
||||
GenericBlockHandler,
|
||||
LoopBlockHandler,
|
||||
ParallelBlockHandler,
|
||||
// WhileBlockHandler,
|
||||
ResponseBlockHandler,
|
||||
RouterBlockHandler,
|
||||
TriggerBlockHandler,
|
||||
@@ -19,6 +20,7 @@ import {
|
||||
} from '@/executor/handlers'
|
||||
import { LoopManager } from '@/executor/loops/loops'
|
||||
import { ParallelManager } from '@/executor/parallels/parallels'
|
||||
// import { WhileManager } from '@/executor/whiles/whiles'
|
||||
import { PathTracker } from '@/executor/path/path'
|
||||
import { InputResolver } from '@/executor/resolver/resolver'
|
||||
import type {
|
||||
@@ -73,6 +75,7 @@ export class Executor {
|
||||
private resolver: InputResolver
|
||||
private loopManager: LoopManager
|
||||
private parallelManager: ParallelManager
|
||||
// private whileManager: WhileManager
|
||||
private pathTracker: PathTracker
|
||||
private blockHandlers: BlockHandler[]
|
||||
private workflowInput: any
|
||||
@@ -134,6 +137,7 @@ export class Executor {
|
||||
|
||||
this.loopManager = new LoopManager(this.actualWorkflow.loops || {})
|
||||
this.parallelManager = new ParallelManager(this.actualWorkflow.parallels || {})
|
||||
// this.whileManager = new WhileManager(this.actualWorkflow.whiles || {})
|
||||
|
||||
// Calculate accessible blocks for consistent reference resolution
|
||||
const accessibleBlocksMap = BlockPathCalculator.calculateAccessibleBlocksForWorkflow(
|
||||
@@ -159,6 +163,7 @@ export class Executor {
|
||||
new ApiBlockHandler(),
|
||||
new LoopBlockHandler(this.resolver, this.pathTracker),
|
||||
new ParallelBlockHandler(this.resolver, this.pathTracker),
|
||||
// new WhileBlockHandler(this.resolver, this.pathTracker),
|
||||
new ResponseBlockHandler(),
|
||||
new WorkflowBlockHandler(),
|
||||
new GenericBlockHandler(),
|
||||
@@ -417,6 +422,9 @@ export class Executor {
|
||||
// Process parallel iterations - similar to loops but conceptually for parallel execution
|
||||
await this.parallelManager.processParallelIterations(context)
|
||||
|
||||
// Process while iterations - similar concept to loops but condition-driven
|
||||
// await this.whileManager.processWhileIterations(context)
|
||||
|
||||
// Continue execution for any newly activated paths
|
||||
// Only stop execution if there are no more blocks to execute
|
||||
const updatedNextLayer = this.getNextExecutionLayer(context)
|
||||
@@ -560,6 +568,7 @@ export class Executor {
|
||||
}
|
||||
await this.loopManager.processLoopIterations(context)
|
||||
await this.parallelManager.processParallelIterations(context)
|
||||
// await this.whileManager.processWhileIterations(context)
|
||||
const nextLayer = this.getNextExecutionLayer(context)
|
||||
setPendingBlocks(nextLayer)
|
||||
|
||||
@@ -759,6 +768,13 @@ export class Executor {
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize while iterations
|
||||
if (this.actualWorkflow.whiles) {
|
||||
for (const whileId of Object.keys(this.actualWorkflow.whiles)) {
|
||||
context.loopIterations.set(whileId, 0)
|
||||
}
|
||||
}
|
||||
|
||||
// Determine which block to initialize as the starting point
|
||||
let initBlock: SerializedBlock | undefined
|
||||
if (startBlockId) {
|
||||
@@ -1207,6 +1223,20 @@ export class Executor {
|
||||
return loopCompleted
|
||||
}
|
||||
|
||||
// Special handling for while-start-source connections
|
||||
if (conn.sourceHandle === 'while-start-source') {
|
||||
// Activated when while block executes
|
||||
return sourceExecuted
|
||||
}
|
||||
|
||||
// Special handling for while-end-source connections
|
||||
if (conn.sourceHandle === 'while-end-source') {
|
||||
// Activated when while block has completed (condition false or max iterations)
|
||||
const whileState = context.blockStates.get(conn.source)
|
||||
const whileCompleted = Boolean(whileState?.output?.completed)
|
||||
return sourceExecuted && whileCompleted
|
||||
}
|
||||
|
||||
// Special handling for parallel-start-source connections
|
||||
if (conn.sourceHandle === 'parallel-start-source') {
|
||||
// This block is connected to a parallel's start output
|
||||
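The new `while-end-source` check above gates downstream activation on the while container reporting completion, while `while-start-source` only requires the container to have executed. A self-contained sketch of that activation rule, not the executor's actual implementation:

```typescript
// Illustrative only: mirrors the connection checks shown in the hunk above.
type WhileOutput = { completed?: boolean }

// A connection from 'while-end-source' becomes active once the while block
// has run AND has finished (condition false or max iterations reached).
function isWhileEndConnectionActive(
  sourceExecuted: boolean,
  whileState: { output?: WhileOutput } | undefined
): boolean {
  return sourceExecuted && Boolean(whileState?.output?.completed)
}

// A connection from 'while-start-source' only needs the container to have executed.
function isWhileStartConnectionActive(sourceExecuted: boolean): boolean {
  return sourceExecuted
}
```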
@@ -1643,7 +1673,11 @@ export class Executor {
|
||||
context.blockLogs.push(blockLog)
|
||||
|
||||
// Skip console logging for infrastructure blocks like loops and parallels
|
||||
if (block.metadata?.id !== BlockType.LOOP && block.metadata?.id !== BlockType.PARALLEL) {
|
||||
if (
|
||||
block.metadata?.id !== BlockType.LOOP &&
|
||||
block.metadata?.id !== BlockType.PARALLEL &&
|
||||
block.metadata?.id !== BlockType.WHILE
|
||||
) {
|
||||
// Determine iteration context for this block
|
||||
let iterationCurrent: number | undefined
|
||||
let iterationTotal: number | undefined
|
||||
@@ -1755,7 +1789,11 @@ export class Executor {
|
||||
context.blockLogs.push(blockLog)
|
||||
|
||||
// Skip console logging for infrastructure blocks like loops and parallels
|
||||
if (block.metadata?.id !== BlockType.LOOP && block.metadata?.id !== BlockType.PARALLEL) {
|
||||
if (
|
||||
block.metadata?.id !== BlockType.LOOP &&
|
||||
block.metadata?.id !== BlockType.PARALLEL
|
||||
// block.metadata?.id !== BlockType.WHILE
|
||||
) {
|
||||
// Determine iteration context for this block
|
||||
let iterationCurrent: number | undefined
|
||||
let iterationTotal: number | undefined
|
||||
@@ -1927,6 +1965,7 @@ export class Executor {
|
||||
block?.metadata?.id === BlockType.CONDITION ||
|
||||
block?.metadata?.id === BlockType.LOOP ||
|
||||
block?.metadata?.id === BlockType.PARALLEL
|
||||
// block?.metadata?.id === BlockType.WHILE
|
||||
) {
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -23,6 +23,7 @@ export class InputResolver {
|
||||
private blockByNormalizedName: Map<string, SerializedBlock>
|
||||
private loopsByBlockId: Map<string, string> // Maps block ID to containing loop ID
|
||||
private parallelsByBlockId: Map<string, string> // Maps block ID to containing parallel ID
|
||||
// private whilesByBlockId: Map<string, string> // Maps block ID to containing while ID
|
||||
|
||||
constructor(
|
||||
private workflow: SerializedWorkflow,
|
||||
@@ -70,6 +71,14 @@ export class InputResolver {
|
||||
this.parallelsByBlockId.set(blockId, parallelId)
|
||||
}
|
||||
}
|
||||
|
||||
// Create efficient while lookup map
|
||||
// this.whilesByBlockId = new Map()
|
||||
// for (const [whileId, whileCfg] of Object.entries(workflow.whiles || {})) {
|
||||
// for (const blockId of whileCfg.nodes) {
|
||||
// this.whilesByBlockId.set(blockId, whileId)
|
||||
// }
|
||||
// }
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1103,6 +1112,17 @@ export class InputResolver {
|
||||
}
|
||||
}
|
||||
|
||||
// Special case: blocks in the same while can reference each other
|
||||
// const currentBlockWhile = this.whilesByBlockId.get(currentBlockId)
|
||||
// if (currentBlockWhile) {
|
||||
// const whileCfg = this.workflow.whiles?.[currentBlockWhile]
|
||||
// if (whileCfg) {
|
||||
// for (const nodeId of whileCfg.nodes) {
|
||||
// accessibleBlocks.add(nodeId)
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
return accessibleBlocks
|
||||
}
|
||||
|
||||
@@ -1867,4 +1887,14 @@ export class InputResolver {
|
||||
getContainingParallelId(blockId: string): string | undefined {
|
||||
return this.parallelsByBlockId.get(blockId)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the containing while ID for a block
|
||||
* @param blockId - The ID of the block
|
||||
* @returns The containing while ID or undefined if not in a while
|
||||
*/
|
||||
getContainingWhileId(blockId: string): string | undefined {
|
||||
// return this.whilesByBlockId.get(blockId)
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
@@ -78,6 +78,7 @@ export class Routing {
// Flow control blocks
[BlockType.PARALLEL]: BlockCategory.FLOW_CONTROL,
[BlockType.LOOP]: BlockCategory.FLOW_CONTROL,
[BlockType.WHILE]: BlockCategory.FLOW_CONTROL,
[BlockType.WORKFLOW]: BlockCategory.FLOW_CONTROL,

// Routing blocks
@@ -139,6 +140,8 @@ export class Routing {
'parallel-end-source',
'loop-start-source',
'loop-end-source',
'while-start-source',
'while-end-source',
]

if (flowControlHandles.includes(sourceHandle || '')) {
@@ -145,6 +145,17 @@ export interface ExecutionContext {
}
>

// While execution tracking
whileExecutions?: Map<
string,
{
maxIterations: number
loopType: 'while' | 'doWhile'
executionResults: Map<string, any> // iteration_0, iteration_1, etc.
currentIteration: number
}
>

// Mapping for virtual parallel block IDs to their original blocks
parallelBlockMapping?: Map<
string,

0
apps/sim/executor/whiles/whiles.test.ts
Normal file
0
apps/sim/executor/whiles/whiles.ts
Normal file
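The `whileExecutions` map added to `ExecutionContext` above is not yet populated anywhere in this changeset (the WhileManager remains commented out), so the following is only a hypothetical sketch of how a manager might seed it; the config field names and the default cap are assumptions:

```typescript
// Hypothetical sketch - none of this is taken from the actual while handler.
interface WhileConfig {
  id: string
  maxIterations?: number // assumed field name
  loopType?: 'while' | 'doWhile'
}

function initWhileExecutions(whiles: Record<string, WhileConfig>) {
  const whileExecutions = new Map<
    string,
    {
      maxIterations: number
      loopType: 'while' | 'doWhile'
      executionResults: Map<string, any>
      currentIteration: number
    }
  >()
  for (const [whileId, cfg] of Object.entries(whiles)) {
    whileExecutions.set(whileId, {
      maxIterations: cfg.maxIterations ?? 100, // assumed safety cap
      loopType: cfg.loopType ?? 'while',
      executionResults: new Map(), // keyed iteration_0, iteration_1, ...
      currentIteration: 0,
    })
  }
  return whileExecutions
}
```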
@@ -562,8 +562,8 @@ export function useCollaborativeWorkflow() {

const blockConfig = getBlock(type)

// Handle loop/parallel blocks that don't use BlockConfig
if (!blockConfig && (type === 'loop' || type === 'parallel')) {
// Handle loop/parallel/while blocks that don't use BlockConfig
if (!blockConfig && (type === 'loop' || type === 'parallel' || type === 'while')) {
// For loop/parallel blocks, use empty subBlocks and outputs
const completeBlockData = {
id,
@@ -1129,6 +1129,16 @@ export function useCollaborativeWorkflow() {
[executeQueuedOperation, workflowStore]
)

// UI-only while type toggle (no server op yet)
const collaborativeUpdateWhileType = useCallback(
(whileId: string, whileType: 'while' | 'doWhile') => {
const currentBlock = workflowStore.blocks[whileId]
if (!currentBlock || currentBlock.type !== 'while') return
workflowStore.updateWhileType(whileId, whileType)
},
[workflowStore]
)

// Unified iteration management functions - count and collection only
const collaborativeUpdateIterationCount = useCallback(
(nodeId: string, iterationType: 'loop' | 'parallel', count: number) => {
@@ -1321,6 +1331,7 @@ export function useCollaborativeWorkflow() {
// Collaborative loop/parallel operations
collaborativeUpdateLoopType,
collaborativeUpdateParallelType,
collaborativeUpdateWhileType,

// Unified iteration operations
collaborativeUpdateIterationCount,
@@ -12,6 +12,7 @@ export interface NormalizedWorkflowData {
edges: any[]
loops: Record<string, any>
parallels: Record<string, any>
whiles: Record<string, any>
isFromNormalizedTables: boolean // Flag to indicate source (true = normalized tables, false = deployed state)
}

@@ -49,6 +50,7 @@ export async function loadDeployedWorkflowState(
edges: deployedState.edges || [],
loops: deployedState.loops || {},
parallels: deployedState.parallels || {},
whiles: deployedState.whiles || {},
isFromNormalizedTables: false, // Flag to indicate this came from deployed state
}
} catch (error) {
@@ -126,6 +128,7 @@ export async function loadWorkflowFromNormalizedTables(
// Convert subflows to loops and parallels
const loops: Record<string, any> = {}
const parallels: Record<string, any> = {}
const whiles: Record<string, any> = {}

subflows.forEach((subflow) => {
const config = subflow.config || {}
@@ -140,6 +143,11 @@ export async function loadWorkflowFromNormalizedTables(
id: subflow.id,
...config,
}
} else if (subflow.type === SUBFLOW_TYPES.WHILE) {
whiles[subflow.id] = {
id: subflow.id,
...config,
}
} else {
logger.warn(`Unknown subflow type: ${subflow.type} for subflow ${subflow.id}`)
}
@@ -150,6 +158,7 @@ export async function loadWorkflowFromNormalizedTables(
edges: edgesArray,
loops,
parallels,
whiles,
isFromNormalizedTables: true,
}
} catch (error) {
@@ -238,6 +247,15 @@ export async function saveWorkflowToNormalizedTables(
})
})

Object.values(state.whiles || {}).forEach((whileSubflow) => {
subflowInserts.push({
id: whileSubflow.id,
workflowId: workflowId,
type: SUBFLOW_TYPES.WHILE,
config: whileSubflow,
})
})

if (subflowInserts.length > 0) {
await tx.insert(workflowSubflows).values(subflowInserts)
}
@@ -251,6 +269,7 @@ export async function saveWorkflowToNormalizedTables(
edges: state.edges,
loops: state.loops || {},
parallels: state.parallels || {},
whiles: state.whiles || {},
lastSaved: Date.now(),
isDeployed: state.isDeployed,
deployedAt: state.deployedAt,
@@ -303,6 +322,7 @@ export async function migrateWorkflowToNormalizedTables(
edges: jsonState.edges || [],
loops: jsonState.loops || {},
parallels: jsonState.parallels || {},
whiles: jsonState.whiles || {},
lastSaved: jsonState.lastSaved,
isDeployed: jsonState.isDeployed,
deployedAt: jsonState.deployedAt,

@@ -15,6 +15,7 @@ export function buildWorkflowStateForTemplate(workflowId: string) {
// Generate loops and parallels in the same format as deployment
const loops = workflowStore.generateLoopBlocks()
const parallels = workflowStore.generateParallelBlocks()
const whiles = workflowStore.generateWhileBlocks()

// Build the state object in the same format as deployment
const state = {
@@ -22,6 +23,7 @@ export function buildWorkflowStateForTemplate(workflowId: string) {
edges,
loops,
parallels,
whiles,
lastSaved: Date.now(),
}

@@ -330,6 +330,32 @@ export function hasWorkflowChanged(
}
}

// 6. Compare whiles

const currentWhiles = currentState.whiles || {}
const deployedWhiles = deployedState.whiles || {}

const currentWhileIds = Object.keys(currentWhiles).sort()
const deployedWhileIds = Object.keys(deployedWhiles).sort()

if (
currentWhileIds.length !== deployedWhileIds.length ||
normalizedStringify(currentWhileIds) !== normalizedStringify(deployedWhileIds)
) {
return true
}

// Compare each while with normalized values
for (const whileId of currentWhileIds) {
const normalizedCurrentWhile = normalizeValue(currentWhiles[whileId])
const normalizedDeployedWhile = normalizeValue(deployedWhiles[whileId])

if (
normalizedStringify(normalizedCurrentWhile) !== normalizedStringify(normalizedDeployedWhile)
) {
return true
}
}
return false
}
@@ -69,7 +69,6 @@
"@react-email/components": "^0.0.34",
"@sentry/nextjs": "^9.15.0",
"@trigger.dev/sdk": "4.0.1",
"@types/pg": "8.15.5",
"@types/three": "0.177.0",
"@vercel/og": "^0.6.5",
"@vercel/speed-insights": "^1.2.0",
@@ -98,13 +97,11 @@
"lenis": "^1.2.3",
"lucide-react": "^0.479.0",
"mammoth": "^1.9.0",
"mysql2": "3.14.3",
"next": "^15.3.2",
"next-runtime-env": "3.3.0",
"next-themes": "^0.4.6",
"openai": "^4.91.1",
"pdf-parse": "^1.1.1",
"pg": "8.16.3",
"postgres": "^3.4.5",
"prismjs": "^1.30.0",
"react": "19.1.0",
@@ -3,7 +3,7 @@ import { createLogger } from '@/lib/logs/console/logger'
import { getBlock } from '@/blocks'
import type { SubBlockConfig } from '@/blocks/types'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
import type { BlockState, Loop, Parallel } from '@/stores/workflows/workflow/types'
import type { BlockState, Loop, Parallel, While } from '@/stores/workflows/workflow/types'
import { getTool } from '@/tools/utils'

const logger = createLogger('Serializer')
@@ -27,6 +27,7 @@ export class Serializer {
edges: Edge[],
loops: Record<string, Loop>,
parallels?: Record<string, Parallel>,
whiles?: Record<string, While>,
validateRequired = false
): SerializedWorkflow {
return {
@@ -40,12 +41,13 @@ export class Serializer {
})),
loops,
parallels,
whiles,
}
}

private serializeBlock(block: BlockState, validateRequired = false): SerializedBlock {
// Special handling for subflow blocks (loops, parallels, etc.)
if (block.type === 'loop' || block.type === 'parallel') {
if (block.type === 'loop' || block.type === 'parallel' || block.type === 'while') {
return {
id: block.id,
position: block.position,
@@ -58,9 +60,15 @@ export class Serializer {
metadata: {
id: block.type,
name: block.name,
description: block.type === 'loop' ? 'Loop container' : 'Parallel container',
description:
block.type === 'loop'
? 'Loop container'
: block.type === 'parallel'
? 'Parallel container'
: 'While container',
category: 'subflow',
color: block.type === 'loop' ? '#3b82f6' : '#8b5cf6',
color:
block.type === 'loop' ? '#3b82f6' : block.type === 'parallel' ? '#8b5cf6' : '#FF9F43', // Orange color for while blocks
},
enabled: block.enabled,
}
@@ -211,8 +219,8 @@ export class Serializer {

private extractParams(block: BlockState): Record<string, any> {
// Special handling for subflow blocks (loops, parallels, etc.)
if (block.type === 'loop' || block.type === 'parallel') {
return {} // Loop and parallel blocks don't have traditional params
if (block.type === 'loop' || block.type === 'parallel' || block.type === 'while') {
return {} // Loop, parallel, and while blocks don't have traditional params
}

const blockConfig = getBlock(block.type)
@@ -359,13 +367,15 @@ export class Serializer {
}

// Special handling for subflow blocks (loops, parallels, etc.)
if (blockType === 'loop' || blockType === 'parallel') {
if (blockType === 'loop' || blockType === 'parallel' || blockType === 'while') {
return {
id: serializedBlock.id,
type: blockType,
name: serializedBlock.metadata?.name || (blockType === 'loop' ? 'Loop' : 'Parallel'),
name:
serializedBlock.metadata?.name ||
(blockType === 'loop' ? 'Loop' : blockType === 'parallel' ? 'Parallel' : 'While'),
position: serializedBlock.position,
subBlocks: {}, // Loops and parallels don't have traditional subBlocks
subBlocks: {}, // Loops, parallels, and whiles don't have traditional subBlocks
outputs: serializedBlock.outputs,
enabled: serializedBlock.enabled ?? true,
data: serializedBlock.config.params, // Preserve the data (parallelType, count, etc.)

@@ -7,6 +7,7 @@ export interface SerializedWorkflow {
connections: SerializedConnection[]
loops: Record<string, SerializedLoop>
parallels?: Record<string, SerializedParallel>
whiles?: Record<string, SerializedWhile>
}

export interface SerializedConnection {
@@ -55,3 +56,10 @@ export interface SerializedParallel {
count?: number // Number of parallel executions for count-based parallel
parallelType?: 'count' | 'collection' // Explicit parallel type to avoid inference bugs
}

export interface SerializedWhile {
id: string
nodes: string[]
iterations: number
whileType?: 'while' | 'doWhile'
}
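For orientation, a value conforming to the new SerializedWhile interface might look like the following; the block IDs are hypothetical, and the 1000-iteration figure simply mirrors the DEFAULT_WHILE_ITERATIONS safety cap introduced later in this diff.

const exampleWhile: SerializedWhile = {
  id: 'while-1',                    // hypothetical while block ID
  nodes: ['agent-1', 'function-1'], // hypothetical child block IDs inside the while container
  iterations: 1000,                 // safety cap on iterations, not an exact count
  whileType: 'doWhile',             // optional; conversion defaults to 'while' when omitted
}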
@@ -61,6 +61,7 @@ async function insertAutoConnectEdge(
enum SubflowType {
LOOP = 'loop',
PARALLEL = 'parallel',
WHILE = 'while',
}

// Helper function to check if a block type is a subflow type
@@ -134,6 +135,7 @@ export async function getWorkflowState(workflowId: string) {
edges: normalizedData.edges,
loops: normalizedData.loops,
parallels: normalizedData.parallels,
whiles: normalizedData.whiles,
lastSaved: Date.now(),
isDeployed: workflowData[0].isDeployed || false,
deployedAt: workflowData[0].deployedAt,
@@ -280,7 +282,7 @@ async function handleBlockOperationTx(
throw insertError
}

// Auto-create subflow entry for loop/parallel blocks
// Auto-create subflow entry for loop/parallel/while blocks
if (isSubflowBlockType(payload.type)) {
try {
const subflowConfig =
@@ -672,7 +674,7 @@ async function handleBlockOperationTx(
throw insertError
}

// Auto-create subflow entry for loop/parallel blocks
// Auto-create subflow entry for loop/parallel/while blocks
if (isSubflowBlockType(payload.type)) {
try {
const subflowConfig =
@@ -832,7 +834,7 @@ async function handleSubflowOperationTx(
updatedAt: new Date(),
})
.where(and(eq(workflowBlocks.id, payload.id), eq(workflowBlocks.workflowId, workflowId)))
} else if (payload.type === 'parallel') {
} else if (payload.type === 'parallel' || payload.type === 'while') {
// Update the parallel block's data properties
const blockData = {
...payload.config,
@@ -18,7 +18,11 @@ import type {
SyncControl,
WorkflowState,
} from '@/stores/workflows/workflow/types'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
import {
generateLoopBlocks,
generateParallelBlocks,
generateWhileBlocks,
} from '@/stores/workflows/workflow/utils'

const logger = createLogger('WorkflowStore')

@@ -35,6 +39,7 @@ const initialState = {
deploymentStatuses: {},
needsRedeployment: false,
hasActiveWebhook: false,
whiles: {},
history: {
past: [],
present: {
@@ -43,6 +48,7 @@ const initialState = {
edges: [],
loops: {},
parallels: {},
whiles: {},
isDeployed: false,
isPublished: false,
},
@@ -106,8 +112,8 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
}
) => {
const blockConfig = getBlock(type)
// For custom nodes like loop and parallel that don't use BlockConfig
if (!blockConfig && (type === 'loop' || type === 'parallel')) {
// For custom nodes like loop, parallel, and while that don't use BlockConfig
if (!blockConfig && (type === 'loop' || type === 'parallel' || type === 'while')) {
// Merge parentId and extent into data if provided
const nodeData = {
...data,
@@ -136,6 +142,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
edges: [...get().edges],
loops: get().generateLoopBlocks(),
parallels: get().generateParallelBlocks(),
whiles: get().generateWhileBlocks(),
}

set(newState)
@@ -187,6 +194,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
edges: [...get().edges],
loops: get().generateLoopBlocks(),
parallels: get().generateParallelBlocks(),
whiles: get().generateWhileBlocks(),
}

set(newState)
@@ -287,6 +295,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
edges: [...get().edges],
loops: { ...get().loops },
parallels: { ...get().parallels },
whiles: { ...get().whiles },
}

logger.info('[WorkflowStore/updateParentId] Updated parentId relationship:', {
@@ -316,6 +325,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
edges: [...get().edges].filter((edge) => edge.source !== id && edge.target !== id),
loops: { ...get().loops },
parallels: { ...get().parallels },
whiles: { ...get().whiles },
}

// Find and remove all child blocks if this is a parent node
@@ -407,6 +417,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
edges: newEdges,
loops: generateLoopBlocks(get().blocks),
parallels: get().generateParallelBlocks(),
whiles: get().generateWhileBlocks(),
}

set(newState)
@@ -430,6 +441,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
edges: newEdges,
loops: generateLoopBlocks(get().blocks),
parallels: get().generateParallelBlocks(),
whiles: get().generateWhileBlocks(),
}

set(newState)
@@ -452,6 +464,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
edges: [],
loops: {},
parallels: {},
whiles: {},
isDeployed: false,
isPublished: false,
},
@@ -484,6 +497,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
edges: state.edges,
loops: state.loops,
parallels: state.parallels,
whiles: state.whiles,
lastSaved: state.lastSaved,
isDeployed: state.isDeployed,
deployedAt: state.deployedAt,
@@ -505,6 +519,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
edges: [...get().edges],
loops: { ...get().loops },
parallels: { ...get().parallels },
whiles: { ...get().whiles },
}

set(newState)
@@ -557,6 +572,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
edges: [...get().edges],
loops: get().generateLoopBlocks(),
parallels: get().generateParallelBlocks(),
whiles: get().generateWhileBlocks(),
}

// Update the subblock store with the duplicated values
@@ -641,6 +657,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
edges: [...get().edges],
loops: { ...get().loops },
parallels: { ...get().parallels },
whiles: { ...get().whiles },
}

// Update references in subblock store
@@ -914,6 +931,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
edges: deployedState.edges,
loops: deployedState.loops || {},
parallels: deployedState.parallels || {},
whiles: deployedState.whiles || {},
isDeployed: true,
needsRedeployment: false,
hasActiveWebhook: false, // Reset webhook status
@@ -1037,6 +1055,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
edges: filteredEdges,
loops: { ...get().loops },
parallels: { ...get().parallels },
whiles: { ...get().whiles },
}

set(newState)
@@ -1106,6 +1125,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
edges: [...get().edges],
loops: { ...get().loops },
parallels: generateParallelBlocks(newBlocks), // Regenerate parallels
whiles: { ...get().whiles },
}

set(newState)
@@ -1134,6 +1154,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
edges: [...get().edges],
loops: { ...get().loops },
parallels: generateParallelBlocks(newBlocks), // Regenerate parallels
whiles: { ...get().whiles },
}

set(newState)
@@ -1162,6 +1183,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
edges: [...get().edges],
loops: { ...get().loops },
parallels: generateParallelBlocks(newBlocks), // Regenerate parallels
whiles: { ...get().whiles },
}

set(newState)
@@ -1174,6 +1196,39 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
generateParallelBlocks: () => {
return generateParallelBlocks(get().blocks)
},

// While block methods implementation (UI-only toggle)
updateWhileType: (whileId: string, whileType) => {
const block = get().blocks[whileId]
if (!block || block.type !== 'while') return

const newBlocks = {
...get().blocks,
[whileId]: {
...block,
data: {
...block.data,
whileType,
},
},
}

const newState = {
blocks: newBlocks,
edges: [...get().edges],
loops: { ...get().loops },
parallels: { ...get().parallels },
whiles: get().generateWhileBlocks(),
}

set(newState)
pushHistory(set, get, newState, `Update while type`)
get().updateLastSaved()
},

generateWhileBlocks: () => {
return generateWhileBlocks(get().blocks)
},
})),
{ name: 'workflow-store' }
)
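A brief sketch, not part of the diff, of how a canvas component might drive the new UI-only toggle through the hook and store additions above; the block ID is hypothetical.

const { collaborativeUpdateWhileType } = useCollaborativeWorkflow()

// Switch a while container to post-check (do-while) semantics.
// Per the hook comment above, this only updates local store state; there is no server operation yet.
collaborativeUpdateWhileType('while-1', 'doWhile') // 'while-1' is a hypothetical block ID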
@@ -5,8 +5,16 @@ import type { DeploymentStatus } from '@/stores/workflows/registry/types'
export const SUBFLOW_TYPES = {
LOOP: 'loop',
PARALLEL: 'parallel',
WHILE: 'while',
} as const

export const WHILE_TYPES = {
WHILE: 'while',
DO_WHILE: 'doWhile',
} as const

export type WhileType = (typeof WHILE_TYPES)[keyof typeof WHILE_TYPES]

export type SubflowType = (typeof SUBFLOW_TYPES)[keyof typeof SUBFLOW_TYPES]

export function isValidSubflowType(type: string): type is SubflowType {
@@ -26,12 +34,18 @@ export interface ParallelConfig {
parallelType?: 'count' | 'collection'
}

export interface WhileConfig {
nodes: string[]
iterations: number
whileType: WhileType
}

// Generic subflow interface
export interface Subflow {
id: string
workflowId: string
type: SubflowType
config: LoopConfig | ParallelConfig
config: LoopConfig | ParallelConfig | WhileConfig
createdAt: Date
updatedAt: Date
}
@@ -58,6 +72,9 @@ export interface BlockData {
// Parallel-specific properties
parallelType?: 'collection' | 'count' // Type of parallel execution

// While-specific properties
whileType?: WhileType

// Container node type (for ReactFlow node type determination)
type?: string
}
@@ -112,6 +129,13 @@ export interface ParallelBlock {
}
}

export interface While {
id: string
nodes: string[]
iterations: number
whileType: WhileType
}

export interface Loop {
id: string
nodes: string[]
@@ -134,6 +158,7 @@ export interface WorkflowState {
lastSaved?: number
loops: Record<string, Loop>
parallels: Record<string, Parallel>
whiles: Record<string, While>
lastUpdate?: number
// Legacy deployment fields (keeping for compatibility)
isDeployed?: boolean
@@ -196,8 +221,10 @@ export interface WorkflowActions {
updateParallelCount: (parallelId: string, count: number) => void
updateParallelCollection: (parallelId: string, collection: string) => void
updateParallelType: (parallelId: string, parallelType: 'count' | 'collection') => void
updateWhileType: (whileId: string, whileType: WhileType) => void
generateLoopBlocks: () => Record<string, Loop>
generateParallelBlocks: () => Record<string, Parallel>
generateWhileBlocks: () => Record<string, While>
setNeedsRedeploymentFlag: (needsRedeployment: boolean) => void
setWebhookStatus: (hasActiveWebhook: boolean) => void
revertToDeployedState: (deployedState: WorkflowState) => void
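As a quick illustration of the new types, a config satisfying WhileConfig could be written as below; the node IDs are hypothetical.

const cfg: WhileConfig = {
  nodes: ['child-block-1', 'child-block-2'], // hypothetical IDs of blocks nested in the while
  iterations: 1000,                          // upper bound used as a safety cap
  whileType: WHILE_TYPES.DO_WHILE,           // resolves to the 'doWhile' literal
}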
@@ -1,6 +1,7 @@
import type { BlockState, Loop, Parallel } from '@/stores/workflows/workflow/types'
import type { BlockState, Loop, Parallel, While } from '@/stores/workflows/workflow/types'

const DEFAULT_LOOP_ITERATIONS = 5
const DEFAULT_WHILE_ITERATIONS = 1000

/**
* Convert UI loop block to executor Loop format
@@ -39,6 +40,37 @@ export function convertLoopBlockToLoop(
}
}

/**
* Convert UI while block to executor While format
*
* @param whileBlockId - ID of the while block to convert
* @param blocks - Record of all blocks in the workflow
* @returns While object for execution engine or undefined if not a valid while
*/
export function convertWhileBlockToWhile(
whileBlockId: string,
blocks: Record<string, BlockState>
): While | undefined {
const whileBlock = blocks[whileBlockId]
if (!whileBlock || whileBlock.type !== 'while') return undefined

// Default iterations as a safety cap; higher for whiles
const iterations =
(whileBlock.data as any)?.iterations ||
(whileBlock.data as any)?.count ||
DEFAULT_WHILE_ITERATIONS

// Default whileType to 'while' when not provided
const whileType = (whileBlock.data as any)?.whileType || 'while'

return {
id: whileBlockId,
nodes: findChildNodes(whileBlockId, blocks),
iterations,
whileType,
}
}

/**
* Convert UI parallel block to executor Parallel format
*
@@ -162,3 +194,25 @@ export function generateParallelBlocks(

return parallels
}

/**
* Builds a complete collection of while blocks from the UI blocks
*
* @param blocks - Record of all blocks in the workflow
* @returns Record of While objects for execution engine
*/
export function generateWhileBlocks(blocks: Record<string, BlockState>): Record<string, While> {
const whiles: Record<string, While> = {}

// Find all while nodes
Object.entries(blocks)
.filter(([_, block]) => block.type === 'while')
.forEach(([id]) => {
const whileCfg = convertWhileBlockToWhile(id, blocks)
if (whileCfg) {
whiles[id] = whileCfg
}
})

return whiles
}
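A short usage sketch, assuming a blocks record is already in scope, showing how the two new helpers relate; the 'while-1' ID is hypothetical.

// Build the executor-facing whiles map from all UI blocks (as done by generateWhileBlocks callers in this diff).
const whiles = generateWhileBlocks(blocks) // blocks: Record<string, BlockState>

// Per-block form; returns undefined for anything that is not a while block.
const single = convertWhileBlockToWhile('while-1', blocks)
// When data.iterations/count are absent, iterations falls back to DEFAULT_WHILE_ITERATIONS (1000)
// and whileType falls back to 'while'.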
@@ -1,102 +0,0 @@
import type { MySQLDeleteParams, MySQLResponse } from '@/tools/mysql/types'
import type { ToolConfig } from '@/tools/types'

export const deleteTool: ToolConfig<MySQLDeleteParams, MySQLResponse> = {
id: 'mysql_delete',
name: 'MySQL Delete',
description: 'Delete records from MySQL database',
version: '1.0',

params: {
host: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'MySQL server hostname or IP address',
},
port: {
type: 'number',
required: true,
visibility: 'user-only',
description: 'MySQL server port (default: 3306)',
},
database: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Database name to connect to',
},
username: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Database username',
},
password: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Database password',
},
ssl: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'SSL connection mode (disabled, required, preferred)',
},
table: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Table name to delete from',
},
where: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'WHERE clause condition (without WHERE keyword)',
},
},

request: {
url: '/api/tools/mysql/delete',
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
host: params.host,
port: params.port,
database: params.database,
username: params.username,
password: params.password,
ssl: params.ssl || 'preferred',
table: params.table,
where: params.where,
}),
},

transformResponse: async (response: Response) => {
const data = await response.json()

if (!response.ok) {
throw new Error(data.error || 'MySQL delete failed')
}

return {
success: true,
output: {
message: data.message || 'Data deleted successfully',
rows: data.rows || [],
rowCount: data.rowCount || 0,
},
error: undefined,
}
},

outputs: {
message: { type: 'string', description: 'Operation status message' },
rows: { type: 'array', description: 'Array of deleted rows' },
rowCount: { type: 'number', description: 'Number of rows deleted' },
},
}
@@ -1,95 +0,0 @@
import type { MySQLExecuteParams, MySQLResponse } from '@/tools/mysql/types'
import type { ToolConfig } from '@/tools/types'

export const executeTool: ToolConfig<MySQLExecuteParams, MySQLResponse> = {
id: 'mysql_execute',
name: 'MySQL Execute',
description: 'Execute raw SQL query on MySQL database',
version: '1.0',

params: {
host: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'MySQL server hostname or IP address',
},
port: {
type: 'number',
required: true,
visibility: 'user-only',
description: 'MySQL server port (default: 3306)',
},
database: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Database name to connect to',
},
username: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Database username',
},
password: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Database password',
},
ssl: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'SSL connection mode (disabled, required, preferred)',
},
query: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Raw SQL query to execute',
},
},

request: {
url: '/api/tools/mysql/execute',
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
host: params.host,
port: params.port,
database: params.database,
username: params.username,
password: params.password,
ssl: params.ssl || 'preferred',
query: params.query,
}),
},

transformResponse: async (response: Response) => {
const data = await response.json()

if (!response.ok) {
throw new Error(data.error || 'MySQL execute failed')
}

return {
success: true,
output: {
message: data.message || 'Query executed successfully',
rows: data.rows || [],
rowCount: data.rowCount || 0,
},
error: undefined,
}
},

outputs: {
message: { type: 'string', description: 'Operation status message' },
rows: { type: 'array', description: 'Array of rows returned from the query' },
rowCount: { type: 'number', description: 'Number of rows affected' },
},
}
Some files were not shown because too many files have changed in this diff.