mirror of
https://github.com/simstudioai/sim.git
synced 2026-01-09 23:17:59 -05:00
Compare commits
14 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ed9b9ad83f | ||
|
|
766279bb8b | ||
|
|
1038e148c3 | ||
|
|
8b78200991 | ||
|
|
c8f4791582 | ||
|
|
6c9e0ec88b | ||
|
|
bbbf1c2941 | ||
|
|
efc487a845 | ||
|
|
5786909c5e | ||
|
|
833c5fefd5 | ||
|
|
79dd1ccb9f | ||
|
|
730164abee | ||
|
|
25b2c45ec0 | ||
|
|
780870c48e |
@@ -33,12 +33,15 @@
|
||||
"microsoft_planner",
|
||||
"microsoft_teams",
|
||||
"mistral_parse",
|
||||
"mysql",
|
||||
"notion",
|
||||
"onedrive",
|
||||
"openai",
|
||||
"outlook",
|
||||
"parallel_ai",
|
||||
"perplexity",
|
||||
"pinecone",
|
||||
"postgresql",
|
||||
"qdrant",
|
||||
"reddit",
|
||||
"s3",
|
||||
|
||||
180
apps/docs/content/docs/tools/mysql.mdx
Normal file
180
apps/docs/content/docs/tools/mysql.mdx
Normal file
@@ -0,0 +1,180 @@
|
||||
---
|
||||
title: MySQL
|
||||
description: Connect to MySQL database
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="mysql"
|
||||
color="#E0E0E0"
|
||||
icon={true}
|
||||
iconSvg={`<svg className="block-icon"
|
||||
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
|
||||
|
||||
viewBox='0 0 25.6 25.6'
|
||||
>
|
||||
<path
|
||||
d='M179.076 94.886c-3.568-.1-6.336.268-8.656 1.25-.668.27-1.74.27-1.828 1.116.357.355.4.936.713 1.428.535.893 1.473 2.096 2.32 2.72l2.855 2.053c1.74 1.07 3.703 1.695 5.398 2.766.982.625 1.963 1.428 2.945 2.098.5.357.803.938 1.428 1.16v-.135c-.312-.4-.402-.98-.713-1.428l-1.34-1.293c-1.293-1.74-2.9-3.258-4.64-4.506-1.428-.982-4.55-2.32-5.13-3.97l-.088-.1c.98-.1 2.14-.447 3.078-.715 1.518-.4 2.9-.312 4.46-.713l2.143-.625v-.4c-.803-.803-1.383-1.874-2.23-2.632-2.275-1.963-4.775-3.882-7.363-5.488-1.383-.892-3.168-1.473-4.64-2.23-.537-.268-1.428-.402-1.74-.848-.805-.98-1.25-2.275-1.83-3.436l-3.658-7.763c-.803-1.74-1.295-3.48-2.275-5.086-4.596-7.585-9.594-12.18-17.268-16.687-1.65-.937-3.613-1.34-5.7-1.83l-3.346-.18c-.715-.312-1.428-1.16-2.053-1.562-2.543-1.606-9.102-5.086-10.977-.5-1.205 2.9 1.785 5.755 2.8 7.228.76 1.026 1.74 2.186 2.277 3.346.3.758.4 1.562.713 2.365.713 1.963 1.383 4.15 2.32 5.98.5.937 1.025 1.92 1.65 2.767.357.5.982.714 1.115 1.517-.625.893-.668 2.23-1.025 3.347-1.607 5.042-.982 11.288 1.293 15 .715 1.115 2.4 3.57 4.686 2.632 2.008-.803 1.56-3.346 2.14-5.577.135-.535.045-.892.312-1.25v.1l1.83 3.703c1.383 2.186 3.793 4.462 5.8 5.98 1.07.803 1.918 2.187 3.256 2.677v-.135h-.088c-.268-.4-.67-.58-1.027-.892-.803-.803-1.695-1.785-2.32-2.677-1.873-2.498-3.523-5.265-4.996-8.12-.715-1.383-1.34-2.9-1.918-4.283-.27-.536-.27-1.34-.715-1.606-.67.98-1.65 1.83-2.143 3.034-.848 1.918-.936 4.283-1.248 6.737-.18.045-.1 0-.18.1-1.426-.356-1.918-1.83-2.453-3.078-1.338-3.168-1.562-8.254-.402-11.913.312-.937 1.652-3.882 1.117-4.774-.27-.848-1.16-1.338-1.652-2.008-.58-.848-1.203-1.918-1.605-2.855-1.07-2.5-1.605-5.265-2.766-7.764-.537-1.16-1.473-2.365-2.232-3.435-.848-1.205-1.783-2.053-2.453-3.48-.223-.5-.535-1.294-.178-1.83.088-.357.268-.5.623-.58.58-.5 2.232.134 2.812.4 1.65.67 3.033 1.294 4.416 2.23.625.446 1.295 1.294 2.098 1.518h.938c1.428.312 3.033.1 4.37.5 2.365.76 4.506 1.874 6.426 3.08 5.844 3.703 10.664 8.968 13.92 15.26.535 1.026.758 1.963 1.25 3.034.938 2.187 2.098 
4.417 3.033 6.56.938 2.097 1.83 4.24 3.168 5.98.67.937 3.346 1.427 4.55 1.918.893.4 2.275.76 3.08 1.25 1.516.937 3.033 2.008 4.46 3.034.713.534 2.945 1.65 3.078 2.54zm-45.5-38.772a7.09 7.09 0 0 0-1.828.223v.1h.088c.357.714.982 1.205 1.428 1.83l1.027 2.142.088-.1c.625-.446.938-1.16.938-2.23-.268-.312-.312-.625-.535-.937-.268-.446-.848-.67-1.206-1.026z'
|
||||
transform='matrix(.390229 0 0 .38781 -46.300037 -16.856717)'
|
||||
fillRule='evenodd'
|
||||
fill='#00678c'
|
||||
/>
|
||||
</svg>`}
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
The [MySQL](https://www.mysql.com/) tool enables you to connect to any MySQL database and perform a wide range of database operations directly within your agentic workflows. With secure connection handling and flexible configuration, you can easily manage and interact with your data.
|
||||
|
||||
With the MySQL tool, you can:
|
||||
|
||||
- **Query data**: Execute SELECT queries to retrieve data from your MySQL tables using the `mysql_query` operation.
|
||||
- **Insert records**: Add new rows to your tables with the `mysql_insert` operation by specifying the table and data to insert.
|
||||
- **Update records**: Modify existing data in your tables using the `mysql_update` operation, providing the table, new data, and WHERE conditions.
|
||||
- **Delete records**: Remove rows from your tables with the `mysql_delete` operation, specifying the table and WHERE conditions.
|
||||
- **Execute raw SQL**: Run any custom SQL command using the `mysql_execute` operation for advanced use cases.
|
||||
|
||||
The MySQL tool is ideal for scenarios where your agents need to interact with structured data—such as automating reporting, syncing data between systems, or powering data-driven workflows. It streamlines database access, making it easy to read, write, and manage your MySQL data programmatically.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Connect to any MySQL database to execute queries, manage data, and perform database operations. Supports SELECT, INSERT, UPDATE, DELETE operations with secure connection handling.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `mysql_query`
|
||||
|
||||
Execute a SELECT query on MySQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | MySQL server hostname or IP address |
|
||||
| `port` | number | Yes | MySQL server port \(default: 3306\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `query` | string | Yes | SQL SELECT query to execute |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Array of rows returned from the query |
|
||||
| `rowCount` | number | Number of rows returned |
|
||||
|
||||
### `mysql_insert`
|
||||
|
||||
Insert a new record into MySQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | MySQL server hostname or IP address |
|
||||
| `port` | number | Yes | MySQL server port \(default: 3306\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `table` | string | Yes | Table name to insert into |
|
||||
| `data` | object | Yes | Data to insert as key-value pairs |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Array of inserted rows |
|
||||
| `rowCount` | number | Number of rows inserted |
|
||||
|
||||
### `mysql_update`
|
||||
|
||||
Update existing records in MySQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | MySQL server hostname or IP address |
|
||||
| `port` | number | Yes | MySQL server port \(default: 3306\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `table` | string | Yes | Table name to update |
|
||||
| `data` | object | Yes | Data to update as key-value pairs |
|
||||
| `where` | string | Yes | WHERE clause condition \(without WHERE keyword\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Array of updated rows |
|
||||
| `rowCount` | number | Number of rows updated |
|
||||
|
||||
### `mysql_delete`
|
||||
|
||||
Delete records from MySQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | MySQL server hostname or IP address |
|
||||
| `port` | number | Yes | MySQL server port \(default: 3306\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `table` | string | Yes | Table name to delete from |
|
||||
| `where` | string | Yes | WHERE clause condition \(without WHERE keyword\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Array of deleted rows |
|
||||
| `rowCount` | number | Number of rows deleted |
|
||||
|
||||
### `mysql_execute`
|
||||
|
||||
Execute a raw SQL query on MySQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | MySQL server hostname or IP address |
|
||||
| `port` | number | Yes | MySQL server port \(default: 3306\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `query` | string | Yes | Raw SQL query to execute |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Array of rows returned from the query |
|
||||
| `rowCount` | number | Number of rows affected |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
- Category: `tools`
|
||||
- Type: `mysql`
|
||||
106
apps/docs/content/docs/tools/parallel_ai.mdx
Normal file
106
apps/docs/content/docs/tools/parallel_ai.mdx
Normal file
@@ -0,0 +1,106 @@
|
||||
---
|
||||
title: Parallel AI
|
||||
description: Search with Parallel AI
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="parallel_ai"
|
||||
color="#E0E0E0"
|
||||
icon={true}
|
||||
iconSvg={`<svg className="block-icon"
|
||||
|
||||
fill='currentColor'
|
||||
|
||||
|
||||
viewBox='0 0 271 270'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
>
|
||||
<path
|
||||
d='M267.804 105.65H193.828C194.026 106.814 194.187 107.996 194.349 109.178H76.6703C76.4546 110.736 76.2388 112.312 76.0591 113.87H1.63342C1.27387 116.198 0.950289 118.543 0.698608 120.925H75.3759C75.2501 122.483 75.1602 124.059 75.0703 125.617H195.949C196.003 126.781 196.057 127.962 196.093 129.144H270.68V125.384C270.195 118.651 269.242 112.061 267.804 105.65Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M195.949 144.401H75.0703C75.1422 145.977 75.2501 147.535 75.3759 149.093H0.698608C0.950289 151.457 1.2559 153.802 1.63342 156.148H76.0591C76.2388 157.724 76.4366 159.282 76.6703 160.84H194.349C194.187 162.022 194.008 163.186 193.828 164.367H267.804C269.242 157.957 270.195 151.367 270.68 144.634V140.874H196.093C196.057 142.055 196.003 143.219 195.949 144.401Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M190.628 179.642H80.3559C80.7514 181.218 81.1828 182.776 81.6143 184.334H9.30994C10.2448 186.715 11.2515 189.061 12.3121 191.389H83.7536C84.2749 192.965 84.7962 194.523 85.3535 196.08H185.594C185.163 197.262 184.732 198.426 184.282 199.608H254.519C258.6 192.177 261.98 184.316 264.604 176.114H191.455C191.185 177.296 190.898 178.46 190.61 179.642H190.628Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M177.666 214.883H93.3352C94.1082 216.458 94.9172 218.034 95.7441 219.574H29.8756C31.8351 221.992 33.8666 224.337 35.9699 226.63H99.6632C100.598 228.205 101.551 229.781 102.522 231.321H168.498C167.761 232.503 167.006 233.685 166.233 234.849H226.762C234.474 227.847 241.36 219.95 247.292 211.355H179.356C178.799 212.537 178.26 213.719 177.684 214.883H177.666Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M154.943 250.106H116.058C117.371 251.699 118.701 253.257 120.067 254.797H73.021C91.6094 264.431 112.715 269.946 135.096 270C135.24 270 135.366 270 135.492 270C135.618 270 135.761 270 135.887 270C164.04 269.911 190.178 261.28 211.805 246.56H157.748C156.813 247.742 155.878 248.924 154.925 250.088L154.943 250.106Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M116.059 19.9124H154.943C155.896 21.0764 156.831 22.2582 157.766 23.4401H211.823C190.179 8.72065 164.058 0.0895344 135.906 0C135.762 0 135.636 0 135.51 0C135.384 0 135.24 0 135.115 0C112.715 0.0716275 91.6277 5.56904 73.0393 15.2029H120.086C118.719 16.7429 117.389 18.3187 116.077 19.8945L116.059 19.9124Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M93.3356 55.1532H177.667C178.242 56.3171 178.799 57.499 179.339 58.6808H247.274C241.342 50.0855 234.457 42.1886 226.744 35.187H166.215C166.988 36.351 167.743 37.5328 168.48 38.7147H102.504C101.533 40.2726 100.58 41.8305 99.6456 43.4063H35.9523C33.831 45.6804 31.7996 48.0262 29.858 50.4616H95.7265C94.8996 52.0195 94.1086 53.5774 93.3176 55.1532H93.3356Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M80.3736 90.3758H190.646C190.933 91.5398 191.221 92.7216 191.491 93.9035H264.64C262.015 85.7021 258.636 77.841 254.555 70.4097H184.318C184.767 71.5736 185.199 72.7555 185.63 73.9373H85.3893C84.832 75.4952 84.2927 77.0531 83.7893 78.6289H12.3479C11.2872 80.9389 10.2805 83.2847 9.3457 85.6842H81.65C81.2186 87.2421 80.7871 88.8 80.3916 90.3758H80.3736Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
</svg>`}
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[Parallel AI](https://parallel.ai/) is an advanced web search and content extraction platform designed to deliver comprehensive, high-quality results for any query. By leveraging intelligent processing and large-scale data extraction, Parallel AI enables users and agents to access, analyze, and synthesize information from across the web with speed and accuracy.
|
||||
|
||||
With Parallel AI, you can:
|
||||
|
||||
- **Search the web intelligently**: Retrieve relevant, up-to-date information from a wide range of sources
|
||||
- **Extract and summarize content**: Get concise, meaningful excerpts from web pages and documents
|
||||
- **Customize search objectives**: Tailor queries to specific needs or questions for targeted results
|
||||
- **Process results at scale**: Handle large volumes of search results with advanced processing options
|
||||
- **Integrate with workflows**: Use Parallel AI within Sim to automate research, content gathering, and knowledge extraction
|
||||
- **Control output granularity**: Specify the number of results and the amount of content per result
|
||||
- **Secure API access**: Protect your searches and data with API key authentication
|
||||
|
||||
In Sim, the Parallel AI integration empowers your agents to perform web searches and extract content programmatically. This enables powerful automation scenarios such as real-time research, competitive analysis, content monitoring, and knowledge base creation. By connecting Sim with Parallel AI, you unlock the ability for agents to gather, process, and utilize web data as part of your automated workflows.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Search the web using Parallel AI's advanced search capabilities. Get comprehensive results with intelligent processing and content extraction.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `parallel_search`
|
||||
|
||||
Search the web using Parallel AI. Provides comprehensive search results with intelligent processing and content extraction.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `objective` | string | Yes | The search objective or question to answer |
|
||||
| `search_queries` | string | No | Optional comma-separated list of search queries to execute |
|
||||
| `processor` | string | No | Processing method: base or pro \(default: base\) |
|
||||
| `max_results` | number | No | Maximum number of results to return \(default: 5\) |
|
||||
| `max_chars_per_result` | number | No | Maximum characters per result \(default: 1500\) |
|
||||
| `apiKey` | string | Yes | Parallel AI API Key |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `results` | array | Search results with excerpts from relevant pages |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
- Category: `tools`
|
||||
- Type: `parallel_ai`
|
||||
188
apps/docs/content/docs/tools/postgresql.mdx
Normal file
188
apps/docs/content/docs/tools/postgresql.mdx
Normal file
@@ -0,0 +1,188 @@
|
||||
---
|
||||
title: PostgreSQL
|
||||
description: Connect to PostgreSQL database
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="postgresql"
|
||||
color="#336791"
|
||||
icon={true}
|
||||
iconSvg={`<svg className="block-icon"
|
||||
|
||||
|
||||
|
||||
viewBox='-4 0 264 264'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
preserveAspectRatio='xMinYMin meet'
|
||||
>
|
||||
<path d='M255.008 158.086c-1.535-4.649-5.556-7.887-10.756-8.664-2.452-.366-5.26-.21-8.583.475-5.792 1.195-10.089 1.65-13.225 1.738 11.837-19.985 21.462-42.775 27.003-64.228 8.96-34.689 4.172-50.492-1.423-57.64C233.217 10.847 211.614.683 185.552.372c-13.903-.17-26.108 2.575-32.475 4.549-5.928-1.046-12.302-1.63-18.99-1.738-12.537-.2-23.614 2.533-33.079 8.15-5.24-1.772-13.65-4.27-23.362-5.864-22.842-3.75-41.252-.828-54.718 8.685C6.622 25.672-.937 45.684.461 73.634c.444 8.874 5.408 35.874 13.224 61.48 4.492 14.718 9.282 26.94 14.237 36.33 7.027 13.315 14.546 21.156 22.987 23.972 4.731 1.576 13.327 2.68 22.368-4.85 1.146 1.388 2.675 2.767 4.704 4.048 2.577 1.625 5.728 2.953 8.875 3.74 11.341 2.835 21.964 2.126 31.027-1.848.056 1.612.099 3.152.135 4.482.06 2.157.12 4.272.199 6.25.537 13.374 1.447 23.773 4.143 31.049.148.4.347 1.01.557 1.657 1.345 4.118 3.594 11.012 9.316 16.411 5.925 5.593 13.092 7.308 19.656 7.308 3.292 0 6.433-.432 9.188-1.022 9.82-2.105 20.973-5.311 29.041-16.799 7.628-10.86 11.336-27.217 12.007-52.99.087-.729.167-1.425.244-2.088l.16-1.362 1.797.158.463.031c10.002.456 22.232-1.665 29.743-5.154 5.935-2.754 24.954-12.795 20.476-26.351' />
|
||||
<path
|
||||
d='M237.906 160.722c-29.74 6.135-31.785-3.934-31.785-3.934 31.4-46.593 44.527-105.736 33.2-120.211-30.904-39.485-84.399-20.811-85.292-20.327l-.287.052c-5.876-1.22-12.451-1.946-19.842-2.067-13.456-.22-23.664 3.528-31.41 9.402 0 0-95.43-39.314-90.991 49.444.944 18.882 27.064 142.873 58.218 105.422 11.387-13.695 22.39-25.274 22.39-25.274 5.464 3.63 12.006 5.482 18.864 4.817l.533-.452c-.166 1.7-.09 3.363.213 5.332-8.026 8.967-5.667 10.541-21.711 13.844-16.235 3.346-6.698 9.302-.471 10.86 7.549 1.887 25.013 4.561 36.813-11.958l-.47 1.885c3.144 2.519 5.352 16.383 4.982 28.952-.37 12.568-.617 21.197 1.86 27.937 2.479 6.74 4.948 21.905 26.04 17.386 17.623-3.777 26.756-13.564 28.027-29.89.901-11.606 2.942-9.89 3.07-20.267l1.637-4.912c1.887-15.733.3-20.809 11.157-18.448l2.64.232c7.99.363 18.45-1.286 24.589-4.139 13.218-6.134 21.058-16.377 8.024-13.686h.002'
|
||||
fill='#336791'
|
||||
/>
|
||||
<path
|
||||
d='M108.076 81.525c-2.68-.373-5.107-.028-6.335.902-.69.523-.904 1.129-.962 1.546-.154 1.105.62 2.327 1.096 2.957 1.346 1.784 3.312 3.01 5.258 3.28.282.04.563.058.842.058 3.245 0 6.196-2.527 6.456-4.392.325-2.336-3.066-3.893-6.355-4.35M196.86 81.599c-.256-1.831-3.514-2.353-6.606-1.923-3.088.43-6.082 1.824-5.832 3.659.2 1.427 2.777 3.863 5.827 3.863.258 0 .518-.017.78-.054 2.036-.282 3.53-1.575 4.24-2.32 1.08-1.136 1.706-2.402 1.591-3.225'
|
||||
fill='#FFF'
|
||||
/>
|
||||
<path
|
||||
d='M247.802 160.025c-1.134-3.429-4.784-4.532-10.848-3.28-18.005 3.716-24.453 1.142-26.57-.417 13.995-21.32 25.508-47.092 31.719-71.137 2.942-11.39 4.567-21.968 4.7-30.59.147-9.463-1.465-16.417-4.789-20.665-13.402-17.125-33.072-26.311-56.882-26.563-16.369-.184-30.199 4.005-32.88 5.183-5.646-1.404-11.801-2.266-18.502-2.376-12.288-.199-22.91 2.743-31.704 8.74-3.82-1.422-13.692-4.811-25.765-6.756-20.872-3.36-37.458-.814-49.294 7.571-14.123 10.006-20.643 27.892-19.38 53.16.425 8.501 5.269 34.653 12.913 59.698 10.062 32.964 21 51.625 32.508 55.464 1.347.449 2.9.763 4.613.763 4.198 0 9.345-1.892 14.7-8.33a529.832 529.832 0 0 1 20.261-22.926c4.524 2.428 9.494 3.784 14.577 3.92.01.133.023.266.035.398a117.66 117.66 0 0 0-2.57 3.175c-3.522 4.471-4.255 5.402-15.592 7.736-3.225.666-11.79 2.431-11.916 8.435-.136 6.56 10.125 9.315 11.294 9.607 4.074 1.02 7.999 1.523 11.742 1.523 9.103 0 17.114-2.992 23.516-8.781-.197 23.386.778 46.43 3.586 53.451 2.3 5.748 7.918 19.795 25.664 19.794 2.604 0 5.47-.303 8.623-.979 18.521-3.97 26.564-12.156 29.675-30.203 1.665-9.645 4.522-32.676 5.866-45.03 2.836.885 6.487 1.29 10.434 1.289 8.232 0 17.731-1.749 23.688-4.514 6.692-3.108 18.768-10.734 16.578-17.36zm-44.106-83.48c-.061 3.647-.563 6.958-1.095 10.414-.573 3.717-1.165 7.56-1.314 12.225-.147 4.54.42 9.26.968 13.825 1.108 9.22 2.245 18.712-2.156 28.078a36.508 36.508 0 0 1-1.95-4.009c-.547-1.326-1.735-3.456-3.38-6.404-6.399-11.476-21.384-38.35-13.713-49.316 2.285-3.264 8.084-6.62 22.64-4.813zm-17.644-61.787c21.334.471 38.21 8.452 50.158 23.72 9.164 11.711-.927 64.998-30.14 110.969a171.33 171.33 0 0 0-.886-1.117l-.37-.462c7.549-12.467 6.073-24.802 4.759-35.738-.54-4.488-1.05-8.727-.92-12.709.134-4.22.692-7.84 1.232-11.34.663-4.313 1.338-8.776 1.152-14.037.139-.552.195-1.204.122-1.978-.475-5.045-6.235-20.144-17.975-33.81-6.422-7.475-15.787-15.84-28.574-21.482 5.5-1.14 13.021-2.203 21.442-2.016zM66.674 175.778c-5.9 7.094-9.974 5.734-11.314 
5.288-8.73-2.912-18.86-21.364-27.791-50.624-7.728-25.318-12.244-50.777-12.602-57.916-1.128-22.578 4.345-38.313 16.268-46.769 19.404-13.76 51.306-5.524 64.125-1.347-.184.182-.376.352-.558.537-21.036 21.244-20.537 57.54-20.485 59.759-.002.856.07 2.068.168 3.735.362 6.105 1.036 17.467-.764 30.334-1.672 11.957 2.014 23.66 10.111 32.109a36.275 36.275 0 0 0 2.617 2.468c-3.604 3.86-11.437 12.396-19.775 22.426zm22.479-29.993c-6.526-6.81-9.49-16.282-8.133-25.99 1.9-13.592 1.199-25.43.822-31.79-.053-.89-.1-1.67-.127-2.285 3.073-2.725 17.314-10.355 27.47-8.028 4.634 1.061 7.458 4.217 8.632 9.645 6.076 28.103.804 39.816-3.432 49.229-.873 1.939-1.698 3.772-2.402 5.668l-.546 1.466c-1.382 3.706-2.668 7.152-3.465 10.424-6.938-.02-13.687-2.984-18.819-8.34zm1.065 37.9c-2.026-.506-3.848-1.385-4.917-2.114.893-.42 2.482-.992 5.238-1.56 13.337-2.745 15.397-4.683 19.895-10.394 1.031-1.31 2.2-2.794 3.819-4.602l.002-.002c2.411-2.7 3.514-2.242 5.514-1.412 1.621.67 3.2 2.702 3.84 4.938.303 1.056.643 3.06-.47 4.62-9.396 13.156-23.088 12.987-32.921 10.526zm69.799 64.952c-16.316 3.496-22.093-4.829-25.9-14.346-2.457-6.144-3.665-33.85-2.808-64.447.011-.407-.047-.8-.159-1.17a15.444 15.444 0 0 0-.456-2.162c-1.274-4.452-4.379-8.176-8.104-9.72-1.48-.613-4.196-1.738-7.46-.903.696-2.868 1.903-6.107 3.212-9.614l.549-1.475c.618-1.663 1.394-3.386 2.214-5.21 4.433-9.848 10.504-23.337 3.915-53.81-2.468-11.414-10.71-16.988-23.204-15.693-7.49.775-14.343 3.797-17.761 5.53-.735.372-1.407.732-2.035 1.082.954-11.5 4.558-32.992 18.04-46.59 8.489-8.56 19.794-12.788 33.568-12.56 27.14.444 44.544 14.372 54.366 25.979 8.464 10.001 13.047 20.076 14.876 25.51-13.755-1.399-23.11 1.316-27.852 8.096-10.317 14.748 5.644 43.372 13.315 57.129 1.407 2.521 2.621 4.7 3.003 5.626 2.498 6.054 5.732 10.096 8.093 13.046.724.904 1.426 1.781 1.96 2.547-4.166 1.201-11.649 3.976-10.967 17.847-.55 6.96-4.461 39.546-6.448 51.059-2.623 15.21-8.22 20.875-23.957 24.25zm68.104-77.936c-4.26 1.977-11.389 3.46-18.161 
3.779-7.48.35-11.288-.838-12.184-1.569-.42-8.644 2.797-9.547 6.202-10.503.535-.15 1.057-.297 1.561-.473.313.255.656.508 1.032.756 6.012 3.968 16.735 4.396 31.874 1.271l.166-.033c-2.042 1.909-5.536 4.471-10.49 6.772z'
|
||||
fill='#FFF'
|
||||
/>
|
||||
</svg>`}
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
The [PostgreSQL](https://www.postgresql.org/) tool enables you to connect to any PostgreSQL database and perform a wide range of database operations directly within your agentic workflows. With secure connection handling and flexible configuration, you can easily manage and interact with your data.
|
||||
|
||||
With the PostgreSQL tool, you can:
|
||||
|
||||
- **Query data**: Execute SELECT queries to retrieve data from your PostgreSQL tables using the `postgresql_query` operation.
|
||||
- **Insert records**: Add new rows to your tables with the `postgresql_insert` operation by specifying the table and data to insert.
|
||||
- **Update records**: Modify existing data in your tables using the `postgresql_update` operation, providing the table, new data, and WHERE conditions.
|
||||
- **Delete records**: Remove rows from your tables with the `postgresql_delete` operation, specifying the table and WHERE conditions.
|
||||
- **Execute raw SQL**: Run any custom SQL command using the `postgresql_execute` operation for advanced use cases.
|
||||
|
||||
The PostgreSQL tool is ideal for scenarios where your agents need to interact with structured data—such as automating reporting, syncing data between systems, or powering data-driven workflows. It streamlines database access, making it easy to read, write, and manage your PostgreSQL data programmatically.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Connect to any PostgreSQL database to execute queries, manage data, and perform database operations. Supports SELECT, INSERT, UPDATE, DELETE operations with secure connection handling.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `postgresql_query`
|
||||
|
||||
Execute a SELECT query on PostgreSQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | PostgreSQL server hostname or IP address |
|
||||
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `query` | string | Yes | SQL SELECT query to execute |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Array of rows returned from the query |
|
||||
| `rowCount` | number | Number of rows returned |
|
||||
|
||||
### `postgresql_insert`
|
||||
|
||||
Insert data into PostgreSQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | PostgreSQL server hostname or IP address |
|
||||
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `table` | string | Yes | Table name to insert data into |
|
||||
| `data` | object | Yes | Data object to insert \(key-value pairs\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Inserted data \(if RETURNING clause used\) |
|
||||
| `rowCount` | number | Number of rows inserted |
|
||||
|
||||
### `postgresql_update`
|
||||
|
||||
Update data in PostgreSQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | PostgreSQL server hostname or IP address |
|
||||
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `table` | string | Yes | Table name to update data in |
|
||||
| `data` | object | Yes | Data object with fields to update \(key-value pairs\) |
|
||||
| `where` | string | Yes | WHERE clause condition \(without WHERE keyword\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Updated data \(if RETURNING clause used\) |
|
||||
| `rowCount` | number | Number of rows updated |
|
||||
|
||||
### `postgresql_delete`
|
||||
|
||||
Delete data from PostgreSQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | PostgreSQL server hostname or IP address |
|
||||
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `table` | string | Yes | Table name to delete data from |
|
||||
| `where` | string | Yes | WHERE clause condition \(without WHERE keyword\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Deleted data \(if RETURNING clause used\) |
|
||||
| `rowCount` | number | Number of rows deleted |
|
||||
|
||||
### `postgresql_execute`
|
||||
|
||||
Execute a raw SQL query on PostgreSQL database
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `host` | string | Yes | PostgreSQL server hostname or IP address |
|
||||
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
|
||||
| `database` | string | Yes | Database name to connect to |
|
||||
| `username` | string | Yes | Database username |
|
||||
| `password` | string | Yes | Database password |
|
||||
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
|
||||
| `query` | string | Yes | Raw SQL query to execute |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `message` | string | Operation status message |
|
||||
| `rows` | array | Array of rows returned from the query |
|
||||
| `rowCount` | number | Number of rows affected |
|
||||
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
- Category: `tools`
|
||||
- Type: `postgresql`
|
||||
@@ -1,150 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { verifyCronAuth } from '@/lib/auth/internal'
|
||||
import { processDailyBillingCheck } from '@/lib/billing/core/billing'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('DailyBillingCron')
|
||||
|
||||
/**
|
||||
* Daily billing CRON job endpoint that checks individual billing periods
|
||||
*/
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const authError = verifyCronAuth(request, 'daily billing check')
|
||||
if (authError) {
|
||||
return authError
|
||||
}
|
||||
|
||||
logger.info('Starting daily billing check cron job')
|
||||
|
||||
const startTime = Date.now()
|
||||
|
||||
// Process overage billing for users and organizations with periods ending today
|
||||
const result = await processDailyBillingCheck()
|
||||
|
||||
const duration = Date.now() - startTime
|
||||
|
||||
if (result.success) {
|
||||
logger.info('Daily billing check completed successfully', {
|
||||
processedUsers: result.processedUsers,
|
||||
processedOrganizations: result.processedOrganizations,
|
||||
totalChargedAmount: result.totalChargedAmount,
|
||||
duration: `${duration}ms`,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
summary: {
|
||||
processedUsers: result.processedUsers,
|
||||
processedOrganizations: result.processedOrganizations,
|
||||
totalChargedAmount: result.totalChargedAmount,
|
||||
duration: `${duration}ms`,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
logger.error('Daily billing check completed with errors', {
|
||||
processedUsers: result.processedUsers,
|
||||
processedOrganizations: result.processedOrganizations,
|
||||
totalChargedAmount: result.totalChargedAmount,
|
||||
errorCount: result.errors.length,
|
||||
errors: result.errors,
|
||||
duration: `${duration}ms`,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
summary: {
|
||||
processedUsers: result.processedUsers,
|
||||
processedOrganizations: result.processedOrganizations,
|
||||
totalChargedAmount: result.totalChargedAmount,
|
||||
errorCount: result.errors.length,
|
||||
duration: `${duration}ms`,
|
||||
},
|
||||
errors: result.errors,
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
} catch (error) {
|
||||
logger.error('Fatal error in daily billing cron job', { error })
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Internal server error during daily billing check',
|
||||
details: error instanceof Error ? error.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* GET endpoint for manual testing and health checks
|
||||
*/
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const authError = verifyCronAuth(request, 'daily billing check health check')
|
||||
if (authError) {
|
||||
return authError
|
||||
}
|
||||
|
||||
const startTime = Date.now()
|
||||
const result = await processDailyBillingCheck()
|
||||
const duration = Date.now() - startTime
|
||||
|
||||
if (result.success) {
|
||||
logger.info('Daily billing check (GET) completed successfully', {
|
||||
processedUsers: result.processedUsers,
|
||||
processedOrganizations: result.processedOrganizations,
|
||||
totalChargedAmount: result.totalChargedAmount,
|
||||
duration: `${duration}ms`,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
summary: {
|
||||
processedUsers: result.processedUsers,
|
||||
processedOrganizations: result.processedOrganizations,
|
||||
totalChargedAmount: result.totalChargedAmount,
|
||||
duration: `${duration}ms`,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
logger.error('Daily billing check (GET) completed with errors', {
|
||||
processedUsers: result.processedUsers,
|
||||
processedOrganizations: result.processedOrganizations,
|
||||
totalChargedAmount: result.totalChargedAmount,
|
||||
errorCount: result.errors.length,
|
||||
errors: result.errors,
|
||||
duration: `${duration}ms`,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
summary: {
|
||||
processedUsers: result.processedUsers,
|
||||
processedOrganizations: result.processedOrganizations,
|
||||
totalChargedAmount: result.totalChargedAmount,
|
||||
errorCount: result.errors.length,
|
||||
duration: `${duration}ms`,
|
||||
},
|
||||
errors: result.errors,
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
} catch (error) {
|
||||
logger.error('Fatal error in daily billing (GET) cron job', { error })
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Internal server error during daily billing check',
|
||||
details: error instanceof Error ? error.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -143,7 +143,7 @@ async function generateChatTitleAsync(
|
||||
streamController?: ReadableStreamDefaultController<Uint8Array>
|
||||
): Promise<void> {
|
||||
try {
|
||||
logger.info(`[${requestId}] Starting async title generation for chat ${chatId}`)
|
||||
// logger.info(`[${requestId}] Starting async title generation for chat ${chatId}`)
|
||||
|
||||
const title = await generateChatTitle(userMessage)
|
||||
|
||||
@@ -167,7 +167,7 @@ async function generateChatTitleAsync(
|
||||
logger.debug(`[${requestId}] Sent title_updated event to client: "${title}"`)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Generated title for chat ${chatId}: "${title}"`)
|
||||
// logger.info(`[${requestId}] Generated title for chat ${chatId}: "${title}"`)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Failed to generate title for chat ${chatId}:`, error)
|
||||
// Don't throw - this is a background operation
|
||||
@@ -229,21 +229,21 @@ export async function POST(req: NextRequest) {
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`[${tracker.requestId}] Processing copilot chat request`, {
|
||||
userId: authenticatedUserId,
|
||||
workflowId,
|
||||
chatId,
|
||||
mode,
|
||||
stream,
|
||||
createNewChat,
|
||||
messageLength: message.length,
|
||||
hasImplicitFeedback: !!implicitFeedback,
|
||||
provider: provider || 'openai',
|
||||
hasConversationId: !!conversationId,
|
||||
depth,
|
||||
prefetch,
|
||||
origin: requestOrigin,
|
||||
})
|
||||
// logger.info(`[${tracker.requestId}] Processing copilot chat request`, {
|
||||
// userId: authenticatedUserId,
|
||||
// workflowId,
|
||||
// chatId,
|
||||
// mode,
|
||||
// stream,
|
||||
// createNewChat,
|
||||
// messageLength: message.length,
|
||||
// hasImplicitFeedback: !!implicitFeedback,
|
||||
// provider: provider || 'openai',
|
||||
// hasConversationId: !!conversationId,
|
||||
// depth,
|
||||
// prefetch,
|
||||
// origin: requestOrigin,
|
||||
// })
|
||||
|
||||
// Handle chat context
|
||||
let currentChat: any = null
|
||||
@@ -285,7 +285,7 @@ export async function POST(req: NextRequest) {
|
||||
// Process file attachments if present
|
||||
const processedFileContents: any[] = []
|
||||
if (fileAttachments && fileAttachments.length > 0) {
|
||||
logger.info(`[${tracker.requestId}] Processing ${fileAttachments.length} file attachments`)
|
||||
// logger.info(`[${tracker.requestId}] Processing ${fileAttachments.length} file attachments`)
|
||||
|
||||
for (const attachment of fileAttachments) {
|
||||
try {
|
||||
@@ -296,7 +296,7 @@ export async function POST(req: NextRequest) {
|
||||
}
|
||||
|
||||
// Download file from S3
|
||||
logger.info(`[${tracker.requestId}] Downloading file: ${attachment.s3_key}`)
|
||||
// logger.info(`[${tracker.requestId}] Downloading file: ${attachment.s3_key}`)
|
||||
let fileBuffer: Buffer
|
||||
if (USE_S3_STORAGE) {
|
||||
fileBuffer = await downloadFromS3WithConfig(attachment.s3_key, S3_COPILOT_CONFIG)
|
||||
@@ -309,9 +309,9 @@ export async function POST(req: NextRequest) {
|
||||
const fileContent = createAnthropicFileContent(fileBuffer, attachment.media_type)
|
||||
if (fileContent) {
|
||||
processedFileContents.push(fileContent)
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Processed file: ${attachment.filename} (${attachment.media_type})`
|
||||
)
|
||||
// logger.info(
|
||||
// `[${tracker.requestId}] Processed file: ${attachment.filename} (${attachment.media_type})`
|
||||
// )
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
@@ -424,27 +424,7 @@ export async function POST(req: NextRequest) {
|
||||
...(requestOrigin ? { origin: requestOrigin } : {}),
|
||||
}
|
||||
|
||||
// Log the payload being sent to the streaming endpoint
|
||||
try {
|
||||
logger.info(`[${tracker.requestId}] Sending payload to sim agent streaming endpoint`, {
|
||||
url: `${SIM_AGENT_API_URL}/api/chat-completion-streaming`,
|
||||
provider: providerToUse,
|
||||
mode,
|
||||
stream,
|
||||
workflowId,
|
||||
hasConversationId: !!effectiveConversationId,
|
||||
depth: typeof effectiveDepth === 'number' ? effectiveDepth : undefined,
|
||||
prefetch: typeof effectivePrefetch === 'boolean' ? effectivePrefetch : undefined,
|
||||
messagesCount: requestPayload.messages.length,
|
||||
...(requestOrigin ? { origin: requestOrigin } : {}),
|
||||
})
|
||||
// Full payload as JSON string
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Full streaming payload: ${JSON.stringify(requestPayload)}`
|
||||
)
|
||||
} catch (e) {
|
||||
logger.warn(`[${tracker.requestId}] Failed to log payload preview for streaming endpoint`, e)
|
||||
}
|
||||
// Log the payload being sent to the streaming endpoint (logs currently disabled)
|
||||
|
||||
const simAgentResponse = await fetch(`${SIM_AGENT_API_URL}/api/chat-completion-streaming`, {
|
||||
method: 'POST',
|
||||
@@ -475,7 +455,7 @@ export async function POST(req: NextRequest) {
|
||||
|
||||
// If streaming is requested, forward the stream and update chat later
|
||||
if (stream && simAgentResponse.body) {
|
||||
logger.info(`[${tracker.requestId}] Streaming response from sim agent`)
|
||||
// logger.info(`[${tracker.requestId}] Streaming response from sim agent`)
|
||||
|
||||
// Create user message to save
|
||||
const userMessage = {
|
||||
@@ -493,7 +473,7 @@ export async function POST(req: NextRequest) {
|
||||
let assistantContent = ''
|
||||
const toolCalls: any[] = []
|
||||
let buffer = ''
|
||||
let isFirstDone = true
|
||||
const isFirstDone = true
|
||||
let responseIdFromStart: string | undefined
|
||||
let responseIdFromDone: string | undefined
|
||||
// Track tool call progress to identify a safe done event
|
||||
@@ -515,30 +495,30 @@ export async function POST(req: NextRequest) {
|
||||
|
||||
// Start title generation in parallel if needed
|
||||
if (actualChatId && !currentChat?.title && conversationHistory.length === 0) {
|
||||
logger.info(`[${tracker.requestId}] Starting title generation with stream updates`, {
|
||||
chatId: actualChatId,
|
||||
hasTitle: !!currentChat?.title,
|
||||
conversationLength: conversationHistory.length,
|
||||
message: message.substring(0, 100) + (message.length > 100 ? '...' : ''),
|
||||
})
|
||||
// logger.info(`[${tracker.requestId}] Starting title generation with stream updates`, {
|
||||
// chatId: actualChatId,
|
||||
// hasTitle: !!currentChat?.title,
|
||||
// conversationLength: conversationHistory.length,
|
||||
// message: message.substring(0, 100) + (message.length > 100 ? '...' : ''),
|
||||
// })
|
||||
generateChatTitleAsync(actualChatId, message, tracker.requestId, controller).catch(
|
||||
(error) => {
|
||||
logger.error(`[${tracker.requestId}] Title generation failed:`, error)
|
||||
}
|
||||
)
|
||||
} else {
|
||||
logger.debug(`[${tracker.requestId}] Skipping title generation`, {
|
||||
chatId: actualChatId,
|
||||
hasTitle: !!currentChat?.title,
|
||||
conversationLength: conversationHistory.length,
|
||||
reason: !actualChatId
|
||||
? 'no chatId'
|
||||
: currentChat?.title
|
||||
? 'already has title'
|
||||
: conversationHistory.length > 0
|
||||
? 'not first message'
|
||||
: 'unknown',
|
||||
})
|
||||
// logger.debug(`[${tracker.requestId}] Skipping title generation`, {
|
||||
// chatId: actualChatId,
|
||||
// hasTitle: !!currentChat?.title,
|
||||
// conversationLength: conversationHistory.length,
|
||||
// reason: !actualChatId
|
||||
// ? 'no chatId'
|
||||
// : currentChat?.title
|
||||
// ? 'already has title'
|
||||
// : conversationHistory.length > 0
|
||||
// ? 'not first message'
|
||||
// : 'unknown',
|
||||
// })
|
||||
}
|
||||
|
||||
// Forward the sim agent stream and capture assistant response
|
||||
@@ -549,7 +529,7 @@ export async function POST(req: NextRequest) {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) {
|
||||
logger.info(`[${tracker.requestId}] Stream reading completed`)
|
||||
// logger.info(`[${tracker.requestId}] Stream reading completed`)
|
||||
break
|
||||
}
|
||||
|
||||
@@ -559,9 +539,9 @@ export async function POST(req: NextRequest) {
|
||||
controller.enqueue(value)
|
||||
} catch (error) {
|
||||
// Client disconnected - stop reading from sim agent
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Client disconnected, stopping stream processing`
|
||||
)
|
||||
// logger.info(
|
||||
// `[${tracker.requestId}] Client disconnected, stopping stream processing`
|
||||
// )
|
||||
reader.cancel() // Stop reading from sim agent
|
||||
break
|
||||
}
|
||||
@@ -608,15 +588,15 @@ export async function POST(req: NextRequest) {
|
||||
break
|
||||
|
||||
case 'tool_call':
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Tool call ${event.data?.partial ? '(partial)' : '(complete)'}:`,
|
||||
{
|
||||
id: event.data?.id,
|
||||
name: event.data?.name,
|
||||
arguments: event.data?.arguments,
|
||||
blockIndex: event.data?._blockIndex,
|
||||
}
|
||||
)
|
||||
// logger.info(
|
||||
// `[${tracker.requestId}] Tool call ${event.data?.partial ? '(partial)' : '(complete)'}:`,
|
||||
// {
|
||||
// id: event.data?.id,
|
||||
// name: event.data?.name,
|
||||
// arguments: event.data?.arguments,
|
||||
// blockIndex: event.data?._blockIndex,
|
||||
// }
|
||||
// )
|
||||
if (!event.data?.partial) {
|
||||
toolCalls.push(event.data)
|
||||
if (event.data?.id) {
|
||||
@@ -625,30 +605,24 @@ export async function POST(req: NextRequest) {
|
||||
}
|
||||
break
|
||||
|
||||
case 'tool_execution':
|
||||
logger.info(`[${tracker.requestId}] Tool execution started:`, {
|
||||
toolCallId: event.toolCallId,
|
||||
toolName: event.toolName,
|
||||
status: event.status,
|
||||
})
|
||||
case 'tool_generating':
|
||||
// logger.info(`[${tracker.requestId}] Tool generating:`, {
|
||||
// toolCallId: event.toolCallId,
|
||||
// toolName: event.toolName,
|
||||
// })
|
||||
if (event.toolCallId) {
|
||||
if (event.status === 'completed') {
|
||||
startedToolExecutionIds.add(event.toolCallId)
|
||||
completedToolExecutionIds.add(event.toolCallId)
|
||||
} else {
|
||||
startedToolExecutionIds.add(event.toolCallId)
|
||||
}
|
||||
startedToolExecutionIds.add(event.toolCallId)
|
||||
}
|
||||
break
|
||||
|
||||
case 'tool_result':
|
||||
logger.info(`[${tracker.requestId}] Tool result received:`, {
|
||||
toolCallId: event.toolCallId,
|
||||
toolName: event.toolName,
|
||||
success: event.success,
|
||||
result: `${JSON.stringify(event.result).substring(0, 200)}...`,
|
||||
resultSize: JSON.stringify(event.result).length,
|
||||
})
|
||||
// logger.info(`[${tracker.requestId}] Tool result received:`, {
|
||||
// toolCallId: event.toolCallId,
|
||||
// toolName: event.toolName,
|
||||
// success: event.success,
|
||||
// result: `${JSON.stringify(event.result).substring(0, 200)}...`,
|
||||
// resultSize: JSON.stringify(event.result).length,
|
||||
// })
|
||||
if (event.toolCallId) {
|
||||
completedToolExecutionIds.add(event.toolCallId)
|
||||
}
|
||||
@@ -669,9 +643,6 @@ export async function POST(req: NextRequest) {
|
||||
case 'start':
|
||||
if (event.data?.responseId) {
|
||||
responseIdFromStart = event.data.responseId
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Received start event with responseId: ${responseIdFromStart}`
|
||||
)
|
||||
}
|
||||
break
|
||||
|
||||
@@ -679,9 +650,7 @@ export async function POST(req: NextRequest) {
|
||||
if (event.data?.responseId) {
|
||||
responseIdFromDone = event.data.responseId
|
||||
lastDoneResponseId = responseIdFromDone
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Received done event with responseId: ${responseIdFromDone}`
|
||||
)
|
||||
|
||||
// Mark this done as safe only if no tool call is currently in progress or pending
|
||||
const announced = announcedToolCallIds.size
|
||||
const completed = completedToolExecutionIds.size
|
||||
@@ -689,34 +658,14 @@ export async function POST(req: NextRequest) {
|
||||
const hasToolInProgress = announced > completed || started > completed
|
||||
if (!hasToolInProgress) {
|
||||
lastSafeDoneResponseId = responseIdFromDone
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Marked done as SAFE (no tools in progress)`
|
||||
)
|
||||
} else {
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Done received but tools are in progress (announced=${announced}, started=${started}, completed=${completed})`
|
||||
)
|
||||
}
|
||||
}
|
||||
if (isFirstDone) {
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Initial AI response complete, tool count: ${toolCalls.length}`
|
||||
)
|
||||
isFirstDone = false
|
||||
} else {
|
||||
logger.info(`[${tracker.requestId}] Conversation round complete`)
|
||||
}
|
||||
break
|
||||
|
||||
case 'error':
|
||||
logger.error(`[${tracker.requestId}] Stream error event:`, event.error)
|
||||
break
|
||||
|
||||
default:
|
||||
logger.debug(
|
||||
`[${tracker.requestId}] Unknown event type: ${event.type}`,
|
||||
event
|
||||
)
|
||||
}
|
||||
} catch (e) {
|
||||
// Enhanced error handling for large payloads and parsing issues
|
||||
|
||||
@@ -0,0 +1,53 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import {
|
||||
authenticateCopilotRequestSessionOnly,
|
||||
createBadRequestResponse,
|
||||
createInternalServerErrorResponse,
|
||||
createRequestTracker,
|
||||
createUnauthorizedResponse,
|
||||
} from '@/lib/copilot/auth'
|
||||
import { routeExecution } from '@/lib/copilot/tools/server/router'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('ExecuteCopilotServerToolAPI')
|
||||
|
||||
const ExecuteSchema = z.object({
|
||||
toolName: z.string(),
|
||||
payload: z.unknown().optional(),
|
||||
})
|
||||
|
||||
export async function POST(req: NextRequest) {
|
||||
const tracker = createRequestTracker()
|
||||
try {
|
||||
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
||||
if (!isAuthenticated || !userId) {
|
||||
return createUnauthorizedResponse()
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
try {
|
||||
const preview = JSON.stringify(body).slice(0, 300)
|
||||
logger.debug(`[${tracker.requestId}] Incoming request body preview`, { preview })
|
||||
} catch {}
|
||||
|
||||
const { toolName, payload } = ExecuteSchema.parse(body)
|
||||
|
||||
logger.info(`[${tracker.requestId}] Executing server tool`, { toolName })
|
||||
const result = await routeExecution(toolName, payload)
|
||||
|
||||
try {
|
||||
const resultPreview = JSON.stringify(result).slice(0, 300)
|
||||
logger.debug(`[${tracker.requestId}] Server tool result preview`, { toolName, resultPreview })
|
||||
} catch {}
|
||||
|
||||
return NextResponse.json({ success: true, result })
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.debug(`[${tracker.requestId}] Zod validation error`, { issues: error.issues })
|
||||
return createBadRequestResponse('Invalid request body for execute-copilot-server-tool')
|
||||
}
|
||||
logger.error(`[${tracker.requestId}] Failed to execute server tool:`, error)
|
||||
return createInternalServerErrorResponse('Failed to execute server tool')
|
||||
}
|
||||
}
|
||||
@@ -1,761 +1,7 @@
|
||||
/**
|
||||
* Tests for copilot methods API route
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import {
|
||||
createMockRequest,
|
||||
mockCryptoUuid,
|
||||
setupCommonApiMocks,
|
||||
} from '@/app/api/__test-utils__/utils'
|
||||
import { describe, expect, it } from 'vitest'
|
||||
|
||||
describe('Copilot Methods API Route', () => {
|
||||
const mockRedisGet = vi.fn()
|
||||
const mockRedisSet = vi.fn()
|
||||
const mockGetRedisClient = vi.fn()
|
||||
const mockToolRegistryHas = vi.fn()
|
||||
const mockToolRegistryGet = vi.fn()
|
||||
const mockToolRegistryExecute = vi.fn()
|
||||
const mockToolRegistryGetAvailableIds = vi.fn()
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetModules()
|
||||
setupCommonApiMocks()
|
||||
mockCryptoUuid()
|
||||
|
||||
// Mock Redis client
|
||||
const mockRedisClient = {
|
||||
get: mockRedisGet,
|
||||
set: mockRedisSet,
|
||||
}
|
||||
|
||||
mockGetRedisClient.mockReturnValue(mockRedisClient)
|
||||
mockRedisGet.mockResolvedValue(null)
|
||||
mockRedisSet.mockResolvedValue('OK')
|
||||
|
||||
vi.doMock('@/lib/redis', () => ({
|
||||
getRedisClient: mockGetRedisClient,
|
||||
}))
|
||||
|
||||
// Mock tool registry
|
||||
const mockToolRegistry = {
|
||||
has: mockToolRegistryHas,
|
||||
get: mockToolRegistryGet,
|
||||
execute: mockToolRegistryExecute,
|
||||
getAvailableIds: mockToolRegistryGetAvailableIds,
|
||||
}
|
||||
|
||||
mockToolRegistryHas.mockReturnValue(true)
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: false })
|
||||
mockToolRegistryExecute.mockResolvedValue({ success: true, data: 'Tool executed successfully' })
|
||||
mockToolRegistryGetAvailableIds.mockReturnValue(['test-tool', 'another-tool'])
|
||||
|
||||
vi.doMock('@/lib/copilot/tools/server-tools/registry', () => ({
|
||||
copilotToolRegistry: mockToolRegistry,
|
||||
}))
|
||||
|
||||
// Mock environment variables
|
||||
vi.doMock('@/lib/env', () => ({
|
||||
env: {
|
||||
INTERNAL_API_SECRET: 'test-secret-key',
|
||||
COPILOT_API_KEY: 'test-copilot-key',
|
||||
},
|
||||
}))
|
||||
|
||||
// Mock setTimeout for polling
|
||||
vi.spyOn(global, 'setTimeout').mockImplementation((callback, _delay) => {
|
||||
if (typeof callback === 'function') {
|
||||
setImmediate(callback)
|
||||
}
|
||||
return setTimeout(() => {}, 0) as any
|
||||
})
|
||||
|
||||
// Mock Date.now for timeout control
|
||||
let mockTime = 1640995200000
|
||||
vi.spyOn(Date, 'now').mockImplementation(() => {
|
||||
mockTime += 1000 // Add 1 second each call
|
||||
return mockTime
|
||||
})
|
||||
|
||||
// Mock crypto.randomUUID for request IDs
|
||||
vi.spyOn(crypto, 'randomUUID').mockReturnValue('test-request-id')
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks()
|
||||
vi.restoreAllMocks()
|
||||
})
|
||||
|
||||
describe('POST', () => {
|
||||
it('should return 401 when API key is missing', async () => {
|
||||
const req = createMockRequest('POST', {
|
||||
methodId: 'test-tool',
|
||||
params: {},
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(401)
|
||||
const responseData = await response.json()
|
||||
expect(responseData).toEqual({
|
||||
success: false,
|
||||
error: 'API key required',
|
||||
})
|
||||
})
|
||||
|
||||
it('should return 401 when API key is invalid', async () => {
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'invalid-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'test-tool',
|
||||
params: {},
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(401)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(typeof responseData.error).toBe('string')
|
||||
})
|
||||
|
||||
it('should return 401 when internal API key is not configured', async () => {
|
||||
// Mock environment with no API key
|
||||
vi.doMock('@/lib/env', () => ({
|
||||
env: {
|
||||
INTERNAL_API_SECRET: undefined,
|
||||
COPILOT_API_KEY: 'test-copilot-key',
|
||||
},
|
||||
}))
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'any-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'test-tool',
|
||||
params: {},
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(401)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.status).toBeUndefined()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(typeof responseData.error).toBe('string')
|
||||
})
|
||||
|
||||
it('should return 400 for invalid request body - missing methodId', async () => {
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
params: {},
|
||||
// Missing methodId
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toContain('Required')
|
||||
})
|
||||
|
||||
it('should return 400 for empty methodId', async () => {
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: '',
|
||||
params: {},
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toContain('Method ID is required')
|
||||
})
|
||||
|
||||
it('should return 400 when tool is not found in registry', async () => {
|
||||
mockToolRegistryHas.mockReturnValue(false)
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'unknown-tool',
|
||||
params: {},
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toContain('Unknown method: unknown-tool')
|
||||
expect(responseData.error).toContain('Available methods: test-tool, another-tool')
|
||||
})
|
||||
|
||||
it('should successfully execute a tool without interruption', async () => {
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'test-tool',
|
||||
params: { key: 'value' },
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
const responseData = await response.json()
|
||||
expect(responseData).toEqual({
|
||||
success: true,
|
||||
data: 'Tool executed successfully',
|
||||
})
|
||||
|
||||
expect(mockToolRegistryExecute).toHaveBeenCalledWith('test-tool', { key: 'value' })
|
||||
})
|
||||
|
||||
it('should handle tool execution with default empty params', async () => {
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'test-tool',
|
||||
// No params provided
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
const responseData = await response.json()
|
||||
expect(responseData).toEqual({
|
||||
success: true,
|
||||
data: 'Tool executed successfully',
|
||||
})
|
||||
|
||||
expect(mockToolRegistryExecute).toHaveBeenCalledWith('test-tool', {})
|
||||
})
|
||||
|
||||
it('should return 400 when tool requires interrupt but no toolCallId provided', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
// No toolCallId provided
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toBe(
|
||||
'This tool requires approval but no tool call ID was provided'
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle tool execution with interrupt - user approval', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to return accepted status immediately (simulate quick approval)
|
||||
mockRedisGet.mockResolvedValue(
|
||||
JSON.stringify({ status: 'accepted', message: 'User approved' })
|
||||
)
|
||||
|
||||
// Reset Date.now mock to not trigger timeout
|
||||
let mockTime = 1640995200000
|
||||
vi.spyOn(Date, 'now').mockImplementation(() => {
|
||||
mockTime += 100 // Small increment to avoid timeout
|
||||
return mockTime
|
||||
})
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: { key: 'value' },
|
||||
toolCallId: 'tool-call-123',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
const responseData = await response.json()
|
||||
expect(responseData).toEqual({
|
||||
success: true,
|
||||
data: 'Tool executed successfully',
|
||||
})
|
||||
|
||||
// Verify Redis operations
|
||||
expect(mockRedisSet).toHaveBeenCalledWith(
|
||||
'tool_call:tool-call-123',
|
||||
expect.stringContaining('"status":"pending"'),
|
||||
'EX',
|
||||
86400
|
||||
)
|
||||
expect(mockRedisGet).toHaveBeenCalledWith('tool_call:tool-call-123')
|
||||
expect(mockToolRegistryExecute).toHaveBeenCalledWith('interrupt-tool', {
|
||||
key: 'value',
|
||||
confirmationMessage: 'User approved',
|
||||
fullData: {
|
||||
message: 'User approved',
|
||||
status: 'accepted',
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle tool execution with interrupt - user rejection', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to return rejected status
|
||||
mockRedisGet.mockResolvedValue(
|
||||
JSON.stringify({ status: 'rejected', message: 'User rejected' })
|
||||
)
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-456',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200) // User rejection returns 200
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toBe(
|
||||
'The user decided to skip running this tool. This was a user decision.'
|
||||
)
|
||||
|
||||
// Tool should not be executed when rejected
|
||||
expect(mockToolRegistryExecute).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should handle tool execution with interrupt - error status', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to return error status
|
||||
mockRedisGet.mockResolvedValue(
|
||||
JSON.stringify({ status: 'error', message: 'Tool execution failed' })
|
||||
)
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-error',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toBe('Tool execution failed')
|
||||
})
|
||||
|
||||
it('should handle tool execution with interrupt - background status', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to return background status
|
||||
mockRedisGet.mockResolvedValue(
|
||||
JSON.stringify({ status: 'background', message: 'Running in background' })
|
||||
)
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-bg',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
const responseData = await response.json()
|
||||
expect(responseData).toEqual({
|
||||
success: true,
|
||||
data: 'Tool executed successfully',
|
||||
})
|
||||
|
||||
expect(mockToolRegistryExecute).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should handle tool execution with interrupt - success status', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to return success status
|
||||
mockRedisGet.mockResolvedValue(
|
||||
JSON.stringify({ status: 'success', message: 'Completed successfully' })
|
||||
)
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-success',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
const responseData = await response.json()
|
||||
expect(responseData).toEqual({
|
||||
success: true,
|
||||
data: 'Tool executed successfully',
|
||||
})
|
||||
|
||||
expect(mockToolRegistryExecute).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should handle tool execution with interrupt - timeout', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to never return a status (timeout scenario)
|
||||
mockRedisGet.mockResolvedValue(null)
|
||||
|
||||
// Mock Date.now to trigger timeout quickly
|
||||
let mockTime = 1640995200000
|
||||
vi.spyOn(Date, 'now').mockImplementation(() => {
|
||||
mockTime += 100000 // Add 100 seconds each call to trigger timeout
|
||||
return mockTime
|
||||
})
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-timeout',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(408) // Request Timeout
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toBe('Tool execution request timed out')
|
||||
|
||||
expect(mockToolRegistryExecute).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should handle unexpected status in interrupt flow', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to return unexpected status
|
||||
mockRedisGet.mockResolvedValue(
|
||||
JSON.stringify({ status: 'unknown-status', message: 'Unknown' })
|
||||
)
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-unknown',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toBe('Unexpected tool call status: unknown-status')
|
||||
})
|
||||
|
||||
it('should handle Redis client unavailable for interrupt flow', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
mockGetRedisClient.mockReturnValue(null)
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-no-redis',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(408) // Timeout due to Redis unavailable
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toBe('Tool execution request timed out')
|
||||
})
|
||||
|
||||
it('should handle no_op tool with confirmation message', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to return accepted status with message
|
||||
mockRedisGet.mockResolvedValue(
|
||||
JSON.stringify({ status: 'accepted', message: 'Confirmation message' })
|
||||
)
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'no_op',
|
||||
params: { existing: 'param' },
|
||||
toolCallId: 'tool-call-noop',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
|
||||
// Verify confirmation message was added to params
|
||||
expect(mockToolRegistryExecute).toHaveBeenCalledWith('no_op', {
|
||||
existing: 'param',
|
||||
confirmationMessage: 'Confirmation message',
|
||||
fullData: {
|
||||
message: 'Confirmation message',
|
||||
status: 'accepted',
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle Redis errors in interrupt flow', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to throw an error
|
||||
mockRedisGet.mockRejectedValue(new Error('Redis connection failed'))
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-redis-error',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(408) // Timeout due to Redis error
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toBe('Tool execution request timed out')
|
||||
})
|
||||
|
||||
it('should handle tool execution failure', async () => {
|
||||
mockToolRegistryExecute.mockResolvedValue({
|
||||
success: false,
|
||||
error: 'Tool execution failed',
|
||||
})
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'failing-tool',
|
||||
params: {},
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200) // Still returns 200, but with success: false
|
||||
const responseData = await response.json()
|
||||
expect(responseData).toEqual({
|
||||
success: false,
|
||||
error: 'Tool execution failed',
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle JSON parsing errors in request body', async () => {
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: '{invalid-json',
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toContain('JSON')
|
||||
})
|
||||
|
||||
it('should handle tool registry execution throwing an error', async () => {
|
||||
mockToolRegistryExecute.mockRejectedValue(new Error('Registry execution failed'))
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'error-tool',
|
||||
params: {},
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toBe('Registry execution failed')
|
||||
})
|
||||
|
||||
it('should handle old format Redis status (string instead of JSON)', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to return old format (direct status string)
|
||||
mockRedisGet.mockResolvedValue('accepted')
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-old-format',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
const responseData = await response.json()
|
||||
expect(responseData).toEqual({
|
||||
success: true,
|
||||
data: 'Tool executed successfully',
|
||||
})
|
||||
|
||||
expect(mockToolRegistryExecute).toHaveBeenCalled()
|
||||
})
|
||||
describe('copilot methods route placeholder', () => {
|
||||
it('loads test suite', () => {
|
||||
expect(true).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,395 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { copilotToolRegistry } from '@/lib/copilot/tools/server-tools/registry'
|
||||
import type { NotificationStatus } from '@/lib/copilot/types'
|
||||
import { checkCopilotApiKey, checkInternalApiKey } from '@/lib/copilot/utils'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getRedisClient } from '@/lib/redis'
|
||||
import { createErrorResponse } from '@/app/api/copilot/methods/utils'
|
||||
|
||||
const logger = createLogger('CopilotMethodsAPI')
|
||||
|
||||
/**
|
||||
* Add a tool call to Redis with 'pending' status
|
||||
*/
|
||||
async function addToolToRedis(toolCallId: string): Promise<void> {
|
||||
if (!toolCallId) {
|
||||
logger.warn('addToolToRedis: No tool call ID provided')
|
||||
return
|
||||
}
|
||||
|
||||
const redis = getRedisClient()
|
||||
if (!redis) {
|
||||
logger.warn('addToolToRedis: Redis client not available')
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
const key = `tool_call:${toolCallId}`
|
||||
const status: NotificationStatus = 'pending'
|
||||
|
||||
// Store as JSON object for consistency with confirm API
|
||||
const toolCallData = {
|
||||
status,
|
||||
message: null,
|
||||
timestamp: new Date().toISOString(),
|
||||
}
|
||||
|
||||
// Set with 24 hour expiry (86400 seconds)
|
||||
await redis.set(key, JSON.stringify(toolCallData), 'EX', 86400)
|
||||
|
||||
logger.info('Tool call added to Redis', {
|
||||
toolCallId,
|
||||
key,
|
||||
status,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to add tool call to Redis', {
|
||||
toolCallId,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Poll Redis for tool call status updates
|
||||
* Returns when status changes to 'Accepted' or 'Rejected', or times out after 60 seconds
|
||||
*/
|
||||
async function pollRedisForTool(
|
||||
toolCallId: string
|
||||
): Promise<{ status: NotificationStatus; message?: string; fullData?: any } | null> {
|
||||
const redis = getRedisClient()
|
||||
if (!redis) {
|
||||
logger.warn('pollRedisForTool: Redis client not available')
|
||||
return null
|
||||
}
|
||||
|
||||
const key = `tool_call:${toolCallId}`
|
||||
const timeout = 600000 // 10 minutes for long-running operations
|
||||
const pollInterval = 1000 // 1 second
|
||||
const startTime = Date.now()
|
||||
|
||||
while (Date.now() - startTime < timeout) {
|
||||
try {
|
||||
const redisValue = await redis.get(key)
|
||||
if (!redisValue) {
|
||||
// Wait before next poll
|
||||
await new Promise((resolve) => setTimeout(resolve, pollInterval))
|
||||
continue
|
||||
}
|
||||
|
||||
let status: NotificationStatus | null = null
|
||||
let message: string | undefined
|
||||
let fullData: any = null
|
||||
|
||||
// Try to parse as JSON (new format), fallback to string (old format)
|
||||
try {
|
||||
const parsedData = JSON.parse(redisValue)
|
||||
status = parsedData.status as NotificationStatus
|
||||
message = parsedData.message || undefined
|
||||
fullData = parsedData // Store the full parsed data
|
||||
} catch {
|
||||
// Fallback to old format (direct status string)
|
||||
status = redisValue as NotificationStatus
|
||||
}
|
||||
|
||||
if (status !== 'pending') {
|
||||
// Log the message found in redis prominently - always log, even if message is null/undefined
|
||||
logger.info('Redis poller found non-pending status', {
|
||||
toolCallId,
|
||||
foundMessage: message,
|
||||
messageType: typeof message,
|
||||
messageIsNull: message === null,
|
||||
messageIsUndefined: message === undefined,
|
||||
status,
|
||||
duration: Date.now() - startTime,
|
||||
rawRedisValue: redisValue,
|
||||
})
|
||||
|
||||
// Special logging for set environment variables tool when Redis status is found
|
||||
if (toolCallId && (status === 'accepted' || status === 'rejected')) {
|
||||
logger.info('SET_ENV_VARS: Redis polling found status update', {
|
||||
toolCallId,
|
||||
foundStatus: status,
|
||||
redisMessage: message,
|
||||
pollDuration: Date.now() - startTime,
|
||||
redisKey: `tool_call:${toolCallId}`,
|
||||
})
|
||||
}
|
||||
|
||||
return { status, message, fullData }
|
||||
}
|
||||
|
||||
// Wait before next poll
|
||||
await new Promise((resolve) => setTimeout(resolve, pollInterval))
|
||||
} catch (error) {
|
||||
logger.error('Error polling Redis for tool call status', {
|
||||
toolCallId,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
})
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
logger.warn('Tool call polling timed out', {
|
||||
toolCallId,
|
||||
timeout,
|
||||
})
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle tool calls that require user interruption/approval
|
||||
* Returns { approved: boolean, rejected: boolean, error?: boolean, message?: string } to distinguish between rejection, timeout, and error
|
||||
*/
|
||||
async function interruptHandler(toolCallId: string): Promise<{
|
||||
approved: boolean
|
||||
rejected: boolean
|
||||
error?: boolean
|
||||
message?: string
|
||||
fullData?: any
|
||||
}> {
|
||||
if (!toolCallId) {
|
||||
logger.error('interruptHandler: No tool call ID provided')
|
||||
return { approved: false, rejected: false, error: true, message: 'No tool call ID provided' }
|
||||
}
|
||||
|
||||
logger.info('Starting interrupt handler for tool call', { toolCallId })
|
||||
|
||||
try {
|
||||
// Step 1: Add tool to Redis with 'pending' status
|
||||
await addToolToRedis(toolCallId)
|
||||
|
||||
// Step 2: Poll Redis for status update
|
||||
const result = await pollRedisForTool(toolCallId)
|
||||
|
||||
if (!result) {
|
||||
logger.error('Failed to get tool call status or timed out', { toolCallId })
|
||||
return { approved: false, rejected: false }
|
||||
}
|
||||
|
||||
const { status, message, fullData } = result
|
||||
|
||||
if (status === 'rejected') {
|
||||
logger.info('Tool execution rejected by user', { toolCallId, message })
|
||||
return { approved: false, rejected: true, message, fullData }
|
||||
}
|
||||
|
||||
if (status === 'accepted') {
|
||||
logger.info('Tool execution approved by user', { toolCallId, message })
|
||||
return { approved: true, rejected: false, message, fullData }
|
||||
}
|
||||
|
||||
if (status === 'error') {
|
||||
logger.error('Tool execution failed with error', { toolCallId, message })
|
||||
return { approved: false, rejected: false, error: true, message, fullData }
|
||||
}
|
||||
|
||||
if (status === 'background') {
|
||||
logger.info('Tool execution moved to background', { toolCallId, message })
|
||||
return { approved: true, rejected: false, message, fullData }
|
||||
}
|
||||
|
||||
if (status === 'success') {
|
||||
logger.info('Tool execution completed successfully', { toolCallId, message })
|
||||
return { approved: true, rejected: false, message, fullData }
|
||||
}
|
||||
|
||||
logger.warn('Unexpected tool call status', { toolCallId, status, message })
|
||||
return {
|
||||
approved: false,
|
||||
rejected: false,
|
||||
error: true,
|
||||
message: `Unexpected tool call status: ${status}`,
|
||||
}
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error('Error in interrupt handler', {
|
||||
toolCallId,
|
||||
error: errorMessage,
|
||||
})
|
||||
return {
|
||||
approved: false,
|
||||
rejected: false,
|
||||
error: true,
|
||||
message: `Interrupt handler error: ${errorMessage}`,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const MethodExecutionSchema = z.object({
|
||||
methodId: z.string().min(1, 'Method ID is required'),
|
||||
params: z.record(z.any()).optional().default({}),
|
||||
toolCallId: z.string().nullable().optional().default(null),
|
||||
})
|
||||
|
||||
/**
|
||||
* POST /api/copilot/methods
|
||||
* Execute a method based on methodId with internal API key auth
|
||||
*/
|
||||
export async function POST(req: NextRequest) {
|
||||
const requestId = crypto.randomUUID()
|
||||
const startTime = Date.now()
|
||||
|
||||
try {
|
||||
// Evaluate both auth schemes; pass if either is valid
|
||||
const internalAuth = checkInternalApiKey(req)
|
||||
const copilotAuth = checkCopilotApiKey(req)
|
||||
const isAuthenticated = !!(internalAuth?.success || copilotAuth?.success)
|
||||
if (!isAuthenticated) {
|
||||
const errorMessage = copilotAuth.error || internalAuth.error || 'Authentication failed'
|
||||
return NextResponse.json(createErrorResponse(errorMessage), {
|
||||
status: 401,
|
||||
})
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
const { methodId, params, toolCallId } = MethodExecutionSchema.parse(body)
|
||||
|
||||
logger.info(`[${requestId}] Method execution request`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
hasParams: !!params && Object.keys(params).length > 0,
|
||||
})
|
||||
|
||||
// Check if tool exists in registry
|
||||
if (!copilotToolRegistry.has(methodId)) {
|
||||
logger.error(`[${requestId}] Tool not found in registry: ${methodId}`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
availableTools: copilotToolRegistry.getAvailableIds(),
|
||||
registrySize: copilotToolRegistry.getAvailableIds().length,
|
||||
})
|
||||
return NextResponse.json(
|
||||
createErrorResponse(
|
||||
`Unknown method: ${methodId}. Available methods: ${copilotToolRegistry.getAvailableIds().join(', ')}`
|
||||
),
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Tool found in registry: ${methodId}`, {
|
||||
toolCallId,
|
||||
})
|
||||
|
||||
// Check if the tool requires interrupt/approval
|
||||
const tool = copilotToolRegistry.get(methodId)
|
||||
if (tool?.requiresInterrupt) {
|
||||
if (!toolCallId) {
|
||||
logger.warn(`[${requestId}] Tool requires interrupt but no toolCallId provided`, {
|
||||
methodId,
|
||||
})
|
||||
return NextResponse.json(
|
||||
createErrorResponse('This tool requires approval but no tool call ID was provided'),
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Tool requires interrupt, starting approval process`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
})
|
||||
|
||||
// Handle interrupt flow
|
||||
const { approved, rejected, error, message, fullData } = await interruptHandler(toolCallId)
|
||||
|
||||
if (rejected) {
|
||||
logger.info(`[${requestId}] Tool execution rejected by user`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
message,
|
||||
})
|
||||
return NextResponse.json(
|
||||
createErrorResponse(
|
||||
'The user decided to skip running this tool. This was a user decision.'
|
||||
),
|
||||
{ status: 200 } // Changed to 200 - user rejection is a valid response
|
||||
)
|
||||
}
|
||||
|
||||
if (error) {
|
||||
logger.error(`[${requestId}] Tool execution failed with error`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
message,
|
||||
})
|
||||
return NextResponse.json(
|
||||
createErrorResponse(message || 'Tool execution failed with unknown error'),
|
||||
{ status: 500 } // 500 Internal Server Error
|
||||
)
|
||||
}
|
||||
|
||||
if (!approved) {
|
||||
logger.warn(`[${requestId}] Tool execution timed out`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
})
|
||||
return NextResponse.json(
|
||||
createErrorResponse('Tool execution request timed out'),
|
||||
{ status: 408 } // 408 Request Timeout
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Tool execution approved by user`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
message,
|
||||
})
|
||||
|
||||
// For tools that need confirmation data, pass the message and/or fullData as parameters
|
||||
if (message) {
|
||||
params.confirmationMessage = message
|
||||
}
|
||||
if (fullData) {
|
||||
params.fullData = fullData
|
||||
}
|
||||
}
|
||||
|
||||
// Execute the tool directly via registry
|
||||
const result = await copilotToolRegistry.execute(methodId, params)
|
||||
|
||||
logger.info(`[${requestId}] Tool execution result:`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
success: result.success,
|
||||
hasData: !!result.data,
|
||||
hasError: !!result.error,
|
||||
})
|
||||
|
||||
const duration = Date.now() - startTime
|
||||
logger.info(`[${requestId}] Method execution completed: ${methodId}`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
duration,
|
||||
success: result.success,
|
||||
})
|
||||
|
||||
return NextResponse.json(result)
|
||||
} catch (error) {
|
||||
const duration = Date.now() - startTime
|
||||
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.error(`[${requestId}] Request validation error:`, {
|
||||
duration,
|
||||
errors: error.errors,
|
||||
})
|
||||
return NextResponse.json(
|
||||
createErrorResponse(
|
||||
`Invalid request data: ${error.errors.map((e) => e.message).join(', ')}`
|
||||
),
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Unexpected error:`, {
|
||||
duration,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
createErrorResponse(error instanceof Error ? error.message : 'Internal server error'),
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
import type { CopilotToolResponse } from '@/lib/copilot/tools/server-tools/base'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('CopilotMethodsUtils')
|
||||
|
||||
/**
|
||||
* Create a standardized error response
|
||||
*/
|
||||
export function createErrorResponse(error: string): CopilotToolResponse {
|
||||
return {
|
||||
success: false,
|
||||
error,
|
||||
}
|
||||
}
|
||||
125
apps/sim/app/api/copilot/tools/mark-complete/route.ts
Normal file
125
apps/sim/app/api/copilot/tools/mark-complete/route.ts
Normal file
@@ -0,0 +1,125 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import {
|
||||
authenticateCopilotRequestSessionOnly,
|
||||
createBadRequestResponse,
|
||||
createInternalServerErrorResponse,
|
||||
createRequestTracker,
|
||||
createUnauthorizedResponse,
|
||||
} from '@/lib/copilot/auth'
|
||||
import { env } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
|
||||
|
||||
const logger = createLogger('CopilotMarkToolCompleteAPI')
|
||||
|
||||
// Sim Agent API configuration
|
||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
||||
|
||||
// Schema for mark-complete request
|
||||
const MarkCompleteSchema = z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
status: z.number().int(),
|
||||
message: z.any().optional(),
|
||||
data: z.any().optional(),
|
||||
})
|
||||
|
||||
/**
|
||||
* POST /api/copilot/tools/mark-complete
|
||||
* Proxy to Sim Agent: POST /api/tools/mark-complete
|
||||
*/
|
||||
export async function POST(req: NextRequest) {
|
||||
const tracker = createRequestTracker()
|
||||
|
||||
try {
|
||||
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
||||
if (!isAuthenticated || !userId) {
|
||||
return createUnauthorizedResponse()
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
|
||||
// Log raw body shape for diagnostics (avoid dumping huge payloads)
|
||||
try {
|
||||
const bodyPreview = JSON.stringify(body).slice(0, 300)
|
||||
logger.debug(`[${tracker.requestId}] Incoming mark-complete raw body preview`, {
|
||||
preview: `${bodyPreview}${bodyPreview.length === 300 ? '...' : ''}`,
|
||||
})
|
||||
} catch {}
|
||||
|
||||
const parsed = MarkCompleteSchema.parse(body)
|
||||
|
||||
const messagePreview = (() => {
|
||||
try {
|
||||
const s =
|
||||
typeof parsed.message === 'string' ? parsed.message : JSON.stringify(parsed.message)
|
||||
return s ? `${s.slice(0, 200)}${s.length > 200 ? '...' : ''}` : undefined
|
||||
} catch {
|
||||
return undefined
|
||||
}
|
||||
})()
|
||||
|
||||
logger.info(`[${tracker.requestId}] Forwarding tool mark-complete`, {
|
||||
userId,
|
||||
toolCallId: parsed.id,
|
||||
toolName: parsed.name,
|
||||
status: parsed.status,
|
||||
hasMessage: parsed.message !== undefined,
|
||||
hasData: parsed.data !== undefined,
|
||||
messagePreview,
|
||||
agentUrl: `${SIM_AGENT_API_URL}/api/tools/mark-complete`,
|
||||
})
|
||||
|
||||
const agentRes = await fetch(`${SIM_AGENT_API_URL}/api/tools/mark-complete`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
|
||||
},
|
||||
body: JSON.stringify(parsed),
|
||||
})
|
||||
|
||||
// Attempt to parse agent response JSON
|
||||
let agentJson: any = null
|
||||
let agentText: string | null = null
|
||||
try {
|
||||
agentJson = await agentRes.json()
|
||||
} catch (_) {
|
||||
try {
|
||||
agentText = await agentRes.text()
|
||||
} catch {}
|
||||
}
|
||||
|
||||
logger.info(`[${tracker.requestId}] Agent responded to mark-complete`, {
|
||||
status: agentRes.status,
|
||||
ok: agentRes.ok,
|
||||
responseJsonPreview: agentJson ? JSON.stringify(agentJson).slice(0, 300) : undefined,
|
||||
responseTextPreview: agentText ? agentText.slice(0, 300) : undefined,
|
||||
})
|
||||
|
||||
if (agentRes.ok) {
|
||||
return NextResponse.json({ success: true })
|
||||
}
|
||||
|
||||
const errorMessage =
|
||||
agentJson?.error || agentText || `Agent responded with status ${agentRes.status}`
|
||||
const status = agentRes.status >= 500 ? 500 : 400
|
||||
|
||||
logger.warn(`[${tracker.requestId}] Mark-complete failed`, {
|
||||
status,
|
||||
error: errorMessage,
|
||||
})
|
||||
|
||||
return NextResponse.json({ success: false, error: errorMessage }, { status })
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${tracker.requestId}] Invalid mark-complete request body`, {
|
||||
issues: error.issues,
|
||||
})
|
||||
return createBadRequestResponse('Invalid request body for mark-complete')
|
||||
}
|
||||
logger.error(`[${tracker.requestId}] Failed to proxy mark-complete:`, error)
|
||||
return createInternalServerErrorResponse('Failed to mark tool as complete')
|
||||
}
|
||||
}
|
||||
@@ -213,24 +213,81 @@ function createUserFriendlyErrorMessage(
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves environment variables and tags in code
|
||||
* @param code - Code with variables
|
||||
* @param params - Parameters that may contain variable values
|
||||
* @param envVars - Environment variables from the workflow
|
||||
* @returns Resolved code
|
||||
* Resolves workflow variables with <variable.name> syntax
|
||||
*/
|
||||
function resolveWorkflowVariables(
|
||||
code: string,
|
||||
workflowVariables: Record<string, any>,
|
||||
contextVariables: Record<string, any>
|
||||
): string {
|
||||
let resolvedCode = code
|
||||
|
||||
function resolveCodeVariables(
|
||||
const variableMatches = resolvedCode.match(/<variable\.([^>]+)>/g) || []
|
||||
for (const match of variableMatches) {
|
||||
const variableName = match.slice('<variable.'.length, -1).trim()
|
||||
|
||||
// Find the variable by name (workflowVariables is indexed by ID, values are variable objects)
|
||||
const foundVariable = Object.entries(workflowVariables).find(
|
||||
([_, variable]) => (variable.name || '').replace(/\s+/g, '') === variableName
|
||||
)
|
||||
|
||||
if (foundVariable) {
|
||||
const variable = foundVariable[1]
|
||||
// Get the typed value - handle different variable types
|
||||
let variableValue = variable.value
|
||||
|
||||
if (variable.value !== undefined && variable.value !== null) {
|
||||
try {
|
||||
// Handle 'string' type the same as 'plain' for backward compatibility
|
||||
const type = variable.type === 'string' ? 'plain' : variable.type
|
||||
|
||||
// For plain text, use exactly what's entered without modifications
|
||||
if (type === 'plain' && typeof variableValue === 'string') {
|
||||
// Use as-is for plain text
|
||||
} else if (type === 'number') {
|
||||
variableValue = Number(variableValue)
|
||||
} else if (type === 'boolean') {
|
||||
variableValue = variableValue === 'true' || variableValue === true
|
||||
} else if (type === 'json') {
|
||||
try {
|
||||
variableValue =
|
||||
typeof variableValue === 'string' ? JSON.parse(variableValue) : variableValue
|
||||
} catch {
|
||||
// Keep original value if JSON parsing fails
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
// Fallback to original value on error
|
||||
variableValue = variable.value
|
||||
}
|
||||
}
|
||||
|
||||
// Create a safe variable reference
|
||||
const safeVarName = `__variable_${variableName.replace(/[^a-zA-Z0-9_]/g, '_')}`
|
||||
contextVariables[safeVarName] = variableValue
|
||||
|
||||
// Replace the variable reference with the safe variable name
|
||||
resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), safeVarName)
|
||||
} else {
|
||||
// Variable not found - replace with empty string to avoid syntax errors
|
||||
resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), '')
|
||||
}
|
||||
}
|
||||
|
||||
return resolvedCode
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves environment variables with {{var_name}} syntax
|
||||
*/
|
||||
function resolveEnvironmentVariables(
|
||||
code: string,
|
||||
params: Record<string, any>,
|
||||
envVars: Record<string, string> = {},
|
||||
blockData: Record<string, any> = {},
|
||||
blockNameMapping: Record<string, string> = {}
|
||||
): { resolvedCode: string; contextVariables: Record<string, any> } {
|
||||
envVars: Record<string, string>,
|
||||
contextVariables: Record<string, any>
|
||||
): string {
|
||||
let resolvedCode = code
|
||||
const contextVariables: Record<string, any> = {}
|
||||
|
||||
// Resolve environment variables with {{var_name}} syntax
|
||||
const envVarMatches = resolvedCode.match(/\{\{([^}]+)\}\}/g) || []
|
||||
for (const match of envVarMatches) {
|
||||
const varName = match.slice(2, -2).trim()
|
||||
@@ -245,7 +302,21 @@ function resolveCodeVariables(
|
||||
resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), safeVarName)
|
||||
}
|
||||
|
||||
// Resolve tags with <tag_name> syntax (including nested paths like <block.response.data>)
|
||||
return resolvedCode
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves tags with <tag_name> syntax (including nested paths like <block.response.data>)
|
||||
*/
|
||||
function resolveTagVariables(
|
||||
code: string,
|
||||
params: Record<string, any>,
|
||||
blockData: Record<string, any>,
|
||||
blockNameMapping: Record<string, string>,
|
||||
contextVariables: Record<string, any>
|
||||
): string {
|
||||
let resolvedCode = code
|
||||
|
||||
const tagMatches = resolvedCode.match(/<([a-zA-Z_][a-zA-Z0-9_.]*[a-zA-Z0-9_])>/g) || []
|
||||
|
||||
for (const match of tagMatches) {
|
||||
@@ -300,6 +371,42 @@ function resolveCodeVariables(
|
||||
resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), safeVarName)
|
||||
}
|
||||
|
||||
return resolvedCode
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves environment variables and tags in code
|
||||
* @param code - Code with variables
|
||||
* @param params - Parameters that may contain variable values
|
||||
* @param envVars - Environment variables from the workflow
|
||||
* @returns Resolved code
|
||||
*/
|
||||
function resolveCodeVariables(
|
||||
code: string,
|
||||
params: Record<string, any>,
|
||||
envVars: Record<string, string> = {},
|
||||
blockData: Record<string, any> = {},
|
||||
blockNameMapping: Record<string, string> = {},
|
||||
workflowVariables: Record<string, any> = {}
|
||||
): { resolvedCode: string; contextVariables: Record<string, any> } {
|
||||
let resolvedCode = code
|
||||
const contextVariables: Record<string, any> = {}
|
||||
|
||||
// Resolve workflow variables with <variable.name> syntax first
|
||||
resolvedCode = resolveWorkflowVariables(resolvedCode, workflowVariables, contextVariables)
|
||||
|
||||
// Resolve environment variables with {{var_name}} syntax
|
||||
resolvedCode = resolveEnvironmentVariables(resolvedCode, params, envVars, contextVariables)
|
||||
|
||||
// Resolve tags with <tag_name> syntax (including nested paths like <block.response.data>)
|
||||
resolvedCode = resolveTagVariables(
|
||||
resolvedCode,
|
||||
params,
|
||||
blockData,
|
||||
blockNameMapping,
|
||||
contextVariables
|
||||
)
|
||||
|
||||
return { resolvedCode, contextVariables }
|
||||
}
|
||||
|
||||
@@ -338,6 +445,7 @@ export async function POST(req: NextRequest) {
|
||||
envVars = {},
|
||||
blockData = {},
|
||||
blockNameMapping = {},
|
||||
workflowVariables = {},
|
||||
workflowId,
|
||||
isCustomTool = false,
|
||||
} = body
|
||||
@@ -360,7 +468,8 @@ export async function POST(req: NextRequest) {
|
||||
executionParams,
|
||||
envVars,
|
||||
blockData,
|
||||
blockNameMapping
|
||||
blockNameMapping,
|
||||
workflowVariables
|
||||
)
|
||||
resolvedCode = codeResolution.resolvedCode
|
||||
const contextVariables = codeResolution.contextVariables
|
||||
@@ -368,8 +477,8 @@ export async function POST(req: NextRequest) {
|
||||
const executionMethod = 'vm' // Default execution method
|
||||
|
||||
logger.info(`[${requestId}] Using VM for code execution`, {
|
||||
resolvedCode,
|
||||
hasEnvVars: Object.keys(envVars).length > 0,
|
||||
hasWorkflowVariables: Object.keys(workflowVariables).length > 0,
|
||||
})
|
||||
|
||||
// Create a secure context with console logging
|
||||
|
||||
@@ -73,30 +73,59 @@ export async function GET(request: NextRequest) {
|
||||
const { searchParams } = new URL(request.url)
|
||||
const params = QueryParamsSchema.parse(Object.fromEntries(searchParams.entries()))
|
||||
|
||||
// Conditionally select columns based on detail level to optimize performance
|
||||
const selectColumns =
|
||||
params.details === 'full'
|
||||
? {
|
||||
id: workflowExecutionLogs.id,
|
||||
workflowId: workflowExecutionLogs.workflowId,
|
||||
executionId: workflowExecutionLogs.executionId,
|
||||
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
|
||||
level: workflowExecutionLogs.level,
|
||||
trigger: workflowExecutionLogs.trigger,
|
||||
startedAt: workflowExecutionLogs.startedAt,
|
||||
endedAt: workflowExecutionLogs.endedAt,
|
||||
totalDurationMs: workflowExecutionLogs.totalDurationMs,
|
||||
executionData: workflowExecutionLogs.executionData, // Large field - only in full mode
|
||||
cost: workflowExecutionLogs.cost,
|
||||
files: workflowExecutionLogs.files, // Large field - only in full mode
|
||||
createdAt: workflowExecutionLogs.createdAt,
|
||||
workflowName: workflow.name,
|
||||
workflowDescription: workflow.description,
|
||||
workflowColor: workflow.color,
|
||||
workflowFolderId: workflow.folderId,
|
||||
workflowUserId: workflow.userId,
|
||||
workflowWorkspaceId: workflow.workspaceId,
|
||||
workflowCreatedAt: workflow.createdAt,
|
||||
workflowUpdatedAt: workflow.updatedAt,
|
||||
}
|
||||
: {
|
||||
// Basic mode - exclude large fields for better performance
|
||||
id: workflowExecutionLogs.id,
|
||||
workflowId: workflowExecutionLogs.workflowId,
|
||||
executionId: workflowExecutionLogs.executionId,
|
||||
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
|
||||
level: workflowExecutionLogs.level,
|
||||
trigger: workflowExecutionLogs.trigger,
|
||||
startedAt: workflowExecutionLogs.startedAt,
|
||||
endedAt: workflowExecutionLogs.endedAt,
|
||||
totalDurationMs: workflowExecutionLogs.totalDurationMs,
|
||||
executionData: sql<null>`NULL`, // Exclude large execution data in basic mode
|
||||
cost: workflowExecutionLogs.cost,
|
||||
files: sql<null>`NULL`, // Exclude files in basic mode
|
||||
createdAt: workflowExecutionLogs.createdAt,
|
||||
workflowName: workflow.name,
|
||||
workflowDescription: workflow.description,
|
||||
workflowColor: workflow.color,
|
||||
workflowFolderId: workflow.folderId,
|
||||
workflowUserId: workflow.userId,
|
||||
workflowWorkspaceId: workflow.workspaceId,
|
||||
workflowCreatedAt: workflow.createdAt,
|
||||
workflowUpdatedAt: workflow.updatedAt,
|
||||
}
|
||||
|
||||
const baseQuery = db
|
||||
.select({
|
||||
id: workflowExecutionLogs.id,
|
||||
workflowId: workflowExecutionLogs.workflowId,
|
||||
executionId: workflowExecutionLogs.executionId,
|
||||
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
|
||||
level: workflowExecutionLogs.level,
|
||||
trigger: workflowExecutionLogs.trigger,
|
||||
startedAt: workflowExecutionLogs.startedAt,
|
||||
endedAt: workflowExecutionLogs.endedAt,
|
||||
totalDurationMs: workflowExecutionLogs.totalDurationMs,
|
||||
executionData: workflowExecutionLogs.executionData,
|
||||
cost: workflowExecutionLogs.cost,
|
||||
files: workflowExecutionLogs.files,
|
||||
createdAt: workflowExecutionLogs.createdAt,
|
||||
workflowName: workflow.name,
|
||||
workflowDescription: workflow.description,
|
||||
workflowColor: workflow.color,
|
||||
workflowFolderId: workflow.folderId,
|
||||
workflowUserId: workflow.userId,
|
||||
workflowWorkspaceId: workflow.workspaceId,
|
||||
workflowCreatedAt: workflow.createdAt,
|
||||
workflowUpdatedAt: workflow.updatedAt,
|
||||
})
|
||||
.select(selectColumns)
|
||||
.from(workflowExecutionLogs)
|
||||
.innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
|
||||
.innerJoin(
|
||||
@@ -276,18 +305,24 @@ export async function GET(request: NextRequest) {
|
||||
const enhancedLogs = logs.map((log) => {
|
||||
const blockExecutions = blockExecutionsByExecution[log.executionId] || []
|
||||
|
||||
// Use stored trace spans if available, otherwise create from block executions
|
||||
const storedTraceSpans = (log.executionData as any)?.traceSpans
|
||||
const traceSpans =
|
||||
storedTraceSpans && Array.isArray(storedTraceSpans) && storedTraceSpans.length > 0
|
||||
? storedTraceSpans
|
||||
: createTraceSpans(blockExecutions)
|
||||
// Only process trace spans and detailed cost in full mode
|
||||
let traceSpans = []
|
||||
let costSummary = (log.cost as any) || { total: 0 }
|
||||
|
||||
// Prefer stored cost JSON; otherwise synthesize from blocks
|
||||
const costSummary =
|
||||
log.cost && Object.keys(log.cost as any).length > 0
|
||||
? (log.cost as any)
|
||||
: extractCostSummary(blockExecutions)
|
||||
if (params.details === 'full' && log.executionData) {
|
||||
// Use stored trace spans if available, otherwise create from block executions
|
||||
const storedTraceSpans = (log.executionData as any)?.traceSpans
|
||||
traceSpans =
|
||||
storedTraceSpans && Array.isArray(storedTraceSpans) && storedTraceSpans.length > 0
|
||||
? storedTraceSpans
|
||||
: createTraceSpans(blockExecutions)
|
||||
|
||||
// Prefer stored cost JSON; otherwise synthesize from blocks
|
||||
costSummary =
|
||||
log.cost && Object.keys(log.cost as any).length > 0
|
||||
? (log.cost as any)
|
||||
: extractCostSummary(blockExecutions)
|
||||
}
|
||||
|
||||
const workflowSummary = {
|
||||
id: log.workflowId,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getUserUsageData } from '@/lib/billing/core/usage'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { db } from '@/db'
|
||||
import { member, user, userStats } from '@/db/schema'
|
||||
@@ -80,8 +81,6 @@ export async function GET(
|
||||
.select({
|
||||
currentPeriodCost: userStats.currentPeriodCost,
|
||||
currentUsageLimit: userStats.currentUsageLimit,
|
||||
billingPeriodStart: userStats.billingPeriodStart,
|
||||
billingPeriodEnd: userStats.billingPeriodEnd,
|
||||
usageLimitSetBy: userStats.usageLimitSetBy,
|
||||
usageLimitUpdatedAt: userStats.usageLimitUpdatedAt,
|
||||
lastPeriodCost: userStats.lastPeriodCost,
|
||||
@@ -90,11 +89,22 @@ export async function GET(
|
||||
.where(eq(userStats.userId, memberId))
|
||||
.limit(1)
|
||||
|
||||
const computed = await getUserUsageData(memberId)
|
||||
|
||||
if (usageData.length > 0) {
|
||||
memberData = {
|
||||
...memberData,
|
||||
usage: usageData[0],
|
||||
} as typeof memberData & { usage: (typeof usageData)[0] }
|
||||
usage: {
|
||||
...usageData[0],
|
||||
billingPeriodStart: computed.billingPeriodStart,
|
||||
billingPeriodEnd: computed.billingPeriodEnd,
|
||||
},
|
||||
} as typeof memberData & {
|
||||
usage: (typeof usageData)[0] & {
|
||||
billingPeriodStart: Date | null
|
||||
billingPeriodEnd: Date | null
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@ import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getEmailSubject, renderInvitationEmail } from '@/components/emails/render-email'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getUserUsageData } from '@/lib/billing/core/usage'
|
||||
import { validateSeatAvailability } from '@/lib/billing/validation/seat-management'
|
||||
import { sendEmail } from '@/lib/email/mailer'
|
||||
import { quickValidateEmail } from '@/lib/email/validation'
|
||||
@@ -63,7 +64,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
|
||||
// Include usage data if requested and user has admin access
|
||||
if (includeUsage && hasAdminAccess) {
|
||||
const membersWithUsage = await db
|
||||
const base = await db
|
||||
.select({
|
||||
id: member.id,
|
||||
userId: member.userId,
|
||||
@@ -74,8 +75,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
userEmail: user.email,
|
||||
currentPeriodCost: userStats.currentPeriodCost,
|
||||
currentUsageLimit: userStats.currentUsageLimit,
|
||||
billingPeriodStart: userStats.billingPeriodStart,
|
||||
billingPeriodEnd: userStats.billingPeriodEnd,
|
||||
usageLimitSetBy: userStats.usageLimitSetBy,
|
||||
usageLimitUpdatedAt: userStats.usageLimitUpdatedAt,
|
||||
})
|
||||
@@ -84,6 +83,17 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
.leftJoin(userStats, eq(user.id, userStats.userId))
|
||||
.where(eq(member.organizationId, organizationId))
|
||||
|
||||
const membersWithUsage = await Promise.all(
|
||||
base.map(async (row) => {
|
||||
const usage = await getUserUsageData(row.userId)
|
||||
return {
|
||||
...row,
|
||||
billingPeriodStart: usage.billingPeriodStart,
|
||||
billingPeriodEnd: usage.billingPeriodEnd,
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: membersWithUsage,
|
||||
|
||||
@@ -39,6 +39,9 @@ export async function POST(request: NextRequest) {
|
||||
stream,
|
||||
messages,
|
||||
environmentVariables,
|
||||
workflowVariables,
|
||||
blockData,
|
||||
blockNameMapping,
|
||||
reasoningEffort,
|
||||
verbosity,
|
||||
} = body
|
||||
@@ -60,6 +63,7 @@ export async function POST(request: NextRequest) {
|
||||
messageCount: messages?.length || 0,
|
||||
hasEnvironmentVariables:
|
||||
!!environmentVariables && Object.keys(environmentVariables).length > 0,
|
||||
hasWorkflowVariables: !!workflowVariables && Object.keys(workflowVariables).length > 0,
|
||||
reasoningEffort,
|
||||
verbosity,
|
||||
})
|
||||
@@ -103,6 +107,9 @@ export async function POST(request: NextRequest) {
|
||||
stream,
|
||||
messages,
|
||||
environmentVariables,
|
||||
workflowVariables,
|
||||
blockData,
|
||||
blockNameMapping,
|
||||
reasoningEffort,
|
||||
verbosity,
|
||||
})
|
||||
|
||||
@@ -474,8 +474,10 @@ export async function GET() {
|
||||
})
|
||||
|
||||
await loggingSession.safeCompleteWithError({
|
||||
message: `Schedule execution failed before workflow started: ${earlyError.message}`,
|
||||
stackTrace: earlyError.stack,
|
||||
error: {
|
||||
message: `Schedule execution failed before workflow started: ${earlyError.message}`,
|
||||
stackTrace: earlyError.stack,
|
||||
},
|
||||
})
|
||||
} catch (loggingError) {
|
||||
logger.error(
|
||||
@@ -591,8 +593,10 @@ export async function GET() {
|
||||
})
|
||||
|
||||
await failureLoggingSession.safeCompleteWithError({
|
||||
message: `Schedule execution failed: ${error.message}`,
|
||||
stackTrace: error.stack,
|
||||
error: {
|
||||
message: `Schedule execution failed: ${error.message}`,
|
||||
stackTrace: error.stack,
|
||||
},
|
||||
})
|
||||
} catch (loggingError) {
|
||||
logger.error(
|
||||
|
||||
67
apps/sim/app/api/tools/mysql/delete/route.ts
Normal file
67
apps/sim/app/api/tools/mysql/delete/route.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { buildDeleteQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'
|
||||
|
||||
const logger = createLogger('MySQLDeleteAPI')
|
||||
|
||||
const DeleteSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
table: z.string().min(1, 'Table name is required'),
|
||||
where: z.string().min(1, 'WHERE clause is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = DeleteSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Deleting data from ${params.table} on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const connection = await createMySQLConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const { query, values } = buildDeleteQuery(params.table, params.where)
|
||||
const result = await executeQuery(connection, query, values)
|
||||
|
||||
logger.info(`[${requestId}] Delete executed successfully, ${result.rowCount} row(s) deleted`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Data deleted successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await connection.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] MySQL delete failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `MySQL delete failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
75
apps/sim/app/api/tools/mysql/execute/route.ts
Normal file
75
apps/sim/app/api/tools/mysql/execute/route.ts
Normal file
@@ -0,0 +1,75 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { createMySQLConnection, executeQuery, validateQuery } from '@/app/api/tools/mysql/utils'
|
||||
|
||||
const logger = createLogger('MySQLExecuteAPI')
|
||||
|
||||
const ExecuteSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
query: z.string().min(1, 'Query is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = ExecuteSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Executing raw SQL on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
// Validate query before execution
|
||||
const validation = validateQuery(params.query)
|
||||
if (!validation.isValid) {
|
||||
logger.warn(`[${requestId}] Query validation failed: ${validation.error}`)
|
||||
return NextResponse.json(
|
||||
{ error: `Query validation failed: ${validation.error}` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const connection = await createMySQLConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await executeQuery(connection, params.query)
|
||||
|
||||
logger.info(`[${requestId}] SQL executed successfully, ${result.rowCount} row(s) affected`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `SQL executed successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await connection.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] MySQL execute failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `MySQL execute failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
91
apps/sim/app/api/tools/mysql/insert/route.ts
Normal file
91
apps/sim/app/api/tools/mysql/insert/route.ts
Normal file
@@ -0,0 +1,91 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { buildInsertQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'
|
||||
|
||||
const logger = createLogger('MySQLInsertAPI')
|
||||
|
||||
const InsertSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
table: z.string().min(1, 'Table name is required'),
|
||||
data: z.union([
|
||||
z
|
||||
.record(z.unknown())
|
||||
.refine((obj) => Object.keys(obj).length > 0, 'Data object cannot be empty'),
|
||||
z
|
||||
.string()
|
||||
.min(1)
|
||||
.transform((str) => {
|
||||
try {
|
||||
const parsed = JSON.parse(str)
|
||||
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
|
||||
throw new Error('Data must be a JSON object')
|
||||
}
|
||||
return parsed
|
||||
} catch (e) {
|
||||
const errorMsg = e instanceof Error ? e.message : 'Unknown error'
|
||||
throw new Error(
|
||||
`Invalid JSON format in data field: ${errorMsg}. Received: ${str.substring(0, 100)}...`
|
||||
)
|
||||
}
|
||||
}),
|
||||
]),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
|
||||
logger.info(`[${requestId}] Received data field type: ${typeof body.data}, value:`, body.data)
|
||||
|
||||
const params = InsertSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Inserting data into ${params.table} on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const connection = await createMySQLConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const { query, values } = buildInsertQuery(params.table, params.data)
|
||||
const result = await executeQuery(connection, query, values)
|
||||
|
||||
logger.info(`[${requestId}] Insert executed successfully, ${result.rowCount} row(s) inserted`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Data inserted successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await connection.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] MySQL insert failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `MySQL insert failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
75
apps/sim/app/api/tools/mysql/query/route.ts
Normal file
75
apps/sim/app/api/tools/mysql/query/route.ts
Normal file
@@ -0,0 +1,75 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { createMySQLConnection, executeQuery, validateQuery } from '@/app/api/tools/mysql/utils'
|
||||
|
||||
const logger = createLogger('MySQLQueryAPI')
|
||||
|
||||
const QuerySchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
query: z.string().min(1, 'Query is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = QuerySchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Executing MySQL query on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
// Validate query before execution
|
||||
const validation = validateQuery(params.query)
|
||||
if (!validation.isValid) {
|
||||
logger.warn(`[${requestId}] Query validation failed: ${validation.error}`)
|
||||
return NextResponse.json(
|
||||
{ error: `Query validation failed: ${validation.error}` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const connection = await createMySQLConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await executeQuery(connection, params.query)
|
||||
|
||||
logger.info(`[${requestId}] Query executed successfully, returned ${result.rowCount} rows`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Query executed successfully. ${result.rowCount} row(s) returned.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await connection.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] MySQL query failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `MySQL query failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
86
apps/sim/app/api/tools/mysql/update/route.ts
Normal file
86
apps/sim/app/api/tools/mysql/update/route.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { buildUpdateQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'
|
||||
|
||||
const logger = createLogger('MySQLUpdateAPI')
|
||||
|
||||
const UpdateSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
table: z.string().min(1, 'Table name is required'),
|
||||
data: z.union([
|
||||
z
|
||||
.record(z.unknown())
|
||||
.refine((obj) => Object.keys(obj).length > 0, 'Data object cannot be empty'),
|
||||
z
|
||||
.string()
|
||||
.min(1)
|
||||
.transform((str) => {
|
||||
try {
|
||||
const parsed = JSON.parse(str)
|
||||
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
|
||||
throw new Error('Data must be a JSON object')
|
||||
}
|
||||
return parsed
|
||||
} catch (e) {
|
||||
throw new Error('Invalid JSON format in data field')
|
||||
}
|
||||
}),
|
||||
]),
|
||||
where: z.string().min(1, 'WHERE clause is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = UpdateSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Updating data in ${params.table} on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const connection = await createMySQLConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const { query, values } = buildUpdateQuery(params.table, params.data, params.where)
|
||||
const result = await executeQuery(connection, query, values)
|
||||
|
||||
logger.info(`[${requestId}] Update executed successfully, ${result.rowCount} row(s) updated`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Data updated successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await connection.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] MySQL update failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `MySQL update failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
159
apps/sim/app/api/tools/mysql/utils.ts
Normal file
159
apps/sim/app/api/tools/mysql/utils.ts
Normal file
@@ -0,0 +1,159 @@
|
||||
import mysql from 'mysql2/promise'
|
||||
|
||||
export interface MySQLConnectionConfig {
|
||||
host: string
|
||||
port: number
|
||||
database: string
|
||||
username: string
|
||||
password: string
|
||||
ssl?: string
|
||||
}
|
||||
|
||||
export async function createMySQLConnection(config: MySQLConnectionConfig) {
|
||||
const connectionConfig: mysql.ConnectionOptions = {
|
||||
host: config.host,
|
||||
port: config.port,
|
||||
database: config.database,
|
||||
user: config.username,
|
||||
password: config.password,
|
||||
}
|
||||
|
||||
// Handle SSL configuration
|
||||
if (config.ssl === 'required') {
|
||||
connectionConfig.ssl = { rejectUnauthorized: true }
|
||||
} else if (config.ssl === 'preferred') {
|
||||
connectionConfig.ssl = { rejectUnauthorized: false }
|
||||
}
|
||||
// For 'disabled', we don't set the ssl property at all
|
||||
|
||||
return mysql.createConnection(connectionConfig)
|
||||
}
|
||||
|
||||
export async function executeQuery(
|
||||
connection: mysql.Connection,
|
||||
query: string,
|
||||
values?: unknown[]
|
||||
) {
|
||||
const [rows, fields] = await connection.execute(query, values)
|
||||
|
||||
if (Array.isArray(rows)) {
|
||||
return {
|
||||
rows: rows as unknown[],
|
||||
rowCount: rows.length,
|
||||
fields,
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
rows: [],
|
||||
rowCount: (rows as mysql.ResultSetHeader).affectedRows || 0,
|
||||
fields,
|
||||
}
|
||||
}
|
||||
|
||||
export function validateQuery(query: string): { isValid: boolean; error?: string } {
|
||||
const trimmedQuery = query.trim().toLowerCase()
|
||||
|
||||
// Block dangerous SQL operations
|
||||
const dangerousPatterns = [
|
||||
/drop\s+database/i,
|
||||
/drop\s+schema/i,
|
||||
/drop\s+user/i,
|
||||
/create\s+user/i,
|
||||
/grant\s+/i,
|
||||
/revoke\s+/i,
|
||||
/alter\s+user/i,
|
||||
/set\s+global/i,
|
||||
/set\s+session/i,
|
||||
/load\s+data/i,
|
||||
/into\s+outfile/i,
|
||||
/into\s+dumpfile/i,
|
||||
/load_file\s*\(/i,
|
||||
/system\s+/i,
|
||||
/exec\s+/i,
|
||||
/execute\s+immediate/i,
|
||||
/xp_cmdshell/i,
|
||||
/sp_configure/i,
|
||||
/information_schema\.tables/i,
|
||||
/mysql\.user/i,
|
||||
/mysql\.db/i,
|
||||
/mysql\.host/i,
|
||||
/performance_schema/i,
|
||||
/sys\./i,
|
||||
]
|
||||
|
||||
for (const pattern of dangerousPatterns) {
|
||||
if (pattern.test(query)) {
|
||||
return {
|
||||
isValid: false,
|
||||
error: `Query contains potentially dangerous operation: ${pattern.source}`,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Only allow specific statement types for execute endpoint
|
||||
const allowedStatements = /^(select|insert|update|delete|with|show|describe|explain)\s+/i
|
||||
if (!allowedStatements.test(trimmedQuery)) {
|
||||
return {
|
||||
isValid: false,
|
||||
error:
|
||||
'Only SELECT, INSERT, UPDATE, DELETE, WITH, SHOW, DESCRIBE, and EXPLAIN statements are allowed',
|
||||
}
|
||||
}
|
||||
|
||||
return { isValid: true }
|
||||
}
|
||||
|
||||
export function buildInsertQuery(table: string, data: Record<string, unknown>) {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const columns = Object.keys(data)
|
||||
const values = Object.values(data)
|
||||
const placeholders = columns.map(() => '?').join(', ')
|
||||
|
||||
const query = `INSERT INTO ${sanitizedTable} (${columns.map(sanitizeIdentifier).join(', ')}) VALUES (${placeholders})`
|
||||
|
||||
return { query, values }
|
||||
}
|
||||
|
||||
export function buildUpdateQuery(table: string, data: Record<string, unknown>, where: string) {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const columns = Object.keys(data)
|
||||
const values = Object.values(data)
|
||||
|
||||
const setClause = columns.map((col) => `${sanitizeIdentifier(col)} = ?`).join(', ')
|
||||
const query = `UPDATE ${sanitizedTable} SET ${setClause} WHERE ${where}`
|
||||
|
||||
return { query, values }
|
||||
}
|
||||
|
||||
export function buildDeleteQuery(table: string, where: string) {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const query = `DELETE FROM ${sanitizedTable} WHERE ${where}`
|
||||
|
||||
return { query, values: [] }
|
||||
}
|
||||
|
||||
export function sanitizeIdentifier(identifier: string): string {
|
||||
// Handle schema.table format
|
||||
if (identifier.includes('.')) {
|
||||
const parts = identifier.split('.')
|
||||
return parts.map((part) => sanitizeSingleIdentifier(part)).join('.')
|
||||
}
|
||||
|
||||
return sanitizeSingleIdentifier(identifier)
|
||||
}
|
||||
|
||||
function sanitizeSingleIdentifier(identifier: string): string {
|
||||
// Remove any existing backticks to prevent double-escaping
|
||||
const cleaned = identifier.replace(/`/g, '')
|
||||
|
||||
// Validate identifier contains only safe characters
|
||||
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(cleaned)) {
|
||||
throw new Error(
|
||||
`Invalid identifier: ${identifier}. Identifiers must start with a letter or underscore and contain only letters, numbers, and underscores.`
|
||||
)
|
||||
}
|
||||
|
||||
// Wrap in backticks for MySQL
|
||||
return `\`${cleaned}\``
|
||||
}
|
||||
74
apps/sim/app/api/tools/postgresql/delete/route.ts
Normal file
74
apps/sim/app/api/tools/postgresql/delete/route.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
buildDeleteQuery,
|
||||
createPostgresConnection,
|
||||
executeQuery,
|
||||
} from '@/app/api/tools/postgresql/utils'
|
||||
|
||||
const logger = createLogger('PostgreSQLDeleteAPI')
|
||||
|
||||
const DeleteSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
table: z.string().min(1, 'Table name is required'),
|
||||
where: z.string().min(1, 'WHERE clause is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = DeleteSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Deleting data from ${params.table} on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const client = await createPostgresConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const { query, values } = buildDeleteQuery(params.table, params.where)
|
||||
const result = await executeQuery(client, query, values)
|
||||
|
||||
logger.info(`[${requestId}] Delete executed successfully, ${result.rowCount} row(s) deleted`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Data deleted successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] PostgreSQL delete failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `PostgreSQL delete failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
82
apps/sim/app/api/tools/postgresql/execute/route.ts
Normal file
82
apps/sim/app/api/tools/postgresql/execute/route.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
createPostgresConnection,
|
||||
executeQuery,
|
||||
validateQuery,
|
||||
} from '@/app/api/tools/postgresql/utils'
|
||||
|
||||
const logger = createLogger('PostgreSQLExecuteAPI')
|
||||
|
||||
const ExecuteSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
query: z.string().min(1, 'Query is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = ExecuteSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Executing raw SQL on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
// Validate query before execution
|
||||
const validation = validateQuery(params.query)
|
||||
if (!validation.isValid) {
|
||||
logger.warn(`[${requestId}] Query validation failed: ${validation.error}`)
|
||||
return NextResponse.json(
|
||||
{ error: `Query validation failed: ${validation.error}` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const client = await createPostgresConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await executeQuery(client, params.query)
|
||||
|
||||
logger.info(`[${requestId}] SQL executed successfully, ${result.rowCount} row(s) affected`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `SQL executed successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] PostgreSQL execute failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `PostgreSQL execute failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
99
apps/sim/app/api/tools/postgresql/insert/route.ts
Normal file
99
apps/sim/app/api/tools/postgresql/insert/route.ts
Normal file
@@ -0,0 +1,99 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
buildInsertQuery,
|
||||
createPostgresConnection,
|
||||
executeQuery,
|
||||
} from '@/app/api/tools/postgresql/utils'
|
||||
|
||||
const logger = createLogger('PostgreSQLInsertAPI')
|
||||
|
||||
const InsertSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
table: z.string().min(1, 'Table name is required'),
|
||||
data: z.union([
|
||||
z
|
||||
.record(z.unknown())
|
||||
.refine((obj) => Object.keys(obj).length > 0, 'Data object cannot be empty'),
|
||||
z
|
||||
.string()
|
||||
.min(1)
|
||||
.transform((str) => {
|
||||
try {
|
||||
const parsed = JSON.parse(str)
|
||||
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
|
||||
throw new Error('Data must be a JSON object')
|
||||
}
|
||||
return parsed
|
||||
} catch (e) {
|
||||
const errorMsg = e instanceof Error ? e.message : 'Unknown error'
|
||||
throw new Error(
|
||||
`Invalid JSON format in data field: ${errorMsg}. Received: ${str.substring(0, 100)}...`
|
||||
)
|
||||
}
|
||||
}),
|
||||
]),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
|
||||
// Debug: Log the data field to see what we're getting
|
||||
logger.info(`[${requestId}] Received data field type: ${typeof body.data}, value:`, body.data)
|
||||
|
||||
const params = InsertSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Inserting data into ${params.table} on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const client = await createPostgresConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const { query, values } = buildInsertQuery(params.table, params.data)
|
||||
const result = await executeQuery(client, query, values)
|
||||
|
||||
logger.info(`[${requestId}] Insert executed successfully, ${result.rowCount} row(s) inserted`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Data inserted successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] PostgreSQL insert failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `PostgreSQL insert failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
65
apps/sim/app/api/tools/postgresql/query/route.ts
Normal file
65
apps/sim/app/api/tools/postgresql/query/route.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { createPostgresConnection, executeQuery } from '@/app/api/tools/postgresql/utils'
|
||||
|
||||
const logger = createLogger('PostgreSQLQueryAPI')
|
||||
|
||||
const QuerySchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
query: z.string().min(1, 'Query is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = QuerySchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Executing PostgreSQL query on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const client = await createPostgresConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await executeQuery(client, params.query)
|
||||
|
||||
logger.info(`[${requestId}] Query executed successfully, returned ${result.rowCount} rows`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Query executed successfully. ${result.rowCount} row(s) returned.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] PostgreSQL query failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `PostgreSQL query failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
93
apps/sim/app/api/tools/postgresql/update/route.ts
Normal file
93
apps/sim/app/api/tools/postgresql/update/route.ts
Normal file
@@ -0,0 +1,93 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
buildUpdateQuery,
|
||||
createPostgresConnection,
|
||||
executeQuery,
|
||||
} from '@/app/api/tools/postgresql/utils'
|
||||
|
||||
const logger = createLogger('PostgreSQLUpdateAPI')
|
||||
|
||||
const UpdateSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
table: z.string().min(1, 'Table name is required'),
|
||||
data: z.union([
|
||||
z
|
||||
.record(z.unknown())
|
||||
.refine((obj) => Object.keys(obj).length > 0, 'Data object cannot be empty'),
|
||||
z
|
||||
.string()
|
||||
.min(1)
|
||||
.transform((str) => {
|
||||
try {
|
||||
const parsed = JSON.parse(str)
|
||||
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
|
||||
throw new Error('Data must be a JSON object')
|
||||
}
|
||||
return parsed
|
||||
} catch (e) {
|
||||
throw new Error('Invalid JSON format in data field')
|
||||
}
|
||||
}),
|
||||
]),
|
||||
where: z.string().min(1, 'WHERE clause is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = UpdateSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Updating data in ${params.table} on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const client = await createPostgresConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const { query, values } = buildUpdateQuery(params.table, params.data, params.where)
|
||||
const result = await executeQuery(client, query, values)
|
||||
|
||||
logger.info(`[${requestId}] Update executed successfully, ${result.rowCount} row(s) updated`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Data updated successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] PostgreSQL update failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `PostgreSQL update failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
173
apps/sim/app/api/tools/postgresql/utils.ts
Normal file
173
apps/sim/app/api/tools/postgresql/utils.ts
Normal file
@@ -0,0 +1,173 @@
|
||||
import { Client } from 'pg'
|
||||
import type { PostgresConnectionConfig } from '@/tools/postgresql/types'
|
||||
|
||||
export async function createPostgresConnection(config: PostgresConnectionConfig): Promise<Client> {
|
||||
const client = new Client({
|
||||
host: config.host,
|
||||
port: config.port,
|
||||
database: config.database,
|
||||
user: config.username,
|
||||
password: config.password,
|
||||
ssl:
|
||||
config.ssl === 'disabled'
|
||||
? false
|
||||
: config.ssl === 'required'
|
||||
? true
|
||||
: config.ssl === 'preferred'
|
||||
? { rejectUnauthorized: false }
|
||||
: false,
|
||||
connectionTimeoutMillis: 10000, // 10 seconds
|
||||
query_timeout: 30000, // 30 seconds
|
||||
})
|
||||
|
||||
try {
|
||||
await client.connect()
|
||||
return client
|
||||
} catch (error) {
|
||||
await client.end()
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeQuery(
|
||||
client: Client,
|
||||
query: string,
|
||||
params: unknown[] = []
|
||||
): Promise<{ rows: unknown[]; rowCount: number }> {
|
||||
const result = await client.query(query, params)
|
||||
return {
|
||||
rows: result.rows || [],
|
||||
rowCount: result.rowCount || 0,
|
||||
}
|
||||
}
|
||||
|
||||
export function validateQuery(query: string): { isValid: boolean; error?: string } {
|
||||
const trimmedQuery = query.trim().toLowerCase()
|
||||
|
||||
// Block dangerous SQL operations
|
||||
const dangerousPatterns = [
|
||||
/drop\s+database/i,
|
||||
/drop\s+schema/i,
|
||||
/drop\s+user/i,
|
||||
/create\s+user/i,
|
||||
/create\s+role/i,
|
||||
/grant\s+/i,
|
||||
/revoke\s+/i,
|
||||
/alter\s+user/i,
|
||||
/alter\s+role/i,
|
||||
/set\s+role/i,
|
||||
/reset\s+role/i,
|
||||
/copy\s+.*from/i,
|
||||
/copy\s+.*to/i,
|
||||
/lo_import/i,
|
||||
/lo_export/i,
|
||||
/pg_read_file/i,
|
||||
/pg_write_file/i,
|
||||
/pg_ls_dir/i,
|
||||
/information_schema\.tables/i,
|
||||
/pg_catalog/i,
|
||||
/pg_user/i,
|
||||
/pg_shadow/i,
|
||||
/pg_roles/i,
|
||||
/pg_authid/i,
|
||||
/pg_stat_activity/i,
|
||||
/dblink/i,
|
||||
/\\\\copy/i,
|
||||
]
|
||||
|
||||
for (const pattern of dangerousPatterns) {
|
||||
if (pattern.test(query)) {
|
||||
return {
|
||||
isValid: false,
|
||||
error: `Query contains potentially dangerous operation: ${pattern.source}`,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Only allow specific statement types for execute endpoint
|
||||
const allowedStatements = /^(select|insert|update|delete|with|explain|analyze|show)\s+/i
|
||||
if (!allowedStatements.test(trimmedQuery)) {
|
||||
return {
|
||||
isValid: false,
|
||||
error:
|
||||
'Only SELECT, INSERT, UPDATE, DELETE, WITH, EXPLAIN, ANALYZE, and SHOW statements are allowed',
|
||||
}
|
||||
}
|
||||
|
||||
return { isValid: true }
|
||||
}
|
||||
|
||||
export function sanitizeIdentifier(identifier: string): string {
|
||||
// Handle schema.table format
|
||||
if (identifier.includes('.')) {
|
||||
const parts = identifier.split('.')
|
||||
return parts.map((part) => sanitizeSingleIdentifier(part)).join('.')
|
||||
}
|
||||
|
||||
return sanitizeSingleIdentifier(identifier)
|
||||
}
|
||||
|
||||
function sanitizeSingleIdentifier(identifier: string): string {
|
||||
// Remove any existing double quotes to prevent double-escaping
|
||||
const cleaned = identifier.replace(/"/g, '')
|
||||
|
||||
// Validate identifier contains only safe characters
|
||||
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(cleaned)) {
|
||||
throw new Error(
|
||||
`Invalid identifier: ${identifier}. Identifiers must start with a letter or underscore and contain only letters, numbers, and underscores.`
|
||||
)
|
||||
}
|
||||
|
||||
// Wrap in double quotes for PostgreSQL
|
||||
return `"${cleaned}"`
|
||||
}
|
||||
|
||||
export function buildInsertQuery(
|
||||
table: string,
|
||||
data: Record<string, unknown>
|
||||
): {
|
||||
query: string
|
||||
values: unknown[]
|
||||
} {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const columns = Object.keys(data)
|
||||
const sanitizedColumns = columns.map((col) => sanitizeIdentifier(col))
|
||||
const placeholders = columns.map((_, index) => `$${index + 1}`)
|
||||
const values = columns.map((col) => data[col])
|
||||
|
||||
const query = `INSERT INTO ${sanitizedTable} (${sanitizedColumns.join(', ')}) VALUES (${placeholders.join(', ')}) RETURNING *`
|
||||
|
||||
return { query, values }
|
||||
}
|
||||
|
||||
export function buildUpdateQuery(
|
||||
table: string,
|
||||
data: Record<string, unknown>,
|
||||
where: string
|
||||
): {
|
||||
query: string
|
||||
values: unknown[]
|
||||
} {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const columns = Object.keys(data)
|
||||
const sanitizedColumns = columns.map((col) => sanitizeIdentifier(col))
|
||||
const setClause = sanitizedColumns.map((col, index) => `${col} = $${index + 1}`).join(', ')
|
||||
const values = columns.map((col) => data[col])
|
||||
|
||||
const query = `UPDATE ${sanitizedTable} SET ${setClause} WHERE ${where} RETURNING *`
|
||||
|
||||
return { query, values }
|
||||
}
|
||||
|
||||
export function buildDeleteQuery(
|
||||
table: string,
|
||||
where: string
|
||||
): {
|
||||
query: string
|
||||
values: unknown[]
|
||||
} {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const query = `DELETE FROM ${sanitizedTable} WHERE ${where} RETURNING *`
|
||||
|
||||
return { query, values: [] }
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Check, Eye, X } from 'lucide-react'
|
||||
import { Eye, EyeOff } from 'lucide-react'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useCopilotStore } from '@/stores/copilot/store'
|
||||
@@ -201,6 +201,34 @@ export function DiffControls() {
|
||||
logger.warn('Failed to clear preview YAML:', error)
|
||||
})
|
||||
|
||||
// Resolve target toolCallId for build/edit and update to terminal success state in the copilot store
|
||||
try {
|
||||
const { toolCallsById, messages } = useCopilotStore.getState()
|
||||
let id: string | undefined
|
||||
outer: for (let mi = messages.length - 1; mi >= 0; mi--) {
|
||||
const m = messages[mi]
|
||||
if (m.role !== 'assistant' || !m.contentBlocks) continue
|
||||
const blocks = m.contentBlocks as any[]
|
||||
for (let bi = blocks.length - 1; bi >= 0; bi--) {
|
||||
const b = blocks[bi]
|
||||
if (b?.type === 'tool_call') {
|
||||
const tn = b.toolCall?.name
|
||||
if (tn === 'build_workflow' || tn === 'edit_workflow') {
|
||||
id = b.toolCall?.id
|
||||
break outer
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!id) {
|
||||
const candidates = Object.values(toolCallsById).filter(
|
||||
(t) => t.name === 'build_workflow' || t.name === 'edit_workflow'
|
||||
)
|
||||
id = candidates.length ? candidates[candidates.length - 1].id : undefined
|
||||
}
|
||||
if (id) updatePreviewToolCallState('accepted', id)
|
||||
} catch {}
|
||||
|
||||
// Accept changes without blocking the UI; errors will be logged by the store handler
|
||||
acceptChanges().catch((error) => {
|
||||
logger.error('Failed to accept changes (background):', error)
|
||||
@@ -224,6 +252,34 @@ export function DiffControls() {
|
||||
logger.warn('Failed to clear preview YAML:', error)
|
||||
})
|
||||
|
||||
// Resolve target toolCallId for build/edit and update to terminal rejected state in the copilot store
|
||||
try {
|
||||
const { toolCallsById, messages } = useCopilotStore.getState()
|
||||
let id: string | undefined
|
||||
outer: for (let mi = messages.length - 1; mi >= 0; mi--) {
|
||||
const m = messages[mi]
|
||||
if (m.role !== 'assistant' || !m.contentBlocks) continue
|
||||
const blocks = m.contentBlocks as any[]
|
||||
for (let bi = blocks.length - 1; bi >= 0; bi--) {
|
||||
const b = blocks[bi]
|
||||
if (b?.type === 'tool_call') {
|
||||
const tn = b.toolCall?.name
|
||||
if (tn === 'build_workflow' || tn === 'edit_workflow') {
|
||||
id = b.toolCall?.id
|
||||
break outer
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!id) {
|
||||
const candidates = Object.values(toolCallsById).filter(
|
||||
(t) => t.name === 'build_workflow' || t.name === 'edit_workflow'
|
||||
)
|
||||
id = candidates.length ? candidates[candidates.length - 1].id : undefined
|
||||
}
|
||||
if (id) updatePreviewToolCallState('rejected', id)
|
||||
} catch {}
|
||||
|
||||
// Reject changes optimistically
|
||||
rejectChanges().catch((error) => {
|
||||
logger.error('Failed to reject changes (background):', error)
|
||||
@@ -232,58 +288,39 @@ export function DiffControls() {
|
||||
|
||||
return (
|
||||
<div className='-translate-x-1/2 fixed bottom-20 left-1/2 z-30'>
|
||||
<div className='rounded-lg border bg-background/95 p-4 shadow-lg backdrop-blur-sm'>
|
||||
<div className='flex items-center gap-4'>
|
||||
{/* Info section */}
|
||||
<div className='flex items-center gap-2'>
|
||||
<div className='flex h-8 w-8 items-center justify-center rounded-full bg-purple-100 dark:bg-purple-900'>
|
||||
<Eye className='h-4 w-4 text-purple-600 dark:text-purple-400' />
|
||||
</div>
|
||||
<div className='flex flex-col'>
|
||||
<span className='font-medium text-sm'>
|
||||
{isShowingDiff ? 'Viewing Proposed Changes' : 'Copilot has proposed changes'}
|
||||
</span>
|
||||
{diffMetadata && (
|
||||
<span className='text-muted-foreground text-xs'>
|
||||
Source: {diffMetadata.source} •{' '}
|
||||
{new Date(diffMetadata.timestamp).toLocaleTimeString()}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
<div className='flex items-center gap-2'>
|
||||
{/* Toggle (left, icon-only, no background) */}
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={handleToggleDiff}
|
||||
className='h-8 rounded-full px-2 text-muted-foreground hover:bg-transparent'
|
||||
title={isShowingDiff ? 'View original' : 'Preview changes'}
|
||||
>
|
||||
{isShowingDiff ? <Eye className='h-5 w-5' /> : <EyeOff className='h-5 w-5' />}
|
||||
</Button>
|
||||
|
||||
{/* Controls */}
|
||||
<div className='flex items-center gap-2'>
|
||||
{/* Toggle View Button */}
|
||||
<Button
|
||||
variant={isShowingDiff ? 'default' : 'outline'}
|
||||
size='sm'
|
||||
onClick={handleToggleDiff}
|
||||
className='h-8'
|
||||
>
|
||||
{isShowingDiff ? 'View Original' : 'Preview Changes'}
|
||||
</Button>
|
||||
{/* Reject (middle, light gray, icon-only) */}
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
onClick={handleReject}
|
||||
className='h-8 rounded-[6px] border-gray-200 bg-gray-100 px-3 text-gray-700 hover:bg-gray-200 dark:border-gray-700 dark:bg-gray-800 dark:text-gray-200 dark:hover:bg-gray-700'
|
||||
title='Reject changes'
|
||||
>
|
||||
Reject
|
||||
</Button>
|
||||
|
||||
{/* Accept/Reject buttons - only show when viewing diff */}
|
||||
{isShowingDiff && (
|
||||
<>
|
||||
<Button
|
||||
variant='default'
|
||||
size='sm'
|
||||
onClick={handleAccept}
|
||||
className='h-8 bg-green-600 px-3 hover:bg-green-700'
|
||||
>
|
||||
<Check className='mr-1 h-3 w-3' />
|
||||
Accept
|
||||
</Button>
|
||||
<Button variant='destructive' size='sm' onClick={handleReject} className='h-8 px-3'>
|
||||
<X className='mr-1 h-3 w-3' />
|
||||
Reject
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
{/* Accept (right, brand purple, icon-only) */}
|
||||
<Button
|
||||
variant='default'
|
||||
size='sm'
|
||||
onClick={handleAccept}
|
||||
className='h-8 rounded-[6px] bg-[var(--brand-primary-hover-hex)] px-3 text-white hover:bg-[var(--brand-primary-hover-hex)]/90 hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)]'
|
||||
title='Accept changes'
|
||||
>
|
||||
Accept
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
|
||||
@@ -4,6 +4,7 @@ import { type KeyboardEvent, useCallback, useEffect, useMemo, useRef, useState }
|
||||
import { ArrowDown, ArrowUp } from 'lucide-react'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { Notice } from '@/components/ui/notice'
|
||||
import { ScrollArea } from '@/components/ui/scroll-area'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
@@ -32,12 +33,11 @@ interface ChatFile {
|
||||
}
|
||||
|
||||
interface ChatProps {
|
||||
panelWidth: number
|
||||
chatMessage: string
|
||||
setChatMessage: (message: string) => void
|
||||
}
|
||||
|
||||
export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
|
||||
export function Chat({ chatMessage, setChatMessage }: ChatProps) {
|
||||
const { activeWorkflowId } = useWorkflowRegistry()
|
||||
|
||||
const {
|
||||
@@ -63,6 +63,7 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
|
||||
// File upload state
|
||||
const [chatFiles, setChatFiles] = useState<ChatFile[]>([])
|
||||
const [isUploadingFiles, setIsUploadingFiles] = useState(false)
|
||||
const [uploadErrors, setUploadErrors] = useState<string[]>([])
|
||||
const [dragCounter, setDragCounter] = useState(0)
|
||||
const isDragOver = dragCounter > 0
|
||||
// Scroll state
|
||||
@@ -280,11 +281,15 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
|
||||
type: chatFile.type,
|
||||
file: chatFile.file, // Pass the actual File object
|
||||
}))
|
||||
workflowInput.onUploadError = (message: string) => {
|
||||
setUploadErrors((prev) => [...prev, message])
|
||||
}
|
||||
}
|
||||
|
||||
// Clear input and files, refocus immediately
|
||||
setChatMessage('')
|
||||
setChatFiles([])
|
||||
setUploadErrors([])
|
||||
focusInput(10)
|
||||
|
||||
// Execute the workflow to generate a response
|
||||
@@ -560,14 +565,16 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
|
||||
No messages yet
|
||||
</div>
|
||||
) : (
|
||||
<ScrollArea ref={scrollAreaRef} className='h-full pb-2' hideScrollbar={true}>
|
||||
<div>
|
||||
{workflowMessages.map((message) => (
|
||||
<ChatMessage key={message.id} message={message} />
|
||||
))}
|
||||
<div ref={messagesEndRef} />
|
||||
</div>
|
||||
</ScrollArea>
|
||||
<div ref={scrollAreaRef} className='h-full'>
|
||||
<ScrollArea className='h-full pb-2' hideScrollbar={true}>
|
||||
<div>
|
||||
{workflowMessages.map((message) => (
|
||||
<ChatMessage key={message.id} message={message} />
|
||||
))}
|
||||
<div ref={messagesEndRef} />
|
||||
</div>
|
||||
</ScrollArea>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Scroll to bottom button */}
|
||||
@@ -615,26 +622,68 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
|
||||
if (!(!activeWorkflowId || isExecuting || isUploadingFiles)) {
|
||||
const droppedFiles = Array.from(e.dataTransfer.files)
|
||||
if (droppedFiles.length > 0) {
|
||||
const newFiles = droppedFiles.slice(0, 5 - chatFiles.length).map((file) => ({
|
||||
id: crypto.randomUUID(),
|
||||
name: file.name,
|
||||
size: file.size,
|
||||
type: file.type,
|
||||
file,
|
||||
}))
|
||||
setChatFiles([...chatFiles, ...newFiles])
|
||||
const remainingSlots = Math.max(0, 5 - chatFiles.length)
|
||||
const candidateFiles = droppedFiles.slice(0, remainingSlots)
|
||||
const errors: string[] = []
|
||||
const validNewFiles: ChatFile[] = []
|
||||
|
||||
for (const file of candidateFiles) {
|
||||
if (file.size > 10 * 1024 * 1024) {
|
||||
errors.push(`${file.name} is too large (max 10MB)`)
|
||||
continue
|
||||
}
|
||||
|
||||
const isDuplicate = chatFiles.some(
|
||||
(existingFile) =>
|
||||
existingFile.name === file.name && existingFile.size === file.size
|
||||
)
|
||||
if (isDuplicate) {
|
||||
errors.push(`${file.name} already added`)
|
||||
continue
|
||||
}
|
||||
|
||||
validNewFiles.push({
|
||||
id: crypto.randomUUID(),
|
||||
name: file.name,
|
||||
size: file.size,
|
||||
type: file.type,
|
||||
file,
|
||||
})
|
||||
}
|
||||
|
||||
if (errors.length > 0) {
|
||||
setUploadErrors(errors)
|
||||
}
|
||||
|
||||
if (validNewFiles.length > 0) {
|
||||
setChatFiles([...chatFiles, ...validNewFiles])
|
||||
}
|
||||
}
|
||||
}
|
||||
}}
|
||||
>
|
||||
{/* File upload section */}
|
||||
<div className='mb-2'>
|
||||
{uploadErrors.length > 0 && (
|
||||
<div className='mb-2'>
|
||||
<Notice variant='error' title='File upload error'>
|
||||
<ul className='list-disc pl-5'>
|
||||
{uploadErrors.map((err, idx) => (
|
||||
<li key={idx}>{err}</li>
|
||||
))}
|
||||
</ul>
|
||||
</Notice>
|
||||
</div>
|
||||
)}
|
||||
<ChatFileUpload
|
||||
files={chatFiles}
|
||||
onFilesChange={setChatFiles}
|
||||
onFilesChange={(files) => {
|
||||
setChatFiles(files)
|
||||
}}
|
||||
maxFiles={5}
|
||||
maxSize={10}
|
||||
disabled={!activeWorkflowId || isExecuting || isUploadingFiles}
|
||||
onError={(errors) => setUploadErrors(errors)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@ interface ChatFileUploadProps {
|
||||
maxSize?: number // in MB
|
||||
acceptedTypes?: string[]
|
||||
disabled?: boolean
|
||||
onError?: (errors: string[]) => void
|
||||
}
|
||||
|
||||
export function ChatFileUpload({
|
||||
@@ -30,6 +31,7 @@ export function ChatFileUpload({
|
||||
maxSize = 10,
|
||||
acceptedTypes = ['*'],
|
||||
disabled = false,
|
||||
onError,
|
||||
}: ChatFileUploadProps) {
|
||||
const [isDragOver, setIsDragOver] = useState(false)
|
||||
const fileInputRef = useRef<HTMLInputElement>(null)
|
||||
@@ -91,7 +93,7 @@ export function ChatFileUpload({
|
||||
|
||||
if (errors.length > 0) {
|
||||
logger.warn('File upload errors:', errors)
|
||||
// You could show these errors in a toast or alert
|
||||
onError?.(errors)
|
||||
}
|
||||
|
||||
if (newFiles.length > 0) {
|
||||
@@ -168,7 +170,12 @@ export function ChatFileUpload({
|
||||
ref={fileInputRef}
|
||||
type='file'
|
||||
multiple
|
||||
onChange={(e) => handleFileSelect(e.target.files)}
|
||||
onChange={(e) => {
|
||||
handleFileSelect(e.target.files)
|
||||
if (fileInputRef.current) {
|
||||
fileInputRef.current.value = ''
|
||||
}
|
||||
}}
|
||||
className='hidden'
|
||||
accept={acceptedTypes.join(',')}
|
||||
disabled={disabled}
|
||||
|
||||
@@ -25,7 +25,7 @@ export function Console({ panelWidth }: ConsoleProps) {
|
||||
No console entries
|
||||
</div>
|
||||
) : (
|
||||
<ScrollArea className='h-full' hideScrollbar={true}>
|
||||
<ScrollArea className='h-full' hideScrollbar={false}>
|
||||
<div className='space-y-3'>
|
||||
{filteredEntries.map((entry) => (
|
||||
<ConsoleEntry key={entry.id} entry={entry} consoleWidth={panelWidth} />
|
||||
|
||||
@@ -78,6 +78,14 @@ if (typeof document !== 'undefined') {
|
||||
overflow-wrap: anywhere !important;
|
||||
word-break: break-word !important;
|
||||
}
|
||||
|
||||
/* Reduce top margin for first heading (e.g., right after thinking block) */
|
||||
.copilot-markdown-wrapper > h1:first-child,
|
||||
.copilot-markdown-wrapper > h2:first-child,
|
||||
.copilot-markdown-wrapper > h3:first-child,
|
||||
.copilot-markdown-wrapper > h4:first-child {
|
||||
margin-top: 0.25rem !important;
|
||||
}
|
||||
`
|
||||
document.head.appendChild(style)
|
||||
}
|
||||
@@ -140,17 +148,17 @@ export default function CopilotMarkdownRenderer({ content }: CopilotMarkdownRend
|
||||
|
||||
// Headings
|
||||
h1: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
|
||||
<h1 className='mt-10 mb-5 font-geist-sans font-semibold text-2xl text-gray-900 dark:text-gray-100'>
|
||||
<h1 className='mt-3 mb-3 font-geist-sans font-semibold text-2xl text-gray-900 dark:text-gray-100'>
|
||||
{children}
|
||||
</h1>
|
||||
),
|
||||
h2: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
|
||||
<h2 className='mt-8 mb-4 font-geist-sans font-semibold text-gray-900 text-xl dark:text-gray-100'>
|
||||
<h2 className='mt-2.5 mb-2.5 font-geist-sans font-semibold text-gray-900 text-xl dark:text-gray-100'>
|
||||
{children}
|
||||
</h2>
|
||||
),
|
||||
h3: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
|
||||
<h3 className='mt-7 mb-3 font-geist-sans font-semibold text-gray-900 text-lg dark:text-gray-100'>
|
||||
<h3 className='mt-2 mb-2 font-geist-sans font-semibold text-gray-900 text-lg dark:text-gray-100'>
|
||||
{children}
|
||||
</h3>
|
||||
),
|
||||
|
||||
@@ -19,6 +19,8 @@ export function ThinkingBlock({
|
||||
}: ThinkingBlockProps) {
|
||||
const [isExpanded, setIsExpanded] = useState(false)
|
||||
const [duration, setDuration] = useState(persistedDuration ?? 0)
|
||||
// Track if the user explicitly collapsed while streaming; sticky per block instance
|
||||
const userCollapsedRef = useRef<boolean>(false)
|
||||
// Keep a stable reference to start time that updates when prop changes
|
||||
const startTimeRef = useRef<number>(persistedStartTime ?? Date.now())
|
||||
useEffect(() => {
|
||||
@@ -28,13 +30,14 @@ export function ThinkingBlock({
|
||||
}, [persistedStartTime])
|
||||
|
||||
useEffect(() => {
|
||||
// Auto-collapse when streaming ends
|
||||
// Auto-collapse when streaming ends and reset userCollapsed flag
|
||||
if (!isStreaming) {
|
||||
setIsExpanded(false)
|
||||
userCollapsedRef.current = false
|
||||
return
|
||||
}
|
||||
// Expand once there is visible content while streaming
|
||||
if (content && content.trim().length > 0) {
|
||||
// Expand once there is visible content while streaming, unless user collapsed
|
||||
if (!userCollapsedRef.current && content && content.trim().length > 0) {
|
||||
setIsExpanded(true)
|
||||
}
|
||||
}, [isStreaming, content])
|
||||
@@ -65,9 +68,16 @@ export function ThinkingBlock({
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='my-1'>
|
||||
<div className='mt-1 mb-0'>
|
||||
<button
|
||||
onClick={() => setIsExpanded((v) => !v)}
|
||||
onClick={() => {
|
||||
setIsExpanded((v) => {
|
||||
const next = !v
|
||||
// If user collapses during streaming, remember to not auto-expand again
|
||||
if (!next && isStreaming) userCollapsedRef.current = true
|
||||
return next
|
||||
})
|
||||
}}
|
||||
className={cn(
|
||||
'mb-1 inline-flex items-center gap-1 text-gray-400 text-xs transition-colors hover:text-gray-500',
|
||||
'font-normal italic'
|
||||
|
||||
@@ -12,7 +12,7 @@ import {
|
||||
ThumbsUp,
|
||||
X,
|
||||
} from 'lucide-react'
|
||||
import { InlineToolCall } from '@/lib/copilot/tools/inline-tool-call'
|
||||
import { InlineToolCall } from '@/lib/copilot/inline-tool-call'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { usePreviewStore } from '@/stores/copilot/preview-store'
|
||||
import { useCopilotStore } from '@/stores/copilot/store'
|
||||
@@ -594,18 +594,14 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
)
|
||||
}
|
||||
if (block.type === 'tool_call') {
|
||||
// Skip hidden tools (like checkoff_todo)
|
||||
if (block.toolCall.hidden) {
|
||||
return null
|
||||
}
|
||||
|
||||
// Visibility and filtering handled by InlineToolCall
|
||||
return (
|
||||
<div
|
||||
key={`tool-${block.toolCall.id}`}
|
||||
className='transition-opacity duration-300 ease-in-out'
|
||||
style={{ opacity: 1 }}
|
||||
>
|
||||
<InlineToolCall toolCall={block.toolCall} />
|
||||
<InlineToolCall toolCallId={block.toolCall.id} toolCall={block.toolCall} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -625,7 +621,47 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className='flex justify-end'>
|
||||
<div className='flex items-center justify-end gap-0'>
|
||||
{hasCheckpoints && (
|
||||
<div className='mr-1 inline-flex items-center justify-center'>
|
||||
{showRestoreConfirmation ? (
|
||||
<div className='inline-flex items-center gap-1'>
|
||||
<button
|
||||
onClick={handleConfirmRevert}
|
||||
disabled={isRevertingCheckpoint}
|
||||
className='text-muted-foreground transition-colors hover:bg-muted disabled:cursor-not-allowed disabled:opacity-50'
|
||||
title='Confirm restore'
|
||||
aria-label='Confirm restore'
|
||||
>
|
||||
{isRevertingCheckpoint ? (
|
||||
<Loader2 className='h-3 w-3 animate-spin' />
|
||||
) : (
|
||||
<Check className='h-3 w-3' />
|
||||
)}
|
||||
</button>
|
||||
<button
|
||||
onClick={handleCancelRevert}
|
||||
disabled={isRevertingCheckpoint}
|
||||
className='text-muted-foreground transition-colors hover:bg-muted disabled:cursor-not-allowed disabled:opacity-50'
|
||||
title='Cancel restore'
|
||||
aria-label='Cancel restore'
|
||||
>
|
||||
<X className='h-3 w-3' />
|
||||
</button>
|
||||
</div>
|
||||
) : (
|
||||
<button
|
||||
onClick={handleRevertToCheckpoint}
|
||||
disabled={isRevertingCheckpoint}
|
||||
className='text-muted-foreground transition-colors hover:bg-muted disabled:cursor-not-allowed disabled:opacity-50'
|
||||
title='Restore workflow to this checkpoint state'
|
||||
aria-label='Restore'
|
||||
>
|
||||
<RotateCcw className='h-3 w-3' />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
<div className='min-w-0 max-w-[80%]'>
|
||||
{/* Message content in purple box */}
|
||||
<div
|
||||
@@ -639,55 +675,6 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
<WordWrap text={message.content} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Checkpoints below message */}
|
||||
{hasCheckpoints && (
|
||||
<div className='mt-1 flex justify-end'>
|
||||
<div className='inline-flex items-center gap-0.5 text-muted-foreground text-xs'>
|
||||
<span className='select-none'>
|
||||
Restore{showRestoreConfirmation && <span className='ml-0.5'>?</span>}
|
||||
</span>
|
||||
<div className='inline-flex w-8 items-center justify-center'>
|
||||
{showRestoreConfirmation ? (
|
||||
<div className='inline-flex items-center gap-1'>
|
||||
<button
|
||||
onClick={handleConfirmRevert}
|
||||
disabled={isRevertingCheckpoint}
|
||||
className='text-muted-foreground transition-colors hover:bg-muted disabled:cursor-not-allowed disabled:opacity-50'
|
||||
title='Confirm restore'
|
||||
aria-label='Confirm restore'
|
||||
>
|
||||
{isRevertingCheckpoint ? (
|
||||
<Loader2 className='h-3 w-3 animate-spin' />
|
||||
) : (
|
||||
<Check className='h-3 w-3' />
|
||||
)}
|
||||
</button>
|
||||
<button
|
||||
onClick={handleCancelRevert}
|
||||
disabled={isRevertingCheckpoint}
|
||||
className='text-muted-foreground transition-colors hover:bg-muted disabled:cursor-not-allowed disabled:opacity-50'
|
||||
title='Cancel restore'
|
||||
aria-label='Cancel restore'
|
||||
>
|
||||
<X className='h-3 w-3' />
|
||||
</button>
|
||||
</div>
|
||||
) : (
|
||||
<button
|
||||
onClick={handleRevertToCheckpoint}
|
||||
disabled={isRevertingCheckpoint}
|
||||
className='text-muted-foreground transition-colors hover:bg-muted disabled:cursor-not-allowed disabled:opacity-50'
|
||||
title='Restore workflow to this checkpoint state'
|
||||
aria-label='Restore'
|
||||
>
|
||||
<RotateCcw className='h-3 w-3' />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -98,22 +98,22 @@ export const TodoList = memo(function TodoList({
|
||||
index !== todos.length - 1 && 'border-gray-50 border-b dark:border-gray-800'
|
||||
)}
|
||||
>
|
||||
<div
|
||||
className={cn(
|
||||
'mt-0.5 flex h-4 w-4 flex-shrink-0 items-center justify-center rounded border transition-all',
|
||||
todo.executing
|
||||
? 'border-blue-400 dark:border-blue-500'
|
||||
: todo.completed
|
||||
{todo.executing ? (
|
||||
<div className='mt-0.5 flex h-4 w-4 flex-shrink-0 items-center justify-center'>
|
||||
<Loader2 className='h-3 w-3 animate-spin text-blue-500' />
|
||||
</div>
|
||||
) : (
|
||||
<div
|
||||
className={cn(
|
||||
'mt-0.5 flex h-4 w-4 flex-shrink-0 items-center justify-center rounded border transition-all',
|
||||
todo.completed
|
||||
? 'border-blue-500 bg-blue-500'
|
||||
: 'border-gray-300 dark:border-gray-600'
|
||||
)}
|
||||
>
|
||||
{todo.executing ? (
|
||||
<Loader2 className='h-3 w-3 animate-spin text-blue-500' />
|
||||
) : todo.completed ? (
|
||||
<Check className='h-3 w-3 text-white' strokeWidth={3} />
|
||||
) : null}
|
||||
</div>
|
||||
)}
|
||||
>
|
||||
{todo.completed ? <Check className='h-3 w-3 text-white' strokeWidth={3} /> : null}
|
||||
</div>
|
||||
)}
|
||||
|
||||
<span
|
||||
className={cn(
|
||||
|
||||
@@ -16,10 +16,10 @@ export const CopilotSlider = React.forwardRef<
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
<SliderPrimitive.Track className='relative h-2 w-full grow cursor-pointer overflow-hidden rounded-full bg-input'>
|
||||
<SliderPrimitive.Track className='relative h-1.5 w-full grow cursor-pointer overflow-hidden rounded-full bg-input'>
|
||||
<SliderPrimitive.Range className='absolute h-full bg-primary' />
|
||||
</SliderPrimitive.Track>
|
||||
<SliderPrimitive.Thumb className='block h-5 w-5 cursor-pointer rounded-full border-2 border-primary bg-background ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50' />
|
||||
<SliderPrimitive.Thumb className='block h-3.5 w-3.5 cursor-pointer rounded-full border border-primary/60 bg-background shadow-sm transition-all hover:shadow focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-primary/40 focus-visible:ring-offset-0 disabled:pointer-events-none disabled:opacity-50' />
|
||||
</SliderPrimitive.Root>
|
||||
))
|
||||
CopilotSlider.displayName = 'CopilotSlider'
|
||||
|
||||
@@ -120,12 +120,15 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
const setMessage =
|
||||
controlledValue !== undefined ? onControlledChange || (() => {}) : setInternalMessage
|
||||
|
||||
// Auto-resize textarea
|
||||
// Auto-resize textarea and toggle vertical scroll when exceeding max height
|
||||
useEffect(() => {
|
||||
const textarea = textareaRef.current
|
||||
if (textarea) {
|
||||
const maxHeight = 120
|
||||
textarea.style.height = 'auto'
|
||||
textarea.style.height = `${Math.min(textarea.scrollHeight, 120)}px` // Max height of 120px
|
||||
const nextHeight = Math.min(textarea.scrollHeight, maxHeight)
|
||||
textarea.style.height = `${nextHeight}px`
|
||||
textarea.style.overflowY = textarea.scrollHeight > maxHeight ? 'auto' : 'hidden'
|
||||
}
|
||||
}, [message])
|
||||
|
||||
@@ -431,6 +434,13 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
// Depth toggle state comes from global store; access via useCopilotStore
|
||||
const { agentDepth, agentPrefetch, setAgentDepth, setAgentPrefetch } = useCopilotStore()
|
||||
|
||||
// Ensure MAX mode is off for Fast and Balanced depths
|
||||
useEffect(() => {
|
||||
if (agentDepth < 2 && !agentPrefetch) {
|
||||
setAgentPrefetch(true)
|
||||
}
|
||||
}, [agentDepth, agentPrefetch, setAgentPrefetch])
|
||||
|
||||
const cycleDepth = () => {
|
||||
// 8 modes: depths 0-3, each with prefetch off/on. Cycle depth, then toggle prefetch when wrapping.
|
||||
const nextDepth = agentDepth === 3 ? 0 : ((agentDepth + 1) as 0 | 1 | 2 | 3)
|
||||
@@ -446,24 +456,27 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
}
|
||||
|
||||
const getDepthLabelFor = (value: 0 | 1 | 2 | 3) => {
|
||||
return value === 0 ? 'Fast' : value === 1 ? 'Balanced' : value === 2 ? 'Advanced' : 'Expert'
|
||||
return value === 0 ? 'Fast' : value === 1 ? 'Balanced' : value === 2 ? 'Advanced' : 'Behemoth'
|
||||
}
|
||||
|
||||
// Removed descriptive suffixes; concise labels only
|
||||
const getDepthDescription = (value: 0 | 1 | 2 | 3) => {
|
||||
if (value === 0)
|
||||
return 'Fastest and cheapest. Good for small edits, simple workflows, and small tasks.'
|
||||
if (value === 1) return 'Balances speed and reasoning. Good fit for most tasks.'
|
||||
return 'Fastest and cheapest. Good for small edits, simple workflows, and small tasks'
|
||||
if (value === 1) return 'Balances speed and reasoning. Good fit for most tasks'
|
||||
if (value === 2)
|
||||
return 'More reasoning for larger workflows and complex edits, still balanced for speed.'
|
||||
return 'Maximum reasoning power. Best for complex workflow building and debugging.'
|
||||
return 'More reasoning for larger workflows and complex edits, still balanced for speed'
|
||||
return 'Maximum reasoning power. Best for complex workflow building and debugging'
|
||||
}
|
||||
|
||||
const getDepthIconFor = (value: 0 | 1 | 2 | 3) => {
|
||||
if (value === 0) return <Zap className='h-3 w-3 text-muted-foreground' />
|
||||
if (value === 1) return <InfinityIcon className='h-3 w-3 text-muted-foreground' />
|
||||
if (value === 2) return <Brain className='h-3 w-3 text-muted-foreground' />
|
||||
return <BrainCircuit className='h-3 w-3 text-muted-foreground' />
|
||||
const colorClass = !agentPrefetch
|
||||
? 'text-[var(--brand-primary-hover-hex)]'
|
||||
: 'text-muted-foreground'
|
||||
if (value === 0) return <Zap className={`h-3 w-3 ${colorClass}`} />
|
||||
if (value === 1) return <InfinityIcon className={`h-3 w-3 ${colorClass}`} />
|
||||
if (value === 2) return <Brain className={`h-3 w-3 ${colorClass}`} />
|
||||
return <BrainCircuit className={`h-3 w-3 ${colorClass}`} />
|
||||
}
|
||||
|
||||
const getDepthIcon = () => getDepthIconFor(agentDepth)
|
||||
@@ -550,7 +563,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
placeholder={isDragging ? 'Drop files here...' : placeholder}
|
||||
disabled={disabled}
|
||||
rows={1}
|
||||
className='mb-2 min-h-[32px] w-full resize-none overflow-hidden border-0 bg-transparent px-[2px] py-1 text-foreground focus-visible:ring-0 focus-visible:ring-offset-0'
|
||||
className='mb-2 min-h-[32px] w-full resize-none overflow-y-auto overflow-x-hidden border-0 bg-transparent px-[2px] py-1 text-foreground focus-visible:ring-0 focus-visible:ring-offset-0'
|
||||
style={{ height: 'auto' }}
|
||||
/>
|
||||
|
||||
@@ -636,7 +649,12 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
className='flex h-6 items-center gap-1.5 rounded-full border px-2 py-1 font-medium text-xs'
|
||||
className={cn(
|
||||
'flex h-6 items-center gap-1.5 rounded-full border px-2 py-1 font-medium text-xs',
|
||||
!agentPrefetch
|
||||
? 'border-[var(--brand-primary-hover-hex)] text-[var(--brand-primary-hover-hex)] hover:bg-[color-mix(in_srgb,var(--brand-primary-hover-hex)_8%,transparent)] hover:text-[var(--brand-primary-hover-hex)]'
|
||||
: 'border-border text-foreground'
|
||||
)}
|
||||
title='Choose mode'
|
||||
>
|
||||
{getDepthIcon()}
|
||||
@@ -666,12 +684,25 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
className='max-w-[220px] border bg-popover p-2 text-[11px] text-popover-foreground leading-snug shadow-md'
|
||||
>
|
||||
Significantly increases depth of reasoning
|
||||
<br />
|
||||
<span className='text-[10px] text-muted-foreground italic'>
|
||||
Only available in Advanced and Behemoth modes
|
||||
</span>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
<Switch
|
||||
checked={!agentPrefetch}
|
||||
onCheckedChange={(checked) => setAgentPrefetch(!checked)}
|
||||
disabled={agentDepth < 2}
|
||||
title={
|
||||
agentDepth < 2
|
||||
? 'MAX mode is only available for Advanced or Expert'
|
||||
: undefined
|
||||
}
|
||||
onCheckedChange={(checked) => {
|
||||
if (agentDepth < 2) return
|
||||
setAgentPrefetch(!checked)
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
<div className='my-2 flex justify-center'>
|
||||
@@ -680,9 +711,12 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
<div className='mb-3'>
|
||||
<div className='mb-2 flex items-center justify-between'>
|
||||
<span className='font-medium text-xs'>Mode</span>
|
||||
<span className='text-muted-foreground text-xs'>
|
||||
{getDepthLabelFor(agentDepth)}
|
||||
</span>
|
||||
<div className='flex items-center gap-1'>
|
||||
{getDepthIconFor(agentDepth)}
|
||||
<span className='text-muted-foreground text-xs'>
|
||||
{getDepthLabelFor(agentDepth)}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<div className='relative'>
|
||||
<Slider
|
||||
|
||||
@@ -302,12 +302,12 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
const previewToolCall = lastMessage.toolCalls.find(
|
||||
(tc) =>
|
||||
tc.name === COPILOT_TOOL_IDS.BUILD_WORKFLOW &&
|
||||
tc.state === 'completed' &&
|
||||
tc.state === 'success' &&
|
||||
!isToolCallSeen(tc.id)
|
||||
)
|
||||
|
||||
if (previewToolCall?.result) {
|
||||
logger.info('Preview workflow completed via native SSE - handling result')
|
||||
if (previewToolCall) {
|
||||
logger.info('Preview workflow completed via native SSE')
|
||||
// Mark as seen to prevent duplicate processing
|
||||
markToolCallAsSeen(previewToolCall.id)
|
||||
// Tool call handling logic would go here if needed
|
||||
|
||||
@@ -1,7 +1,16 @@
|
||||
'use client'
|
||||
|
||||
import { useEffect, useRef, useState } from 'react'
|
||||
import { AlertTriangle, ChevronDown, Copy, MoreVertical, Plus, Trash } from 'lucide-react'
|
||||
import {
|
||||
AlertTriangle,
|
||||
ChevronDown,
|
||||
Copy,
|
||||
Maximize2,
|
||||
Minimize2,
|
||||
MoreVertical,
|
||||
Plus,
|
||||
Trash,
|
||||
} from 'lucide-react'
|
||||
import { highlight, languages } from 'prismjs'
|
||||
import 'prismjs/components/prism-javascript'
|
||||
import 'prismjs/themes/prism.css'
|
||||
@@ -52,6 +61,16 @@ export function Variables() {
|
||||
// Track which variables are currently being edited
|
||||
const [_activeEditors, setActiveEditors] = useState<Record<string, boolean>>({})
|
||||
|
||||
// Collapsed state per variable
|
||||
const [collapsedById, setCollapsedById] = useState<Record<string, boolean>>({})
|
||||
|
||||
const toggleCollapsed = (variableId: string) => {
|
||||
setCollapsedById((prev) => ({
|
||||
...prev,
|
||||
[variableId]: !prev[variableId],
|
||||
}))
|
||||
}
|
||||
|
||||
// Handle variable name change with validation
|
||||
const handleVariableNameChange = (variableId: string, newName: string) => {
|
||||
const validatedName = validateName(newName)
|
||||
@@ -220,7 +239,7 @@ export function Variables() {
|
||||
</Button>
|
||||
</div>
|
||||
) : (
|
||||
<ScrollArea className='h-full' hideScrollbar={true}>
|
||||
<ScrollArea className='h-full' hideScrollbar={false}>
|
||||
<div className='space-y-4'>
|
||||
{workflowVariables.map((variable) => (
|
||||
<div key={variable.id} className='space-y-2'>
|
||||
@@ -298,6 +317,17 @@ export function Variables() {
|
||||
align='end'
|
||||
className='min-w-32 rounded-lg border-[#E5E5E5] bg-[#FFFFFF] shadow-xs dark:border-[#414141] dark:bg-[var(--surface-elevated)]'
|
||||
>
|
||||
<DropdownMenuItem
|
||||
onClick={() => toggleCollapsed(variable.id)}
|
||||
className='cursor-pointer rounded-md px-3 py-2 font-[380] text-card-foreground text-sm hover:bg-secondary/50 focus:bg-secondary/50'
|
||||
>
|
||||
{(collapsedById[variable.id] ?? false) ? (
|
||||
<Maximize2 className='mr-2 h-4 w-4 text-muted-foreground' />
|
||||
) : (
|
||||
<Minimize2 className='mr-2 h-4 w-4 text-muted-foreground' />
|
||||
)}
|
||||
{(collapsedById[variable.id] ?? false) ? 'Expand' : 'Collapse'}
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
onClick={() => collaborativeDuplicateVariable(variable.id)}
|
||||
className='cursor-pointer rounded-md px-3 py-2 font-[380] text-card-foreground text-sm hover:bg-secondary/50 focus:bg-secondary/50'
|
||||
@@ -317,71 +347,75 @@ export function Variables() {
|
||||
</div>
|
||||
|
||||
{/* Value area */}
|
||||
<div className='relative rounded-lg bg-secondary/50'>
|
||||
{/* Validation indicator */}
|
||||
{variable.value !== '' && getValidationStatus(variable) && (
|
||||
<div className='absolute top-2 right-2 z-10'>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<div className='cursor-help'>
|
||||
<AlertTriangle className='h-3 w-3 text-muted-foreground' />
|
||||
</div>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='bottom' className='max-w-xs'>
|
||||
<p>{getValidationStatus(variable)}</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
)}
|
||||
{!(collapsedById[variable.id] ?? false) && (
|
||||
<div className='relative rounded-lg bg-secondary/50'>
|
||||
{/* Validation indicator */}
|
||||
{variable.value !== '' && getValidationStatus(variable) && (
|
||||
<div className='absolute top-2 right-2 z-10'>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<div className='cursor-help'>
|
||||
<AlertTriangle className='h-3 w-3 text-muted-foreground' />
|
||||
</div>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='bottom' className='max-w-xs'>
|
||||
<p>{getValidationStatus(variable)}</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Editor */}
|
||||
<div className='relative overflow-hidden'>
|
||||
<div
|
||||
className='relative min-h-[36px] w-full max-w-full px-3 py-2 font-normal text-sm'
|
||||
ref={(el) => {
|
||||
editorRefs.current[variable.id] = el
|
||||
}}
|
||||
style={{ maxWidth: '100%' }}
|
||||
>
|
||||
{variable.value === '' && (
|
||||
<div className='pointer-events-none absolute inset-0 flex select-none items-start justify-start px-3 py-2 font-[380] text-muted-foreground text-sm leading-normal'>
|
||||
<div style={{ lineHeight: '20px' }}>{getPlaceholder(variable.type)}</div>
|
||||
</div>
|
||||
)}
|
||||
<Editor
|
||||
key={`editor-${variable.id}-${variable.type}`}
|
||||
value={formatValue(variable)}
|
||||
onValueChange={handleEditorChange.bind(null, variable)}
|
||||
onBlur={() => handleEditorBlur(variable.id)}
|
||||
onFocus={() => handleEditorFocus(variable.id)}
|
||||
highlight={(code) =>
|
||||
// Only apply syntax highlighting for non-basic text types
|
||||
variable.type === 'plain' || variable.type === 'string'
|
||||
? code
|
||||
: highlight(
|
||||
code,
|
||||
languages[getEditorLanguage(variable.type)],
|
||||
getEditorLanguage(variable.type)
|
||||
)
|
||||
}
|
||||
padding={0}
|
||||
style={{
|
||||
fontFamily: 'inherit',
|
||||
lineHeight: '20px',
|
||||
width: '100%',
|
||||
maxWidth: '100%',
|
||||
whiteSpace: 'pre-wrap',
|
||||
wordBreak: 'break-all',
|
||||
overflowWrap: 'break-word',
|
||||
minHeight: '20px',
|
||||
overflow: 'hidden',
|
||||
{/* Editor */}
|
||||
<div className='relative overflow-hidden'>
|
||||
<div
|
||||
className='relative min-h-[36px] w-full max-w-full px-3 py-2 font-normal text-sm'
|
||||
ref={(el) => {
|
||||
editorRefs.current[variable.id] = el
|
||||
}}
|
||||
className='[&>pre]:!max-w-full [&>pre]:!overflow-hidden [&>pre]:!whitespace-pre-wrap [&>pre]:!break-all [&>pre]:!overflow-wrap-break-word [&>textarea]:!max-w-full [&>textarea]:!overflow-hidden [&>textarea]:!whitespace-pre-wrap [&>textarea]:!break-all [&>textarea]:!overflow-wrap-break-word font-[380] text-foreground text-sm leading-normal focus:outline-none'
|
||||
textareaClassName='focus:outline-none focus:ring-0 bg-transparent resize-none w-full max-w-full whitespace-pre-wrap break-all overflow-wrap-break-word overflow-hidden font-[380] text-foreground'
|
||||
/>
|
||||
style={{ maxWidth: '100%' }}
|
||||
>
|
||||
{variable.value === '' && (
|
||||
<div className='pointer-events-none absolute inset-0 flex select-none items-start justify-start px-3 py-2 font-[380] text-muted-foreground text-sm leading-normal'>
|
||||
<div style={{ lineHeight: '20px' }}>
|
||||
{getPlaceholder(variable.type)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
<Editor
|
||||
key={`editor-${variable.id}-${variable.type}`}
|
||||
value={formatValue(variable)}
|
||||
onValueChange={handleEditorChange.bind(null, variable)}
|
||||
onBlur={() => handleEditorBlur(variable.id)}
|
||||
onFocus={() => handleEditorFocus(variable.id)}
|
||||
highlight={(code) =>
|
||||
// Only apply syntax highlighting for non-basic text types
|
||||
variable.type === 'plain' || variable.type === 'string'
|
||||
? code
|
||||
: highlight(
|
||||
code,
|
||||
languages[getEditorLanguage(variable.type)],
|
||||
getEditorLanguage(variable.type)
|
||||
)
|
||||
}
|
||||
padding={0}
|
||||
style={{
|
||||
fontFamily: 'inherit',
|
||||
lineHeight: '20px',
|
||||
width: '100%',
|
||||
maxWidth: '100%',
|
||||
whiteSpace: 'pre-wrap',
|
||||
wordBreak: 'break-all',
|
||||
overflowWrap: 'break-word',
|
||||
minHeight: '20px',
|
||||
overflow: 'hidden',
|
||||
}}
|
||||
className='[&>pre]:!max-w-full [&>pre]:!overflow-hidden [&>pre]:!whitespace-pre-wrap [&>pre]:!break-all [&>pre]:!overflow-wrap-break-word [&>textarea]:!max-w-full [&>textarea]:!overflow-hidden [&>textarea]:!whitespace-pre-wrap [&>textarea]:!break-all [&>textarea]:!overflow-wrap-break-word font-[380] text-foreground text-sm leading-normal focus:outline-none'
|
||||
textareaClassName='focus:outline-none focus:ring-0 bg-transparent resize-none w-full max-w-full whitespace-pre-wrap break-all overflow-wrap-break-word overflow-hidden font-[380] text-foreground'
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
|
||||
|
||||
@@ -492,11 +492,7 @@ export function Panel() {
|
||||
<div className='flex-1 overflow-hidden px-3'>
|
||||
{/* Keep all tabs mounted but hidden to preserve state and animations */}
|
||||
<div style={{ display: activeTab === 'chat' ? 'block' : 'none', height: '100%' }}>
|
||||
<Chat
|
||||
panelWidth={panelWidth}
|
||||
chatMessage={chatMessage}
|
||||
setChatMessage={setChatMessage}
|
||||
/>
|
||||
<Chat chatMessage={chatMessage} setChatMessage={setChatMessage} />
|
||||
</div>
|
||||
<div style={{ display: activeTab === 'console' ? 'block' : 'none', height: '100%' }}>
|
||||
<Console panelWidth={panelWidth} />
|
||||
|
||||
@@ -18,6 +18,7 @@ interface CodeEditorProps {
|
||||
highlightVariables?: boolean
|
||||
onKeyDown?: (e: React.KeyboardEvent) => void
|
||||
disabled?: boolean
|
||||
schemaParameters?: Array<{ name: string; type: string; description: string; required: boolean }>
|
||||
}
|
||||
|
||||
export function CodeEditor({
|
||||
@@ -30,6 +31,7 @@ export function CodeEditor({
|
||||
highlightVariables = true,
|
||||
onKeyDown,
|
||||
disabled = false,
|
||||
schemaParameters = [],
|
||||
}: CodeEditorProps) {
|
||||
const [code, setCode] = useState(value)
|
||||
const [visualLineHeights, setVisualLineHeights] = useState<number[]>([])
|
||||
@@ -120,25 +122,80 @@ export function CodeEditor({
|
||||
// First, get the default Prism highlighting
|
||||
let highlighted = highlight(code, languages[language], language)
|
||||
|
||||
// Then, highlight environment variables with {{var_name}} syntax in blue
|
||||
if (highlighted.includes('{{')) {
|
||||
highlighted = highlighted.replace(
|
||||
/\{\{([^}]+)\}\}/g,
|
||||
'<span class="text-blue-500">{{$1}}</span>'
|
||||
)
|
||||
// Collect all syntax highlights to apply in a single pass
|
||||
type SyntaxHighlight = {
|
||||
start: number
|
||||
end: number
|
||||
replacement: string
|
||||
}
|
||||
const highlights: SyntaxHighlight[] = []
|
||||
|
||||
// Also highlight tags with <tag_name> syntax in blue
|
||||
if (highlighted.includes('<') && !language.includes('html')) {
|
||||
highlighted = highlighted.replace(/<([^>\s/]+)>/g, (match, group) => {
|
||||
// Avoid replacing HTML tags in comments
|
||||
if (match.startsWith('<!--') || match.includes('</')) {
|
||||
return match
|
||||
}
|
||||
return `<span class="text-blue-500"><${group}></span>`
|
||||
// Find environment variables with {{var_name}} syntax
|
||||
let match
|
||||
const envVarRegex = /\{\{([^}]+)\}\}/g
|
||||
while ((match = envVarRegex.exec(highlighted)) !== null) {
|
||||
highlights.push({
|
||||
start: match.index,
|
||||
end: match.index + match[0].length,
|
||||
replacement: `<span class="text-blue-500">${match[0]}</span>`,
|
||||
})
|
||||
}
|
||||
|
||||
// Find tags with <tag_name> syntax (not in HTML context)
|
||||
if (!language.includes('html')) {
|
||||
const tagRegex = /<([^>\s/]+)>/g
|
||||
while ((match = tagRegex.exec(highlighted)) !== null) {
|
||||
// Skip HTML comments and closing tags
|
||||
if (!match[0].startsWith('<!--') && !match[0].includes('</')) {
|
||||
const escaped = `<${match[1]}>`
|
||||
highlights.push({
|
||||
start: match.index,
|
||||
end: match.index + match[0].length,
|
||||
replacement: `<span class="text-blue-500">${escaped}</span>`,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Find schema parameters as whole words
|
||||
if (schemaParameters.length > 0) {
|
||||
schemaParameters.forEach((param) => {
|
||||
// Escape special regex characters in parameter name
|
||||
const escapedName = param.name.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
|
||||
const paramRegex = new RegExp(`\\b(${escapedName})\\b`, 'g')
|
||||
while ((match = paramRegex.exec(highlighted)) !== null) {
|
||||
// Check if this position is already inside an HTML tag
|
||||
// by looking for unclosed < before this position
|
||||
let insideTag = false
|
||||
let pos = match.index - 1
|
||||
while (pos >= 0) {
|
||||
if (highlighted[pos] === '>') break
|
||||
if (highlighted[pos] === '<') {
|
||||
insideTag = true
|
||||
break
|
||||
}
|
||||
pos--
|
||||
}
|
||||
|
||||
if (!insideTag) {
|
||||
highlights.push({
|
||||
start: match.index,
|
||||
end: match.index + match[0].length,
|
||||
replacement: `<span class="text-green-600 font-medium">${match[0]}</span>`,
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Sort highlights by start position (reverse order to maintain positions)
|
||||
highlights.sort((a, b) => b.start - a.start)
|
||||
|
||||
// Apply all highlights
|
||||
highlights.forEach(({ start, end, replacement }) => {
|
||||
highlighted = highlighted.slice(0, start) + replacement + highlighted.slice(end)
|
||||
})
|
||||
|
||||
return highlighted
|
||||
}
|
||||
|
||||
@@ -204,12 +261,17 @@ export function CodeEditor({
|
||||
disabled={disabled}
|
||||
style={{
|
||||
fontFamily: 'inherit',
|
||||
minHeight: '46px',
|
||||
minHeight: minHeight,
|
||||
lineHeight: '21px',
|
||||
height: '100%',
|
||||
}}
|
||||
className={cn('focus:outline-none', isCollapsed && 'pointer-events-none select-none')}
|
||||
className={cn(
|
||||
'h-full focus:outline-none',
|
||||
isCollapsed && 'pointer-events-none select-none'
|
||||
)}
|
||||
textareaClassName={cn(
|
||||
'focus:outline-none focus:ring-0 bg-transparent',
|
||||
'!min-h-full !h-full resize-none !block',
|
||||
(isCollapsed || disabled) && 'pointer-events-none'
|
||||
)}
|
||||
/>
|
||||
|
||||
@@ -22,6 +22,7 @@ import {
|
||||
import { checkEnvVarTrigger, EnvVarDropdown } from '@/components/ui/env-var-dropdown'
|
||||
import { Label } from '@/components/ui/label'
|
||||
import { checkTagTrigger, TagDropdown } from '@/components/ui/tag-dropdown'
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { WandPromptBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/wand-prompt-bar/wand-prompt-bar'
|
||||
@@ -36,6 +37,7 @@ interface CustomToolModalProps {
|
||||
onOpenChange: (open: boolean) => void
|
||||
onSave: (tool: CustomTool) => void
|
||||
onDelete?: (toolId: string) => void
|
||||
blockId: string
|
||||
initialValues?: {
|
||||
id?: string
|
||||
schema: any
|
||||
@@ -61,6 +63,7 @@ export function CustomToolModal({
|
||||
onOpenChange,
|
||||
onSave,
|
||||
onDelete,
|
||||
blockId,
|
||||
initialValues,
|
||||
}: CustomToolModalProps) {
|
||||
const [activeSection, setActiveSection] = useState<ToolSection>('schema')
|
||||
@@ -237,12 +240,16 @@ try {
|
||||
// Environment variables and tags dropdown state
|
||||
const [showEnvVars, setShowEnvVars] = useState(false)
|
||||
const [showTags, setShowTags] = useState(false)
|
||||
const [showSchemaParams, setShowSchemaParams] = useState(false)
|
||||
const [searchTerm, setSearchTerm] = useState('')
|
||||
const [cursorPosition, setCursorPosition] = useState(0)
|
||||
const codeEditorRef = useRef<HTMLDivElement>(null)
|
||||
const schemaParamsDropdownRef = useRef<HTMLDivElement>(null)
|
||||
const [activeSourceBlockId, setActiveSourceBlockId] = useState<string | null>(null)
|
||||
// Add state for dropdown positioning
|
||||
const [dropdownPosition, setDropdownPosition] = useState({ top: 0, left: 0 })
|
||||
// Schema params keyboard navigation
|
||||
const [schemaParamSelectedIndex, setSchemaParamSelectedIndex] = useState(0)
|
||||
|
||||
const addTool = useCustomToolsStore((state) => state.addTool)
|
||||
const updateTool = useCustomToolsStore((state) => state.updateTool)
|
||||
@@ -270,6 +277,21 @@ try {
|
||||
}
|
||||
}, [open, initialValues])
|
||||
|
||||
// Close schema params dropdown on outside click
|
||||
useEffect(() => {
|
||||
const handleClickOutside = (event: MouseEvent) => {
|
||||
if (
|
||||
schemaParamsDropdownRef.current &&
|
||||
!schemaParamsDropdownRef.current.contains(event.target as Node)
|
||||
) {
|
||||
setShowSchemaParams(false)
|
||||
}
|
||||
}
|
||||
|
||||
document.addEventListener('mousedown', handleClickOutside)
|
||||
return () => document.removeEventListener('mousedown', handleClickOutside)
|
||||
}, [])
|
||||
|
||||
const resetForm = () => {
|
||||
setJsonSchema('')
|
||||
setFunctionCode('')
|
||||
@@ -309,6 +331,15 @@ try {
|
||||
return false
|
||||
}
|
||||
|
||||
// Validate that parameters object exists with correct structure
|
||||
if (!parsed.function.parameters) {
|
||||
return false
|
||||
}
|
||||
|
||||
if (!parsed.function.parameters.type || parsed.function.parameters.properties === undefined) {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
} catch (_error) {
|
||||
return false
|
||||
@@ -320,6 +351,25 @@ try {
|
||||
return true // Allow empty code
|
||||
}
|
||||
|
||||
// Extract parameters from JSON schema for autocomplete
|
||||
const schemaParameters = useMemo(() => {
|
||||
try {
|
||||
if (!jsonSchema) return []
|
||||
const parsed = JSON.parse(jsonSchema)
|
||||
const properties = parsed?.function?.parameters?.properties
|
||||
if (!properties) return []
|
||||
|
||||
return Object.keys(properties).map((key) => ({
|
||||
name: key,
|
||||
type: properties[key].type || 'any',
|
||||
description: properties[key].description || '',
|
||||
required: parsed?.function?.parameters?.required?.includes(key) || false,
|
||||
}))
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
}, [jsonSchema])
|
||||
|
||||
// Memoize validation results to prevent unnecessary recalculations
|
||||
const isSchemaValid = useMemo(() => validateJsonSchema(jsonSchema), [jsonSchema])
|
||||
const isCodeValid = useMemo(() => validateFunctionCode(functionCode), [functionCode])
|
||||
@@ -350,6 +400,34 @@ try {
|
||||
return
|
||||
}
|
||||
|
||||
// Validate parameters structure - must be present
|
||||
if (!parsed.function.parameters) {
|
||||
setSchemaError('Missing function.parameters object')
|
||||
setActiveSection('schema')
|
||||
return
|
||||
}
|
||||
|
||||
if (!parsed.function.parameters.type) {
|
||||
setSchemaError('Missing parameters.type field')
|
||||
setActiveSection('schema')
|
||||
return
|
||||
}
|
||||
|
||||
if (parsed.function.parameters.properties === undefined) {
|
||||
setSchemaError('Missing parameters.properties field')
|
||||
setActiveSection('schema')
|
||||
return
|
||||
}
|
||||
|
||||
if (
|
||||
typeof parsed.function.parameters.properties !== 'object' ||
|
||||
parsed.function.parameters.properties === null
|
||||
) {
|
||||
setSchemaError('parameters.properties must be an object')
|
||||
setActiveSection('schema')
|
||||
return
|
||||
}
|
||||
|
||||
// Check for duplicate tool name
|
||||
const toolName = parsed.function.name
|
||||
const customToolsStore = useCustomToolsStore.getState()
|
||||
@@ -439,7 +517,52 @@ try {
|
||||
// Prevent updates during AI generation/streaming
|
||||
if (schemaGeneration.isLoading || schemaGeneration.isStreaming) return
|
||||
setJsonSchema(value)
|
||||
if (schemaError) {
|
||||
|
||||
// Real-time validation - show error immediately when schema is invalid
|
||||
if (value.trim()) {
|
||||
try {
|
||||
const parsed = JSON.parse(value)
|
||||
|
||||
if (!parsed.type || parsed.type !== 'function') {
|
||||
setSchemaError('Missing "type": "function"')
|
||||
return
|
||||
}
|
||||
|
||||
if (!parsed.function || !parsed.function.name) {
|
||||
setSchemaError('Missing function.name field')
|
||||
return
|
||||
}
|
||||
|
||||
if (!parsed.function.parameters) {
|
||||
setSchemaError('Missing function.parameters object')
|
||||
return
|
||||
}
|
||||
|
||||
if (!parsed.function.parameters.type) {
|
||||
setSchemaError('Missing parameters.type field')
|
||||
return
|
||||
}
|
||||
|
||||
if (parsed.function.parameters.properties === undefined) {
|
||||
setSchemaError('Missing parameters.properties field')
|
||||
return
|
||||
}
|
||||
|
||||
if (
|
||||
typeof parsed.function.parameters.properties !== 'object' ||
|
||||
parsed.function.parameters.properties === null
|
||||
) {
|
||||
setSchemaError('parameters.properties must be an object')
|
||||
return
|
||||
}
|
||||
|
||||
// Schema is valid, clear any existing error
|
||||
setSchemaError(null)
|
||||
} catch {
|
||||
setSchemaError('Invalid JSON format')
|
||||
}
|
||||
} else {
|
||||
// Clear error when schema is empty (will be caught during save)
|
||||
setSchemaError(null)
|
||||
}
|
||||
}
|
||||
@@ -499,9 +622,40 @@ try {
|
||||
if (!tagTrigger.show) {
|
||||
setActiveSourceBlockId(null)
|
||||
}
|
||||
|
||||
// Show/hide schema parameters dropdown based on typing context
|
||||
if (!codeGeneration.isStreaming && schemaParameters.length > 0) {
|
||||
const schemaParamTrigger = checkSchemaParamTrigger(value, pos, schemaParameters)
|
||||
if (schemaParamTrigger.show && !showSchemaParams) {
|
||||
setShowSchemaParams(true)
|
||||
setSchemaParamSelectedIndex(0)
|
||||
} else if (!schemaParamTrigger.show && showSchemaParams) {
|
||||
setShowSchemaParams(false)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Function to check if we should show schema parameters dropdown
|
||||
const checkSchemaParamTrigger = (text: string, cursorPos: number, parameters: any[]) => {
|
||||
if (parameters.length === 0) return { show: false, searchTerm: '' }
|
||||
|
||||
// Look for partial parameter names after common patterns like 'const ', '= ', etc.
|
||||
const beforeCursor = text.substring(0, cursorPos)
|
||||
const words = beforeCursor.split(/[\s=();,{}[\]]+/)
|
||||
const currentWord = words[words.length - 1] || ''
|
||||
|
||||
// Show dropdown if typing and current word could be a parameter
|
||||
if (currentWord.length > 0 && /^[a-zA-Z_][\w]*$/.test(currentWord)) {
|
||||
const matchingParams = parameters.filter((param) =>
|
||||
param.name.toLowerCase().startsWith(currentWord.toLowerCase())
|
||||
)
|
||||
return { show: matchingParams.length > 0, searchTerm: currentWord, matches: matchingParams }
|
||||
}
|
||||
|
||||
return { show: false, searchTerm: '' }
|
||||
}
|
||||
|
||||
// Handle environment variable selection
|
||||
const handleEnvVarSelect = (newValue: string) => {
|
||||
setFunctionCode(newValue)
|
||||
@@ -515,6 +669,32 @@ try {
|
||||
setActiveSourceBlockId(null)
|
||||
}
|
||||
|
||||
// Handle schema parameter selection
|
||||
const handleSchemaParamSelect = (paramName: string) => {
|
||||
const textarea = codeEditorRef.current?.querySelector('textarea')
|
||||
if (textarea) {
|
||||
const pos = textarea.selectionStart
|
||||
const beforeCursor = functionCode.substring(0, pos)
|
||||
const afterCursor = functionCode.substring(pos)
|
||||
|
||||
// Find the start of the current word
|
||||
const words = beforeCursor.split(/[\s=();,{}[\]]+/)
|
||||
const currentWord = words[words.length - 1] || ''
|
||||
const wordStart = beforeCursor.lastIndexOf(currentWord)
|
||||
|
||||
// Replace the current partial word with the selected parameter
|
||||
const newValue = beforeCursor.substring(0, wordStart) + paramName + afterCursor
|
||||
setFunctionCode(newValue)
|
||||
setShowSchemaParams(false)
|
||||
|
||||
// Set cursor position after the inserted parameter
|
||||
setTimeout(() => {
|
||||
textarea.focus()
|
||||
textarea.setSelectionRange(wordStart + paramName.length, wordStart + paramName.length)
|
||||
}, 0)
|
||||
}
|
||||
}
|
||||
|
||||
// Handle key press events
|
||||
const handleKeyDown = (e: React.KeyboardEvent) => {
|
||||
// Allow AI prompt interaction (e.g., Escape to close prompt bar)
|
||||
@@ -535,10 +715,14 @@ try {
|
||||
e.stopPropagation()
|
||||
return
|
||||
}
|
||||
// Close dropdowns only if AI prompt isn't active
|
||||
if (!showEnvVars && !showTags) {
|
||||
// Close dropdowns first, only close modal if no dropdowns are open
|
||||
if (showEnvVars || showTags || showSchemaParams) {
|
||||
setShowEnvVars(false)
|
||||
setShowTags(false)
|
||||
setShowSchemaParams(false)
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
@@ -552,7 +736,37 @@ try {
|
||||
return
|
||||
}
|
||||
|
||||
// Let dropdowns handle their own keyboard events if visible
|
||||
// Handle schema parameters dropdown keyboard navigation
|
||||
if (showSchemaParams && schemaParameters.length > 0) {
|
||||
switch (e.key) {
|
||||
case 'ArrowDown':
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
setSchemaParamSelectedIndex((prev) => Math.min(prev + 1, schemaParameters.length - 1))
|
||||
break
|
||||
case 'ArrowUp':
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
setSchemaParamSelectedIndex((prev) => Math.max(prev - 1, 0))
|
||||
break
|
||||
case 'Enter':
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
if (schemaParamSelectedIndex >= 0 && schemaParamSelectedIndex < schemaParameters.length) {
|
||||
const selectedParam = schemaParameters[schemaParamSelectedIndex]
|
||||
handleSchemaParamSelect(selectedParam.name)
|
||||
}
|
||||
break
|
||||
case 'Escape':
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
setShowSchemaParams(false)
|
||||
break
|
||||
}
|
||||
return // Don't handle other dropdown events when schema params is active
|
||||
}
|
||||
|
||||
// Let other dropdowns handle their own keyboard events if visible
|
||||
if (showEnvVars || showTags) {
|
||||
if (['ArrowDown', 'ArrowUp', 'Enter'].includes(e.key)) {
|
||||
e.preventDefault()
|
||||
@@ -619,6 +833,16 @@ try {
|
||||
<DialogContent
|
||||
className='flex h-[80vh] flex-col gap-0 p-0 sm:max-w-[700px]'
|
||||
hideCloseButton
|
||||
onKeyDown={(e) => {
|
||||
// Intercept Escape key when dropdowns are open
|
||||
if (e.key === 'Escape' && (showEnvVars || showTags || showSchemaParams)) {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
setShowEnvVars(false)
|
||||
setShowTags(false)
|
||||
setShowSchemaParams(false)
|
||||
}
|
||||
}}
|
||||
>
|
||||
<DialogHeader className='border-b px-6 py-4'>
|
||||
<div className='flex items-center justify-between'>
|
||||
@@ -729,7 +953,7 @@ try {
|
||||
</div>
|
||||
{schemaError &&
|
||||
!schemaGeneration.isStreaming && ( // Hide schema error while streaming
|
||||
<span className='ml-4 flex-shrink-0 text-red-600 text-sm'>{schemaError}</span>
|
||||
<div className='ml-4 break-words text-red-600 text-sm'>{schemaError}</div>
|
||||
)}
|
||||
</div>
|
||||
<CodeEditor
|
||||
@@ -799,9 +1023,25 @@ try {
|
||||
</div>
|
||||
{codeError &&
|
||||
!codeGeneration.isStreaming && ( // Hide code error while streaming
|
||||
<span className='ml-4 flex-shrink-0 text-red-600 text-sm'>{codeError}</span>
|
||||
<div className='ml-4 break-words text-red-600 text-sm'>{codeError}</div>
|
||||
)}
|
||||
</div>
|
||||
{schemaParameters.length > 0 && (
|
||||
<div className='mb-2 rounded-md bg-muted/50 p-2'>
|
||||
<p className='text-muted-foreground text-xs'>
|
||||
<span className='font-medium'>Available parameters:</span>{' '}
|
||||
{schemaParameters.map((param, index) => (
|
||||
<span key={param.name}>
|
||||
<code className='rounded bg-background px-1 py-0.5 text-foreground'>
|
||||
{param.name}
|
||||
</code>
|
||||
{index < schemaParameters.length - 1 && ', '}
|
||||
</span>
|
||||
))}
|
||||
{'. '}Start typing a parameter name for autocomplete.
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
<div ref={codeEditorRef} className='relative'>
|
||||
<CodeEditor
|
||||
value={functionCode}
|
||||
@@ -819,6 +1059,7 @@ try {
|
||||
highlightVariables={true}
|
||||
disabled={codeGeneration.isLoading || codeGeneration.isStreaming} // Use disabled prop instead of readOnly
|
||||
onKeyDown={handleKeyDown} // Pass keydown handler
|
||||
schemaParameters={schemaParameters} // Pass schema parameters for highlighting
|
||||
/>
|
||||
|
||||
{/* Environment variables dropdown */}
|
||||
@@ -847,7 +1088,7 @@ try {
|
||||
<TagDropdown
|
||||
visible={showTags}
|
||||
onSelect={handleTagSelect}
|
||||
blockId=''
|
||||
blockId={blockId}
|
||||
activeSourceBlockId={activeSourceBlockId}
|
||||
inputValue={functionCode}
|
||||
cursorPosition={cursorPosition}
|
||||
@@ -863,6 +1104,49 @@ try {
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Schema parameters dropdown */}
|
||||
{showSchemaParams && schemaParameters.length > 0 && (
|
||||
<div
|
||||
ref={schemaParamsDropdownRef}
|
||||
className='absolute z-[9999] mt-1 w-64 overflow-visible rounded-md border bg-popover shadow-md'
|
||||
style={{
|
||||
top: `${dropdownPosition.top}px`,
|
||||
left: `${dropdownPosition.left}px`,
|
||||
}}
|
||||
>
|
||||
<div className='py-1'>
|
||||
<div className='px-2 pt-2.5 pb-0.5 font-medium text-muted-foreground text-xs'>
|
||||
Available Parameters
|
||||
</div>
|
||||
<div>
|
||||
{schemaParameters.map((param, index) => (
|
||||
<button
|
||||
key={param.name}
|
||||
onClick={() => handleSchemaParamSelect(param.name)}
|
||||
onMouseEnter={() => setSchemaParamSelectedIndex(index)}
|
||||
className={cn(
|
||||
'flex w-full items-center gap-2 px-3 py-1.5 text-left text-sm',
|
||||
'hover:bg-accent hover:text-accent-foreground',
|
||||
'focus:bg-accent focus:text-accent-foreground focus:outline-none',
|
||||
index === schemaParamSelectedIndex &&
|
||||
'bg-accent text-accent-foreground'
|
||||
)}
|
||||
>
|
||||
<div
|
||||
className='flex h-5 w-5 items-center justify-center rounded'
|
||||
style={{ backgroundColor: '#2F8BFF' }}
|
||||
>
|
||||
<span className='h-3 w-3 font-bold text-white text-xs'>P</span>
|
||||
</div>
|
||||
<span className='flex-1 truncate'>{param.name}</span>
|
||||
<span className='text-muted-foreground text-xs'>{param.type}</span>
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<div className='h-6' />
|
||||
</div>
|
||||
@@ -899,11 +1183,27 @@ try {
|
||||
Cancel
|
||||
</Button>
|
||||
{activeSection === 'schema' ? (
|
||||
<Button onClick={() => setActiveSection('code')} disabled={!isSchemaValid}>
|
||||
<Button
|
||||
onClick={() => setActiveSection('code')}
|
||||
disabled={!isSchemaValid || !!schemaError}
|
||||
>
|
||||
Next
|
||||
</Button>
|
||||
) : (
|
||||
<Button onClick={handleSave}>{isEditing ? 'Update Tool' : 'Save Tool'}</Button>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span>
|
||||
<Button onClick={handleSave} disabled={!isSchemaValid}>
|
||||
{isEditing ? 'Update Tool' : 'Save Tool'}
|
||||
</Button>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
{!isSchemaValid && (
|
||||
<TooltipContent side='top'>
|
||||
<p>Invalid JSON schema</p>
|
||||
</TooltipContent>
|
||||
)}
|
||||
</Tooltip>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1320,7 +1320,7 @@ export function ToolInput({
|
||||
|
||||
// For custom tools, extract parameters from schema
|
||||
const customToolParams =
|
||||
isCustomTool && tool.schema
|
||||
isCustomTool && tool.schema && tool.schema.function?.parameters?.properties
|
||||
? Object.entries(tool.schema.function.parameters.properties || {}).map(
|
||||
([paramId, param]: [string, any]) => ({
|
||||
id: paramId,
|
||||
@@ -1824,6 +1824,7 @@ export function ToolInput({
|
||||
}}
|
||||
onSave={editingToolIndex !== null ? handleSaveCustomTool : handleAddCustomTool}
|
||||
onDelete={handleDeleteTool}
|
||||
blockId={blockId}
|
||||
initialValues={
|
||||
editingToolIndex !== null && selectedTools[editingToolIndex]?.type === 'custom-tool'
|
||||
? {
|
||||
|
||||
@@ -50,7 +50,9 @@ export function useCurrentWorkflow(): CurrentWorkflow {
|
||||
// Create the abstracted interface
|
||||
const currentWorkflow = useMemo((): CurrentWorkflow => {
|
||||
// Determine which workflow to use - only use diff if it's ready
|
||||
const shouldUseDiff = isShowingDiff && isDiffReady && !!diffWorkflow
|
||||
const hasDiffBlocks =
|
||||
!!diffWorkflow && Object.keys((diffWorkflow as any).blocks || {}).length > 0
|
||||
const shouldUseDiff = isShowingDiff && isDiffReady && hasDiffBlocks
|
||||
const activeWorkflow = shouldUseDiff ? diffWorkflow : normalWorkflow
|
||||
|
||||
return {
|
||||
|
||||
@@ -279,15 +279,17 @@ export function useWorkflowExecution() {
|
||||
|
||||
// Handle file uploads if present
|
||||
const uploadedFiles: any[] = []
|
||||
console.log('Checking for files to upload:', workflowInput.files)
|
||||
interface UploadErrorCapableInput {
|
||||
onUploadError: (message: string) => void
|
||||
}
|
||||
const isUploadErrorCapable = (value: unknown): value is UploadErrorCapableInput =>
|
||||
!!value &&
|
||||
typeof value === 'object' &&
|
||||
'onUploadError' in (value as any) &&
|
||||
typeof (value as any).onUploadError === 'function'
|
||||
if (workflowInput.files && Array.isArray(workflowInput.files)) {
|
||||
try {
|
||||
console.log('Processing files for upload:', workflowInput.files.length)
|
||||
|
||||
for (const fileData of workflowInput.files) {
|
||||
console.log('Uploading file:', fileData.name, fileData.size)
|
||||
console.log('File data:', fileData)
|
||||
|
||||
// Create FormData for upload
|
||||
const formData = new FormData()
|
||||
formData.append('file', fileData.file)
|
||||
@@ -303,8 +305,6 @@ export function useWorkflowExecution() {
|
||||
|
||||
if (response.ok) {
|
||||
const uploadResult = await response.json()
|
||||
console.log('Upload successful:', uploadResult)
|
||||
|
||||
// Convert upload result to clean UserFile format
|
||||
const processUploadResult = (result: any) => ({
|
||||
id:
|
||||
@@ -327,23 +327,28 @@ export function useWorkflowExecution() {
|
||||
// Single file upload - the result IS the file object
|
||||
uploadedFiles.push(processUploadResult(uploadResult))
|
||||
} else {
|
||||
console.error('Unexpected upload response format:', uploadResult)
|
||||
logger.error('Unexpected upload response format:', uploadResult)
|
||||
}
|
||||
} else {
|
||||
const errorText = await response.text()
|
||||
console.error(
|
||||
`Failed to upload file ${fileData.name}:`,
|
||||
response.status,
|
||||
errorText
|
||||
)
|
||||
const message = `Failed to upload ${fileData.name}: ${response.status} ${errorText}`
|
||||
logger.error(message)
|
||||
if (isUploadErrorCapable(workflowInput)) {
|
||||
try {
|
||||
workflowInput.onUploadError(message)
|
||||
} catch {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log('All files processed. Uploaded files:', uploadedFiles)
|
||||
// Update workflow input with uploaded files
|
||||
workflowInput.files = uploadedFiles
|
||||
} catch (error) {
|
||||
console.error('Error uploading files:', error)
|
||||
logger.error('Error uploading files:', error)
|
||||
if (isUploadErrorCapable(workflowInput)) {
|
||||
try {
|
||||
workflowInput.onUploadError('Unexpected error uploading files')
|
||||
} catch {}
|
||||
}
|
||||
// Continue execution even if file upload fails
|
||||
workflowInput.files = []
|
||||
}
|
||||
|
||||
@@ -65,7 +65,8 @@ export function getWorkflowExecutionContext(): WorkflowExecutionContext {
|
||||
const { isShowingDiff, isDiffReady, diffWorkflow } = useWorkflowDiffStore.getState()
|
||||
|
||||
// Determine which workflow to use - same logic as useCurrentWorkflow
|
||||
const shouldUseDiff = isShowingDiff && isDiffReady && !!diffWorkflow
|
||||
const hasDiffBlocks = !!diffWorkflow && Object.keys((diffWorkflow as any).blocks || {}).length > 0
|
||||
const shouldUseDiff = isShowingDiff && isDiffReady && hasDiffBlocks
|
||||
const currentWorkflow = shouldUseDiff ? diffWorkflow : workflowState
|
||||
|
||||
const { getAllVariables } = useEnvironmentStore.getState()
|
||||
|
||||
@@ -352,11 +352,13 @@ export const isPointInLoopNode = (
|
||||
const containingNodes = getNodes()
|
||||
.filter((n) => isContainerType(n.type))
|
||||
.filter((n) => {
|
||||
// Use absolute coordinates for nested containers
|
||||
const absolutePos = getNodeAbsolutePosition(n.id, getNodes)
|
||||
const rect = {
|
||||
left: n.position.x,
|
||||
right: n.position.x + (n.data?.width || DEFAULT_CONTAINER_WIDTH),
|
||||
top: n.position.y,
|
||||
bottom: n.position.y + (n.data?.height || DEFAULT_CONTAINER_HEIGHT),
|
||||
left: absolutePos.x,
|
||||
right: absolutePos.x + (n.data?.width || DEFAULT_CONTAINER_WIDTH),
|
||||
top: absolutePos.y,
|
||||
bottom: absolutePos.y + (n.data?.height || DEFAULT_CONTAINER_HEIGHT),
|
||||
}
|
||||
|
||||
return (
|
||||
@@ -368,7 +370,8 @@ export const isPointInLoopNode = (
|
||||
})
|
||||
.map((n) => ({
|
||||
loopId: n.id,
|
||||
loopPosition: n.position,
|
||||
// Return absolute position so callers can compute relative placement correctly
|
||||
loopPosition: getNodeAbsolutePosition(n.id, getNodes),
|
||||
dimensions: {
|
||||
width: n.data?.width || DEFAULT_CONTAINER_WIDTH,
|
||||
height: n.data?.height || DEFAULT_CONTAINER_HEIGHT,
|
||||
|
||||
@@ -281,6 +281,41 @@ const WorkflowContent = React.memo(() => {
|
||||
[getNodes]
|
||||
)
|
||||
|
||||
// Compute the absolute position of a node's source anchor (right-middle)
|
||||
const getNodeAnchorPosition = useCallback(
|
||||
(nodeId: string): { x: number; y: number } => {
|
||||
const node = getNodes().find((n) => n.id === nodeId)
|
||||
const absPos = getNodeAbsolutePositionWrapper(nodeId)
|
||||
|
||||
if (!node) {
|
||||
return absPos
|
||||
}
|
||||
|
||||
// Use known defaults per node type without type casting
|
||||
const isSubflow = node.type === 'subflowNode'
|
||||
const width = isSubflow
|
||||
? typeof node.data?.width === 'number'
|
||||
? node.data.width
|
||||
: 500
|
||||
: typeof node.width === 'number'
|
||||
? node.width
|
||||
: 350
|
||||
const height = isSubflow
|
||||
? typeof node.data?.height === 'number'
|
||||
? node.data.height
|
||||
: 300
|
||||
: typeof node.height === 'number'
|
||||
? node.height
|
||||
: 100
|
||||
|
||||
return {
|
||||
x: absPos.x + width,
|
||||
y: absPos.y + height / 2,
|
||||
}
|
||||
},
|
||||
[getNodes, getNodeAbsolutePositionWrapper]
|
||||
)
|
||||
|
||||
// Auto-layout handler - now uses frontend auto layout for immediate updates
|
||||
const handleAutoLayout = useCallback(async () => {
|
||||
if (Object.keys(blocks).length === 0) return
|
||||
@@ -373,22 +408,37 @@ const WorkflowContent = React.memo(() => {
|
||||
// Handle drops
|
||||
const findClosestOutput = useCallback(
|
||||
(newNodePosition: { x: number; y: number }): BlockData | null => {
|
||||
const existingBlocks = Object.entries(blocks)
|
||||
.filter(([_, block]) => block.enabled)
|
||||
.map(([id, block]) => ({
|
||||
id,
|
||||
type: block.type,
|
||||
position: block.position,
|
||||
distance: Math.sqrt(
|
||||
(block.position.x - newNodePosition.x) ** 2 +
|
||||
(block.position.y - newNodePosition.y) ** 2
|
||||
),
|
||||
}))
|
||||
// Determine if drop is inside a container; if not, exclude child nodes from candidates
|
||||
const containerAtPoint = isPointInLoopNodeWrapper(newNodePosition)
|
||||
const nodeIndex = new Map(getNodes().map((n) => [n.id, n]))
|
||||
|
||||
const candidates = Object.entries(blocks)
|
||||
.filter(([id, block]) => {
|
||||
if (!block.enabled) return false
|
||||
const node = nodeIndex.get(id)
|
||||
if (!node) return false
|
||||
|
||||
// If dropping outside containers, ignore blocks that are inside a container
|
||||
if (!containerAtPoint && node.parentId) return false
|
||||
return true
|
||||
})
|
||||
.map(([id, block]) => {
|
||||
const anchor = getNodeAnchorPosition(id)
|
||||
const distance = Math.sqrt(
|
||||
(anchor.x - newNodePosition.x) ** 2 + (anchor.y - newNodePosition.y) ** 2
|
||||
)
|
||||
return {
|
||||
id,
|
||||
type: block.type,
|
||||
position: anchor,
|
||||
distance,
|
||||
}
|
||||
})
|
||||
.sort((a, b) => a.distance - b.distance)
|
||||
|
||||
return existingBlocks[0] || null
|
||||
return candidates[0] || null
|
||||
},
|
||||
[blocks]
|
||||
[blocks, getNodes, getNodeAnchorPosition, isPointInLoopNodeWrapper]
|
||||
)
|
||||
|
||||
// Determine the appropriate source handle based on block type
|
||||
@@ -667,6 +717,11 @@ const WorkflowContent = React.memo(() => {
|
||||
y: position.y - containerInfo.loopPosition.y,
|
||||
}
|
||||
|
||||
// Capture existing child blocks before adding the new one
|
||||
const existingChildBlocks = Object.values(blocks).filter(
|
||||
(b) => b.data?.parentId === containerInfo.loopId
|
||||
)
|
||||
|
||||
// Add block with parent info
|
||||
addBlock(id, data.type, name, relativePosition, {
|
||||
parentId: containerInfo.loopId,
|
||||
@@ -680,12 +735,35 @@ const WorkflowContent = React.memo(() => {
|
||||
// Auto-connect logic for blocks inside containers
|
||||
const isAutoConnectEnabled = useGeneralStore.getState().isAutoConnectEnabled
|
||||
if (isAutoConnectEnabled && data.type !== 'starter') {
|
||||
// First priority: Connect to the container's start node
|
||||
const containerNode = getNodes().find((n) => n.id === containerInfo.loopId)
|
||||
const containerType = containerNode?.type
|
||||
if (existingChildBlocks.length > 0) {
|
||||
// Connect to the nearest existing child block within the container
|
||||
const closestBlock = existingChildBlocks
|
||||
.map((b) => ({
|
||||
block: b,
|
||||
distance: Math.sqrt(
|
||||
(b.position.x - relativePosition.x) ** 2 +
|
||||
(b.position.y - relativePosition.y) ** 2
|
||||
),
|
||||
}))
|
||||
.sort((a, b) => a.distance - b.distance)[0]?.block
|
||||
|
||||
if (containerType === 'subflowNode') {
|
||||
// Connect from the container's start node to the new block
|
||||
if (closestBlock) {
|
||||
const sourceHandle = determineSourceHandle({
|
||||
id: closestBlock.id,
|
||||
type: closestBlock.type,
|
||||
})
|
||||
addEdge({
|
||||
id: crypto.randomUUID(),
|
||||
source: closestBlock.id,
|
||||
target: id,
|
||||
sourceHandle,
|
||||
targetHandle: 'target',
|
||||
type: 'workflowEdge',
|
||||
})
|
||||
}
|
||||
} else {
|
||||
// No existing children: connect from the container's start handle
|
||||
const containerNode = getNodes().find((n) => n.id === containerInfo.loopId)
|
||||
const startSourceHandle =
|
||||
(containerNode?.data as any)?.kind === 'loop'
|
||||
? 'loop-start-source'
|
||||
@@ -699,45 +777,6 @@ const WorkflowContent = React.memo(() => {
|
||||
targetHandle: 'target',
|
||||
type: 'workflowEdge',
|
||||
})
|
||||
} else {
|
||||
// Fallback: Try to find other nodes in the container to connect to
|
||||
const containerNodes = getNodes().filter((n) => n.parentId === containerInfo.loopId)
|
||||
|
||||
if (containerNodes.length > 0) {
|
||||
// Connect to the closest node in the container
|
||||
const closestNode = containerNodes
|
||||
.map((n) => ({
|
||||
id: n.id,
|
||||
distance: Math.sqrt(
|
||||
(n.position.x - relativePosition.x) ** 2 +
|
||||
(n.position.y - relativePosition.y) ** 2
|
||||
),
|
||||
}))
|
||||
.sort((a, b) => a.distance - b.distance)[0]
|
||||
|
||||
if (closestNode) {
|
||||
// Get appropriate source handle
|
||||
const sourceNode = getNodes().find((n) => n.id === closestNode.id)
|
||||
const sourceType = sourceNode?.data?.type
|
||||
|
||||
// Default source handle
|
||||
let sourceHandle = 'source'
|
||||
|
||||
// For condition blocks, use the condition-true handle
|
||||
if (sourceType === 'condition') {
|
||||
sourceHandle = 'condition-true'
|
||||
}
|
||||
|
||||
addEdge({
|
||||
id: crypto.randomUUID(),
|
||||
source: closestNode.id,
|
||||
target: id,
|
||||
sourceHandle,
|
||||
targetHandle: 'target',
|
||||
type: 'workflowEdge',
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@@ -1396,8 +1435,69 @@ const WorkflowContent = React.memo(() => {
|
||||
|
||||
// Update the node's parent relationship
|
||||
if (potentialParentId) {
|
||||
// Compute relative position BEFORE updating parent to avoid stale state
|
||||
const containerAbsPosBefore = getNodeAbsolutePositionWrapper(potentialParentId)
|
||||
const nodeAbsPosBefore = getNodeAbsolutePositionWrapper(node.id)
|
||||
const relativePositionBefore = {
|
||||
x: nodeAbsPosBefore.x - containerAbsPosBefore.x,
|
||||
y: nodeAbsPosBefore.y - containerAbsPosBefore.y,
|
||||
}
|
||||
|
||||
// Moving to a new parent container
|
||||
updateNodeParent(node.id, potentialParentId)
|
||||
|
||||
// Auto-connect when moving an existing block into a container
|
||||
const isAutoConnectEnabled = useGeneralStore.getState().isAutoConnectEnabled
|
||||
if (isAutoConnectEnabled) {
|
||||
// Existing children in the target container (excluding the moved node)
|
||||
const existingChildBlocks = Object.values(blocks).filter(
|
||||
(b) => b.data?.parentId === potentialParentId && b.id !== node.id
|
||||
)
|
||||
|
||||
if (existingChildBlocks.length > 0) {
|
||||
// Connect from nearest existing child inside the container
|
||||
const closestBlock = existingChildBlocks
|
||||
.map((b) => ({
|
||||
block: b,
|
||||
distance: Math.sqrt(
|
||||
(b.position.x - relativePositionBefore.x) ** 2 +
|
||||
(b.position.y - relativePositionBefore.y) ** 2
|
||||
),
|
||||
}))
|
||||
.sort((a, b) => a.distance - b.distance)[0]?.block
|
||||
|
||||
if (closestBlock) {
|
||||
const sourceHandle = determineSourceHandle({
|
||||
id: closestBlock.id,
|
||||
type: closestBlock.type,
|
||||
})
|
||||
addEdge({
|
||||
id: crypto.randomUUID(),
|
||||
source: closestBlock.id,
|
||||
target: node.id,
|
||||
sourceHandle,
|
||||
targetHandle: 'target',
|
||||
type: 'workflowEdge',
|
||||
})
|
||||
}
|
||||
} else {
|
||||
// No children: connect from the container's start handle to the moved node
|
||||
const containerNode = getNodes().find((n) => n.id === potentialParentId)
|
||||
const startSourceHandle =
|
||||
(containerNode?.data as any)?.kind === 'loop'
|
||||
? 'loop-start-source'
|
||||
: 'parallel-start-source'
|
||||
|
||||
addEdge({
|
||||
id: crypto.randomUUID(),
|
||||
source: potentialParentId,
|
||||
target: node.id,
|
||||
sourceHandle: startSourceHandle,
|
||||
targetHandle: 'target',
|
||||
type: 'workflowEdge',
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Reset state
|
||||
@@ -1411,6 +1511,10 @@ const WorkflowContent = React.memo(() => {
|
||||
updateNodeParent,
|
||||
getNodeHierarchyWrapper,
|
||||
collaborativeUpdateBlockPosition,
|
||||
addEdge,
|
||||
determineSourceHandle,
|
||||
blocks,
|
||||
getNodeAbsolutePositionWrapper,
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
255
apps/sim/blocks/blocks/mysql.ts
Normal file
255
apps/sim/blocks/blocks/mysql.ts
Normal file
@@ -0,0 +1,255 @@
|
||||
import { MySQLIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import type { MySQLResponse } from '@/tools/mysql/types'
|
||||
|
||||
export const MySQLBlock: BlockConfig<MySQLResponse> = {
|
||||
type: 'mysql',
|
||||
name: 'MySQL',
|
||||
description: 'Connect to MySQL database',
|
||||
longDescription:
|
||||
'Connect to any MySQL database to execute queries, manage data, and perform database operations. Supports SELECT, INSERT, UPDATE, DELETE operations with secure connection handling.',
|
||||
docsLink: 'https://docs.sim.ai/tools/mysql',
|
||||
category: 'tools',
|
||||
bgColor: '#E0E0E0',
|
||||
icon: MySQLIcon,
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
layout: 'full',
|
||||
options: [
|
||||
{ label: 'Query (SELECT)', id: 'query' },
|
||||
{ label: 'Insert Data', id: 'insert' },
|
||||
{ label: 'Update Data', id: 'update' },
|
||||
{ label: 'Delete Data', id: 'delete' },
|
||||
{ label: 'Execute Raw SQL', id: 'execute' },
|
||||
],
|
||||
value: () => 'query',
|
||||
},
|
||||
{
|
||||
id: 'host',
|
||||
title: 'Host',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'localhost or your.database.host',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'port',
|
||||
title: 'Port',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: '3306',
|
||||
value: () => '3306',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'database',
|
||||
title: 'Database Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'your_database',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'username',
|
||||
title: 'Username',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'root',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'password',
|
||||
title: 'Password',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
password: true,
|
||||
placeholder: 'Your database password',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'ssl',
|
||||
title: 'SSL Mode',
|
||||
type: 'dropdown',
|
||||
layout: 'full',
|
||||
options: [
|
||||
{ label: 'Disabled', id: 'disabled' },
|
||||
{ label: 'Required', id: 'required' },
|
||||
{ label: 'Preferred', id: 'preferred' },
|
||||
],
|
||||
value: () => 'preferred',
|
||||
},
|
||||
// Table field for insert/update/delete operations
|
||||
{
|
||||
id: 'table',
|
||||
title: 'Table Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'users',
|
||||
condition: { field: 'operation', value: 'insert' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'table',
|
||||
title: 'Table Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'users',
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'table',
|
||||
title: 'Table Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'users',
|
||||
condition: { field: 'operation', value: 'delete' },
|
||||
required: true,
|
||||
},
|
||||
// SQL Query field
|
||||
{
|
||||
id: 'query',
|
||||
title: 'SQL Query',
|
||||
type: 'code',
|
||||
layout: 'full',
|
||||
placeholder: 'SELECT * FROM users WHERE active = true',
|
||||
condition: { field: 'operation', value: 'query' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'query',
|
||||
title: 'SQL Query',
|
||||
type: 'code',
|
||||
layout: 'full',
|
||||
placeholder: 'SELECT * FROM table_name',
|
||||
condition: { field: 'operation', value: 'execute' },
|
||||
required: true,
|
||||
},
|
||||
// Data for insert operations
|
||||
{
|
||||
id: 'data',
|
||||
title: 'Data (JSON)',
|
||||
type: 'code',
|
||||
layout: 'full',
|
||||
placeholder: '{\n "name": "John Doe",\n "email": "john@example.com",\n "active": true\n}',
|
||||
condition: { field: 'operation', value: 'insert' },
|
||||
required: true,
|
||||
},
|
||||
// Set clause for updates
|
||||
{
|
||||
id: 'data',
|
||||
title: 'Update Data (JSON)',
|
||||
type: 'code',
|
||||
layout: 'full',
|
||||
placeholder: '{\n "name": "Jane Doe",\n "email": "jane@example.com"\n}',
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
required: true,
|
||||
},
|
||||
// Where clause for update/delete
|
||||
{
|
||||
id: 'where',
|
||||
title: 'WHERE Condition',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'id = 1',
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'where',
|
||||
title: 'WHERE Condition',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'id = 1',
|
||||
condition: { field: 'operation', value: 'delete' },
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: ['mysql_query', 'mysql_insert', 'mysql_update', 'mysql_delete', 'mysql_execute'],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
switch (params.operation) {
|
||||
case 'query':
|
||||
return 'mysql_query'
|
||||
case 'insert':
|
||||
return 'mysql_insert'
|
||||
case 'update':
|
||||
return 'mysql_update'
|
||||
case 'delete':
|
||||
return 'mysql_delete'
|
||||
case 'execute':
|
||||
return 'mysql_execute'
|
||||
default:
|
||||
throw new Error(`Invalid MySQL operation: ${params.operation}`)
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const { operation, data, ...rest } = params
|
||||
|
||||
// Parse JSON data if it's a string
|
||||
let parsedData
|
||||
if (data && typeof data === 'string' && data.trim()) {
|
||||
try {
|
||||
parsedData = JSON.parse(data)
|
||||
} catch (parseError) {
|
||||
const errorMsg = parseError instanceof Error ? parseError.message : 'Unknown JSON error'
|
||||
throw new Error(`Invalid JSON data format: ${errorMsg}. Please check your JSON syntax.`)
|
||||
}
|
||||
} else if (data && typeof data === 'object') {
|
||||
parsedData = data
|
||||
}
|
||||
|
||||
// Build connection config
|
||||
const connectionConfig = {
|
||||
host: rest.host,
|
||||
port: typeof rest.port === 'string' ? Number.parseInt(rest.port, 10) : rest.port || 3306,
|
||||
database: rest.database,
|
||||
username: rest.username,
|
||||
password: rest.password,
|
||||
ssl: rest.ssl || 'preferred',
|
||||
}
|
||||
|
||||
// Build params object
|
||||
const result: any = { ...connectionConfig }
|
||||
|
||||
if (rest.table) result.table = rest.table
|
||||
if (rest.query) result.query = rest.query
|
||||
if (rest.where) result.where = rest.where
|
||||
if (parsedData !== undefined) result.data = parsedData
|
||||
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Database operation to perform' },
|
||||
host: { type: 'string', description: 'Database host' },
|
||||
port: { type: 'string', description: 'Database port' },
|
||||
database: { type: 'string', description: 'Database name' },
|
||||
username: { type: 'string', description: 'Database username' },
|
||||
password: { type: 'string', description: 'Database password' },
|
||||
ssl: { type: 'string', description: 'SSL mode' },
|
||||
table: { type: 'string', description: 'Table name' },
|
||||
query: { type: 'string', description: 'SQL query to execute' },
|
||||
data: { type: 'json', description: 'Data for insert/update operations' },
|
||||
where: { type: 'string', description: 'WHERE clause for update/delete' },
|
||||
},
|
||||
outputs: {
|
||||
message: {
|
||||
type: 'string',
|
||||
description: 'Success or error message describing the operation outcome',
|
||||
},
|
||||
rows: {
|
||||
type: 'array',
|
||||
description: 'Array of rows returned from the query',
|
||||
},
|
||||
rowCount: {
|
||||
type: 'number',
|
||||
description: 'Number of rows affected by the operation',
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -200,15 +200,14 @@ export const OneDriveBlock: BlockConfig<OneDriveResponse> = {
|
||||
params: (params) => {
|
||||
const { credential, folderSelector, manualFolderId, mimeType, ...rest } = params
|
||||
|
||||
// Use folderSelector if provided, otherwise use manualFolderId
|
||||
const effectiveFolderId = (folderSelector || manualFolderId || '').trim()
|
||||
|
||||
return {
|
||||
...rest,
|
||||
accessToken: credential,
|
||||
folderId: effectiveFolderId,
|
||||
// Pass both; tools will prioritize manualFolderId over folderSelector
|
||||
folderSelector,
|
||||
manualFolderId,
|
||||
pageSize: rest.pageSize ? Number.parseInt(rest.pageSize as string, 10) : undefined,
|
||||
mimeType: mimeType,
|
||||
...rest,
|
||||
}
|
||||
},
|
||||
},
|
||||
|
||||
109
apps/sim/blocks/blocks/parallel.ts
Normal file
109
apps/sim/blocks/blocks/parallel.ts
Normal file
@@ -0,0 +1,109 @@
|
||||
import { ParallelIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import type { ToolResponse } from '@/tools/types'
|
||||
|
||||
export const ParallelBlock: BlockConfig<ToolResponse> = {
|
||||
type: 'parallel_ai',
|
||||
name: 'Parallel AI',
|
||||
description: 'Search with Parallel AI',
|
||||
longDescription:
|
||||
"Search the web using Parallel AI's advanced search capabilities. Get comprehensive results with intelligent processing and content extraction.",
|
||||
docsLink: 'https://docs.parallel.ai/search-api/search-quickstart',
|
||||
category: 'tools',
|
||||
bgColor: '#E0E0E0',
|
||||
icon: ParallelIcon,
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'objective',
|
||||
title: 'Search Objective',
|
||||
type: 'long-input',
|
||||
layout: 'full',
|
||||
placeholder: "When was the United Nations established? Prefer UN's websites.",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'search_queries',
|
||||
title: 'Search Queries',
|
||||
type: 'long-input',
|
||||
layout: 'full',
|
||||
placeholder:
|
||||
'Enter search queries separated by commas (e.g., "Founding year UN", "Year of founding United Nations")',
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
id: 'processor',
|
||||
title: 'Processor',
|
||||
type: 'dropdown',
|
||||
layout: 'full',
|
||||
options: [
|
||||
{ label: 'Base', id: 'base' },
|
||||
{ label: 'Pro', id: 'pro' },
|
||||
],
|
||||
value: () => 'base',
|
||||
},
|
||||
{
|
||||
id: 'max_results',
|
||||
title: 'Max Results',
|
||||
type: 'short-input',
|
||||
layout: 'half',
|
||||
placeholder: '5',
|
||||
},
|
||||
{
|
||||
id: 'max_chars_per_result',
|
||||
title: 'Max Chars',
|
||||
type: 'short-input',
|
||||
layout: 'half',
|
||||
placeholder: '1500',
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'Enter your Parallel AI API key',
|
||||
password: true,
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: ['parallel_search'],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
// Convert search_queries from comma-separated string to array (if provided)
|
||||
if (params.search_queries && typeof params.search_queries === 'string') {
|
||||
const queries = params.search_queries
|
||||
.split(',')
|
||||
.map((query: string) => query.trim())
|
||||
.filter((query: string) => query.length > 0)
|
||||
// Only set if we have actual queries
|
||||
if (queries.length > 0) {
|
||||
params.search_queries = queries
|
||||
} else {
|
||||
params.search_queries = undefined
|
||||
}
|
||||
}
|
||||
|
||||
// Convert numeric parameters
|
||||
if (params.max_results) {
|
||||
params.max_results = Number(params.max_results)
|
||||
}
|
||||
if (params.max_chars_per_result) {
|
||||
params.max_chars_per_result = Number(params.max_chars_per_result)
|
||||
}
|
||||
|
||||
return 'parallel_search'
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
objective: { type: 'string', description: 'Search objective or question' },
|
||||
search_queries: { type: 'string', description: 'Comma-separated search queries' },
|
||||
processor: { type: 'string', description: 'Processing method' },
|
||||
max_results: { type: 'number', description: 'Maximum number of results' },
|
||||
max_chars_per_result: { type: 'number', description: 'Maximum characters per result' },
|
||||
apiKey: { type: 'string', description: 'Parallel AI API key' },
|
||||
},
|
||||
outputs: {
|
||||
results: { type: 'array', description: 'Search results with excerpts from relevant pages' },
|
||||
},
|
||||
}
|
||||
261
apps/sim/blocks/blocks/postgresql.ts
Normal file
261
apps/sim/blocks/blocks/postgresql.ts
Normal file
@@ -0,0 +1,261 @@
|
||||
import { PostgresIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import type { PostgresResponse } from '@/tools/postgresql/types'
|
||||
|
||||
export const PostgreSQLBlock: BlockConfig<PostgresResponse> = {
|
||||
type: 'postgresql',
|
||||
name: 'PostgreSQL',
|
||||
description: 'Connect to PostgreSQL database',
|
||||
longDescription:
|
||||
'Connect to any PostgreSQL database to execute queries, manage data, and perform database operations. Supports SELECT, INSERT, UPDATE, DELETE operations with secure connection handling.',
|
||||
docsLink: 'https://docs.sim.ai/tools/postgresql',
|
||||
category: 'tools',
|
||||
bgColor: '#336791',
|
||||
icon: PostgresIcon,
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
layout: 'full',
|
||||
options: [
|
||||
{ label: 'Query (SELECT)', id: 'query' },
|
||||
{ label: 'Insert Data', id: 'insert' },
|
||||
{ label: 'Update Data', id: 'update' },
|
||||
{ label: 'Delete Data', id: 'delete' },
|
||||
{ label: 'Execute Raw SQL', id: 'execute' },
|
||||
],
|
||||
value: () => 'query',
|
||||
},
|
||||
{
|
||||
id: 'host',
|
||||
title: 'Host',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'localhost or your.database.host',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'port',
|
||||
title: 'Port',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: '5432',
|
||||
value: () => '5432',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'database',
|
||||
title: 'Database Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'your_database',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'username',
|
||||
title: 'Username',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'postgres',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'password',
|
||||
title: 'Password',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
password: true,
|
||||
placeholder: 'Your database password',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'ssl',
|
||||
title: 'SSL Mode',
|
||||
type: 'dropdown',
|
||||
layout: 'full',
|
||||
options: [
|
||||
{ label: 'Disabled', id: 'disabled' },
|
||||
{ label: 'Required', id: 'required' },
|
||||
{ label: 'Preferred', id: 'preferred' },
|
||||
],
|
||||
value: () => 'preferred',
|
||||
},
|
||||
// Table field for insert/update/delete operations
|
||||
{
|
||||
id: 'table',
|
||||
title: 'Table Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'users',
|
||||
condition: { field: 'operation', value: 'insert' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'table',
|
||||
title: 'Table Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'users',
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'table',
|
||||
title: 'Table Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'users',
|
||||
condition: { field: 'operation', value: 'delete' },
|
||||
required: true,
|
||||
},
|
||||
// SQL Query field
|
||||
{
|
||||
id: 'query',
|
||||
title: 'SQL Query',
|
||||
type: 'code',
|
||||
layout: 'full',
|
||||
placeholder: 'SELECT * FROM users WHERE active = true',
|
||||
condition: { field: 'operation', value: 'query' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'query',
|
||||
title: 'SQL Query',
|
||||
type: 'code',
|
||||
layout: 'full',
|
||||
placeholder: 'SELECT * FROM table_name',
|
||||
condition: { field: 'operation', value: 'execute' },
|
||||
required: true,
|
||||
},
|
||||
// Data for insert operations
|
||||
{
|
||||
id: 'data',
|
||||
title: 'Data (JSON)',
|
||||
type: 'code',
|
||||
layout: 'full',
|
||||
placeholder: '{\n "name": "John Doe",\n "email": "john@example.com",\n "active": true\n}',
|
||||
condition: { field: 'operation', value: 'insert' },
|
||||
required: true,
|
||||
},
|
||||
// Set clause for updates
|
||||
{
|
||||
id: 'data',
|
||||
title: 'Update Data (JSON)',
|
||||
type: 'code',
|
||||
layout: 'full',
|
||||
placeholder: '{\n "name": "Jane Doe",\n "email": "jane@example.com"\n}',
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
required: true,
|
||||
},
|
||||
// Where clause for update/delete
|
||||
{
|
||||
id: 'where',
|
||||
title: 'WHERE Condition',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'id = 1',
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'where',
|
||||
title: 'WHERE Condition',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'id = 1',
|
||||
condition: { field: 'operation', value: 'delete' },
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: [
|
||||
'postgresql_query',
|
||||
'postgresql_insert',
|
||||
'postgresql_update',
|
||||
'postgresql_delete',
|
||||
'postgresql_execute',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
switch (params.operation) {
|
||||
case 'query':
|
||||
return 'postgresql_query'
|
||||
case 'insert':
|
||||
return 'postgresql_insert'
|
||||
case 'update':
|
||||
return 'postgresql_update'
|
||||
case 'delete':
|
||||
return 'postgresql_delete'
|
||||
case 'execute':
|
||||
return 'postgresql_execute'
|
||||
default:
|
||||
throw new Error(`Invalid PostgreSQL operation: ${params.operation}`)
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const { operation, data, ...rest } = params
|
||||
|
||||
// Parse JSON data if it's a string
|
||||
let parsedData
|
||||
if (data && typeof data === 'string' && data.trim()) {
|
||||
try {
|
||||
parsedData = JSON.parse(data)
|
||||
} catch (parseError) {
|
||||
const errorMsg = parseError instanceof Error ? parseError.message : 'Unknown JSON error'
|
||||
throw new Error(`Invalid JSON data format: ${errorMsg}. Please check your JSON syntax.`)
|
||||
}
|
||||
} else if (data && typeof data === 'object') {
|
||||
parsedData = data
|
||||
}
|
||||
|
||||
// Build connection config
|
||||
const connectionConfig = {
|
||||
host: rest.host,
|
||||
port: typeof rest.port === 'string' ? Number.parseInt(rest.port, 10) : rest.port || 5432,
|
||||
database: rest.database,
|
||||
username: rest.username,
|
||||
password: rest.password,
|
||||
ssl: rest.ssl || 'preferred',
|
||||
}
|
||||
|
||||
// Build params object
|
||||
const result: any = { ...connectionConfig }
|
||||
|
||||
if (rest.table) result.table = rest.table
|
||||
if (rest.query) result.query = rest.query
|
||||
if (rest.where) result.where = rest.where
|
||||
if (parsedData !== undefined) result.data = parsedData
|
||||
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Database operation to perform' },
|
||||
host: { type: 'string', description: 'Database host' },
|
||||
port: { type: 'string', description: 'Database port' },
|
||||
database: { type: 'string', description: 'Database name' },
|
||||
username: { type: 'string', description: 'Database username' },
|
||||
password: { type: 'string', description: 'Database password' },
|
||||
ssl: { type: 'string', description: 'SSL mode' },
|
||||
table: { type: 'string', description: 'Table name' },
|
||||
query: { type: 'string', description: 'SQL query to execute' },
|
||||
data: { type: 'json', description: 'Data for insert/update operations' },
|
||||
where: { type: 'string', description: 'WHERE clause for update/delete' },
|
||||
},
|
||||
outputs: {
|
||||
message: {
|
||||
type: 'string',
|
||||
description: 'Success or error message describing the operation outcome',
|
||||
},
|
||||
rows: {
|
||||
type: 'array',
|
||||
description: 'Array of rows returned from the query',
|
||||
},
|
||||
rowCount: {
|
||||
type: 'number',
|
||||
description: 'Number of rows affected by the operation',
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -40,12 +40,15 @@ import { MicrosoftExcelBlock } from '@/blocks/blocks/microsoft_excel'
|
||||
import { MicrosoftPlannerBlock } from '@/blocks/blocks/microsoft_planner'
|
||||
import { MicrosoftTeamsBlock } from '@/blocks/blocks/microsoft_teams'
|
||||
import { MistralParseBlock } from '@/blocks/blocks/mistral_parse'
|
||||
import { MySQLBlock } from '@/blocks/blocks/mysql'
|
||||
import { NotionBlock } from '@/blocks/blocks/notion'
|
||||
import { OneDriveBlock } from '@/blocks/blocks/onedrive'
|
||||
import { OpenAIBlock } from '@/blocks/blocks/openai'
|
||||
import { OutlookBlock } from '@/blocks/blocks/outlook'
|
||||
import { ParallelBlock } from '@/blocks/blocks/parallel'
|
||||
import { PerplexityBlock } from '@/blocks/blocks/perplexity'
|
||||
import { PineconeBlock } from '@/blocks/blocks/pinecone'
|
||||
import { PostgreSQLBlock } from '@/blocks/blocks/postgresql'
|
||||
import { QdrantBlock } from '@/blocks/blocks/qdrant'
|
||||
import { RedditBlock } from '@/blocks/blocks/reddit'
|
||||
import { ResponseBlock } from '@/blocks/blocks/response'
|
||||
@@ -113,12 +116,15 @@ export const registry: Record<string, BlockConfig> = {
|
||||
microsoft_planner: MicrosoftPlannerBlock,
|
||||
microsoft_teams: MicrosoftTeamsBlock,
|
||||
mistral_parse: MistralParseBlock,
|
||||
mysql: MySQLBlock,
|
||||
notion: NotionBlock,
|
||||
openai: OpenAIBlock,
|
||||
outlook: OutlookBlock,
|
||||
onedrive: OneDriveBlock,
|
||||
parallel_ai: ParallelBlock,
|
||||
perplexity: PerplexityBlock,
|
||||
pinecone: PineconeBlock,
|
||||
postgresql: PostgreSQLBlock,
|
||||
qdrant: QdrantBlock,
|
||||
memory: MemoryBlock,
|
||||
reddit: RedditBlock,
|
||||
|
||||
@@ -4,7 +4,7 @@ import type { ToolResponse } from '@/tools/types'
|
||||
// Basic types
|
||||
export type BlockIcon = (props: SVGProps<SVGSVGElement>) => JSX.Element
|
||||
export type ParamType = 'string' | 'number' | 'boolean' | 'json'
|
||||
export type PrimitiveValueType = 'string' | 'number' | 'boolean' | 'json' | 'any'
|
||||
export type PrimitiveValueType = 'string' | 'number' | 'boolean' | 'json' | 'array' | 'any'
|
||||
|
||||
// Block classification
|
||||
export type BlockCategory = 'blocks' | 'tools' | 'triggers'
|
||||
|
||||
@@ -3344,3 +3344,95 @@ export function MicrosoftPlannerIcon(props: SVGProps<SVGSVGElement>) {
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function ParallelIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
fill='currentColor'
|
||||
width='271'
|
||||
height='270'
|
||||
viewBox='0 0 271 270'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
>
|
||||
<path
|
||||
d='M267.804 105.65H193.828C194.026 106.814 194.187 107.996 194.349 109.178H76.6703C76.4546 110.736 76.2388 112.312 76.0591 113.87H1.63342C1.27387 116.198 0.950289 118.543 0.698608 120.925H75.3759C75.2501 122.483 75.1602 124.059 75.0703 125.617H195.949C196.003 126.781 196.057 127.962 196.093 129.144H270.68V125.384C270.195 118.651 269.242 112.061 267.804 105.65Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M195.949 144.401H75.0703C75.1422 145.977 75.2501 147.535 75.3759 149.093H0.698608C0.950289 151.457 1.2559 153.802 1.63342 156.148H76.0591C76.2388 157.724 76.4366 159.282 76.6703 160.84H194.349C194.187 162.022 194.008 163.186 193.828 164.367H267.804C269.242 157.957 270.195 151.367 270.68 144.634V140.874H196.093C196.057 142.055 196.003 143.219 195.949 144.401Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M190.628 179.642H80.3559C80.7514 181.218 81.1828 182.776 81.6143 184.334H9.30994C10.2448 186.715 11.2515 189.061 12.3121 191.389H83.7536C84.2749 192.965 84.7962 194.523 85.3535 196.08H185.594C185.163 197.262 184.732 198.426 184.282 199.608H254.519C258.6 192.177 261.98 184.316 264.604 176.114H191.455C191.185 177.296 190.898 178.46 190.61 179.642H190.628Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M177.666 214.883H93.3352C94.1082 216.458 94.9172 218.034 95.7441 219.574H29.8756C31.8351 221.992 33.8666 224.337 35.9699 226.63H99.6632C100.598 228.205 101.551 229.781 102.522 231.321H168.498C167.761 232.503 167.006 233.685 166.233 234.849H226.762C234.474 227.847 241.36 219.95 247.292 211.355H179.356C178.799 212.537 178.26 213.719 177.684 214.883H177.666Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M154.943 250.106H116.058C117.371 251.699 118.701 253.257 120.067 254.797H73.021C91.6094 264.431 112.715 269.946 135.096 270C135.24 270 135.366 270 135.492 270C135.618 270 135.761 270 135.887 270C164.04 269.911 190.178 261.28 211.805 246.56H157.748C156.813 247.742 155.878 248.924 154.925 250.088L154.943 250.106Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M116.059 19.9124H154.943C155.896 21.0764 156.831 22.2582 157.766 23.4401H211.823C190.179 8.72065 164.058 0.0895344 135.906 0C135.762 0 135.636 0 135.51 0C135.384 0 135.24 0 135.115 0C112.715 0.0716275 91.6277 5.56904 73.0393 15.2029H120.086C118.719 16.7429 117.389 18.3187 116.077 19.8945L116.059 19.9124Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M93.3356 55.1532H177.667C178.242 56.3171 178.799 57.499 179.339 58.6808H247.274C241.342 50.0855 234.457 42.1886 226.744 35.187H166.215C166.988 36.351 167.743 37.5328 168.48 38.7147H102.504C101.533 40.2726 100.58 41.8305 99.6456 43.4063H35.9523C33.831 45.6804 31.7996 48.0262 29.858 50.4616H95.7265C94.8996 52.0195 94.1086 53.5774 93.3176 55.1532H93.3356Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
<path
|
||||
d='M80.3736 90.3758H190.646C190.933 91.5398 191.221 92.7216 191.491 93.9035H264.64C262.015 85.7021 258.636 77.841 254.555 70.4097H184.318C184.767 71.5736 185.199 72.7555 185.63 73.9373H85.3893C84.832 75.4952 84.2927 77.0531 83.7893 78.6289H12.3479C11.2872 80.9389 10.2805 83.2847 9.3457 85.6842H81.65C81.2186 87.2421 80.7871 88.8 80.3916 90.3758H80.3736Z'
|
||||
fill='#1D1C1A'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function PostgresIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
width='800px'
|
||||
height='800px'
|
||||
viewBox='-4 0 264 264'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
preserveAspectRatio='xMinYMin meet'
|
||||
>
|
||||
<path d='M255.008 158.086c-1.535-4.649-5.556-7.887-10.756-8.664-2.452-.366-5.26-.21-8.583.475-5.792 1.195-10.089 1.65-13.225 1.738 11.837-19.985 21.462-42.775 27.003-64.228 8.96-34.689 4.172-50.492-1.423-57.64C233.217 10.847 211.614.683 185.552.372c-13.903-.17-26.108 2.575-32.475 4.549-5.928-1.046-12.302-1.63-18.99-1.738-12.537-.2-23.614 2.533-33.079 8.15-5.24-1.772-13.65-4.27-23.362-5.864-22.842-3.75-41.252-.828-54.718 8.685C6.622 25.672-.937 45.684.461 73.634c.444 8.874 5.408 35.874 13.224 61.48 4.492 14.718 9.282 26.94 14.237 36.33 7.027 13.315 14.546 21.156 22.987 23.972 4.731 1.576 13.327 2.68 22.368-4.85 1.146 1.388 2.675 2.767 4.704 4.048 2.577 1.625 5.728 2.953 8.875 3.74 11.341 2.835 21.964 2.126 31.027-1.848.056 1.612.099 3.152.135 4.482.06 2.157.12 4.272.199 6.25.537 13.374 1.447 23.773 4.143 31.049.148.4.347 1.01.557 1.657 1.345 4.118 3.594 11.012 9.316 16.411 5.925 5.593 13.092 7.308 19.656 7.308 3.292 0 6.433-.432 9.188-1.022 9.82-2.105 20.973-5.311 29.041-16.799 7.628-10.86 11.336-27.217 12.007-52.99.087-.729.167-1.425.244-2.088l.16-1.362 1.797.158.463.031c10.002.456 22.232-1.665 29.743-5.154 5.935-2.754 24.954-12.795 20.476-26.351' />
|
||||
<path
|
||||
d='M237.906 160.722c-29.74 6.135-31.785-3.934-31.785-3.934 31.4-46.593 44.527-105.736 33.2-120.211-30.904-39.485-84.399-20.811-85.292-20.327l-.287.052c-5.876-1.22-12.451-1.946-19.842-2.067-13.456-.22-23.664 3.528-31.41 9.402 0 0-95.43-39.314-90.991 49.444.944 18.882 27.064 142.873 58.218 105.422 11.387-13.695 22.39-25.274 22.39-25.274 5.464 3.63 12.006 5.482 18.864 4.817l.533-.452c-.166 1.7-.09 3.363.213 5.332-8.026 8.967-5.667 10.541-21.711 13.844-16.235 3.346-6.698 9.302-.471 10.86 7.549 1.887 25.013 4.561 36.813-11.958l-.47 1.885c3.144 2.519 5.352 16.383 4.982 28.952-.37 12.568-.617 21.197 1.86 27.937 2.479 6.74 4.948 21.905 26.04 17.386 17.623-3.777 26.756-13.564 28.027-29.89.901-11.606 2.942-9.89 3.07-20.267l1.637-4.912c1.887-15.733.3-20.809 11.157-18.448l2.64.232c7.99.363 18.45-1.286 24.589-4.139 13.218-6.134 21.058-16.377 8.024-13.686h.002'
|
||||
fill='#336791'
|
||||
/>
|
||||
<path
|
||||
d='M108.076 81.525c-2.68-.373-5.107-.028-6.335.902-.69.523-.904 1.129-.962 1.546-.154 1.105.62 2.327 1.096 2.957 1.346 1.784 3.312 3.01 5.258 3.28.282.04.563.058.842.058 3.245 0 6.196-2.527 6.456-4.392.325-2.336-3.066-3.893-6.355-4.35M196.86 81.599c-.256-1.831-3.514-2.353-6.606-1.923-3.088.43-6.082 1.824-5.832 3.659.2 1.427 2.777 3.863 5.827 3.863.258 0 .518-.017.78-.054 2.036-.282 3.53-1.575 4.24-2.32 1.08-1.136 1.706-2.402 1.591-3.225'
|
||||
fill='#FFF'
|
||||
/>
|
||||
<path
|
||||
d='M247.802 160.025c-1.134-3.429-4.784-4.532-10.848-3.28-18.005 3.716-24.453 1.142-26.57-.417 13.995-21.32 25.508-47.092 31.719-71.137 2.942-11.39 4.567-21.968 4.7-30.59.147-9.463-1.465-16.417-4.789-20.665-13.402-17.125-33.072-26.311-56.882-26.563-16.369-.184-30.199 4.005-32.88 5.183-5.646-1.404-11.801-2.266-18.502-2.376-12.288-.199-22.91 2.743-31.704 8.74-3.82-1.422-13.692-4.811-25.765-6.756-20.872-3.36-37.458-.814-49.294 7.571-14.123 10.006-20.643 27.892-19.38 53.16.425 8.501 5.269 34.653 12.913 59.698 10.062 32.964 21 51.625 32.508 55.464 1.347.449 2.9.763 4.613.763 4.198 0 9.345-1.892 14.7-8.33a529.832 529.832 0 0 1 20.261-22.926c4.524 2.428 9.494 3.784 14.577 3.92.01.133.023.266.035.398a117.66 117.66 0 0 0-2.57 3.175c-3.522 4.471-4.255 5.402-15.592 7.736-3.225.666-11.79 2.431-11.916 8.435-.136 6.56 10.125 9.315 11.294 9.607 4.074 1.02 7.999 1.523 11.742 1.523 9.103 0 17.114-2.992 23.516-8.781-.197 23.386.778 46.43 3.586 53.451 2.3 5.748 7.918 19.795 25.664 19.794 2.604 0 5.47-.303 8.623-.979 18.521-3.97 26.564-12.156 29.675-30.203 1.665-9.645 4.522-32.676 5.866-45.03 2.836.885 6.487 1.29 10.434 1.289 8.232 0 17.731-1.749 23.688-4.514 6.692-3.108 18.768-10.734 16.578-17.36zm-44.106-83.48c-.061 3.647-.563 6.958-1.095 10.414-.573 3.717-1.165 7.56-1.314 12.225-.147 4.54.42 9.26.968 13.825 1.108 9.22 2.245 18.712-2.156 28.078a36.508 36.508 0 0 1-1.95-4.009c-.547-1.326-1.735-3.456-3.38-6.404-6.399-11.476-21.384-38.35-13.713-49.316 2.285-3.264 8.084-6.62 22.64-4.813zm-17.644-61.787c21.334.471 38.21 8.452 50.158 23.72 9.164 11.711-.927 64.998-30.14 110.969a171.33 171.33 0 0 0-.886-1.117l-.37-.462c7.549-12.467 6.073-24.802 4.759-35.738-.54-4.488-1.05-8.727-.92-12.709.134-4.22.692-7.84 1.232-11.34.663-4.313 1.338-8.776 1.152-14.037.139-.552.195-1.204.122-1.978-.475-5.045-6.235-20.144-17.975-33.81-6.422-7.475-15.787-15.84-28.574-21.482 5.5-1.14 13.021-2.203 21.442-2.016zM66.674 175.778c-5.9 7.094-9.974 5.734-11.314 
5.288-8.73-2.912-18.86-21.364-27.791-50.624-7.728-25.318-12.244-50.777-12.602-57.916-1.128-22.578 4.345-38.313 16.268-46.769 19.404-13.76 51.306-5.524 64.125-1.347-.184.182-.376.352-.558.537-21.036 21.244-20.537 57.54-20.485 59.759-.002.856.07 2.068.168 3.735.362 6.105 1.036 17.467-.764 30.334-1.672 11.957 2.014 23.66 10.111 32.109a36.275 36.275 0 0 0 2.617 2.468c-3.604 3.86-11.437 12.396-19.775 22.426zm22.479-29.993c-6.526-6.81-9.49-16.282-8.133-25.99 1.9-13.592 1.199-25.43.822-31.79-.053-.89-.1-1.67-.127-2.285 3.073-2.725 17.314-10.355 27.47-8.028 4.634 1.061 7.458 4.217 8.632 9.645 6.076 28.103.804 39.816-3.432 49.229-.873 1.939-1.698 3.772-2.402 5.668l-.546 1.466c-1.382 3.706-2.668 7.152-3.465 10.424-6.938-.02-13.687-2.984-18.819-8.34zm1.065 37.9c-2.026-.506-3.848-1.385-4.917-2.114.893-.42 2.482-.992 5.238-1.56 13.337-2.745 15.397-4.683 19.895-10.394 1.031-1.31 2.2-2.794 3.819-4.602l.002-.002c2.411-2.7 3.514-2.242 5.514-1.412 1.621.67 3.2 2.702 3.84 4.938.303 1.056.643 3.06-.47 4.62-9.396 13.156-23.088 12.987-32.921 10.526zm69.799 64.952c-16.316 3.496-22.093-4.829-25.9-14.346-2.457-6.144-3.665-33.85-2.808-64.447.011-.407-.047-.8-.159-1.17a15.444 15.444 0 0 0-.456-2.162c-1.274-4.452-4.379-8.176-8.104-9.72-1.48-.613-4.196-1.738-7.46-.903.696-2.868 1.903-6.107 3.212-9.614l.549-1.475c.618-1.663 1.394-3.386 2.214-5.21 4.433-9.848 10.504-23.337 3.915-53.81-2.468-11.414-10.71-16.988-23.204-15.693-7.49.775-14.343 3.797-17.761 5.53-.735.372-1.407.732-2.035 1.082.954-11.5 4.558-32.992 18.04-46.59 8.489-8.56 19.794-12.788 33.568-12.56 27.14.444 44.544 14.372 54.366 25.979 8.464 10.001 13.047 20.076 14.876 25.51-13.755-1.399-23.11 1.316-27.852 8.096-10.317 14.748 5.644 43.372 13.315 57.129 1.407 2.521 2.621 4.7 3.003 5.626 2.498 6.054 5.732 10.096 8.093 13.046.724.904 1.426 1.781 1.96 2.547-4.166 1.201-11.649 3.976-10.967 17.847-.55 6.96-4.461 39.546-6.448 51.059-2.623 15.21-8.22 20.875-23.957 24.25zm68.104-77.936c-4.26 1.977-11.389 3.46-18.161 
3.779-7.48.35-11.288-.838-12.184-1.569-.42-8.644 2.797-9.547 6.202-10.503.535-.15 1.057-.297 1.561-.473.313.255.656.508 1.032.756 6.012 3.968 16.735 4.396 31.874 1.271l.166-.033c-2.042 1.909-5.536 4.471-10.49 6.772z'
|
||||
fill='#FFF'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function MySQLIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
width='64'
|
||||
height='64'
|
||||
viewBox='0 0 25.6 25.6'
|
||||
>
|
||||
<path
|
||||
d='M179.076 94.886c-3.568-.1-6.336.268-8.656 1.25-.668.27-1.74.27-1.828 1.116.357.355.4.936.713 1.428.535.893 1.473 2.096 2.32 2.72l2.855 2.053c1.74 1.07 3.703 1.695 5.398 2.766.982.625 1.963 1.428 2.945 2.098.5.357.803.938 1.428 1.16v-.135c-.312-.4-.402-.98-.713-1.428l-1.34-1.293c-1.293-1.74-2.9-3.258-4.64-4.506-1.428-.982-4.55-2.32-5.13-3.97l-.088-.1c.98-.1 2.14-.447 3.078-.715 1.518-.4 2.9-.312 4.46-.713l2.143-.625v-.4c-.803-.803-1.383-1.874-2.23-2.632-2.275-1.963-4.775-3.882-7.363-5.488-1.383-.892-3.168-1.473-4.64-2.23-.537-.268-1.428-.402-1.74-.848-.805-.98-1.25-2.275-1.83-3.436l-3.658-7.763c-.803-1.74-1.295-3.48-2.275-5.086-4.596-7.585-9.594-12.18-17.268-16.687-1.65-.937-3.613-1.34-5.7-1.83l-3.346-.18c-.715-.312-1.428-1.16-2.053-1.562-2.543-1.606-9.102-5.086-10.977-.5-1.205 2.9 1.785 5.755 2.8 7.228.76 1.026 1.74 2.186 2.277 3.346.3.758.4 1.562.713 2.365.713 1.963 1.383 4.15 2.32 5.98.5.937 1.025 1.92 1.65 2.767.357.5.982.714 1.115 1.517-.625.893-.668 2.23-1.025 3.347-1.607 5.042-.982 11.288 1.293 15 .715 1.115 2.4 3.57 4.686 2.632 2.008-.803 1.56-3.346 2.14-5.577.135-.535.045-.892.312-1.25v.1l1.83 3.703c1.383 2.186 3.793 4.462 5.8 5.98 1.07.803 1.918 2.187 3.256 2.677v-.135h-.088c-.268-.4-.67-.58-1.027-.892-.803-.803-1.695-1.785-2.32-2.677-1.873-2.498-3.523-5.265-4.996-8.12-.715-1.383-1.34-2.9-1.918-4.283-.27-.536-.27-1.34-.715-1.606-.67.98-1.65 1.83-2.143 3.034-.848 1.918-.936 4.283-1.248 6.737-.18.045-.1 0-.18.1-1.426-.356-1.918-1.83-2.453-3.078-1.338-3.168-1.562-8.254-.402-11.913.312-.937 1.652-3.882 1.117-4.774-.27-.848-1.16-1.338-1.652-2.008-.58-.848-1.203-1.918-1.605-2.855-1.07-2.5-1.605-5.265-2.766-7.764-.537-1.16-1.473-2.365-2.232-3.435-.848-1.205-1.783-2.053-2.453-3.48-.223-.5-.535-1.294-.178-1.83.088-.357.268-.5.623-.58.58-.5 2.232.134 2.812.4 1.65.67 3.033 1.294 4.416 2.23.625.446 1.295 1.294 2.098 1.518h.938c1.428.312 3.033.1 4.37.5 2.365.76 4.506 1.874 6.426 3.08 5.844 3.703 10.664 8.968 13.92 15.26.535 1.026.758 1.963 1.25 3.034.938 2.187 2.098 
4.417 3.033 6.56.938 2.097 1.83 4.24 3.168 5.98.67.937 3.346 1.427 4.55 1.918.893.4 2.275.76 3.08 1.25 1.516.937 3.033 2.008 4.46 3.034.713.534 2.945 1.65 3.078 2.54zm-45.5-38.772a7.09 7.09 0 0 0-1.828.223v.1h.088c.357.714.982 1.205 1.428 1.83l1.027 2.142.088-.1c.625-.446.938-1.16.938-2.23-.268-.312-.312-.625-.535-.937-.268-.446-.848-.67-1.206-1.026z'
|
||||
transform='matrix(.390229 0 0 .38781 -46.300037 -16.856717)'
|
||||
fillRule='evenodd'
|
||||
fill='#00678c'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type React from 'react'
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { ChevronRight } from 'lucide-react'
|
||||
import { BlockPathCalculator } from '@/lib/block-path-calculator'
|
||||
import { extractFieldsFromSchema, parseResponseFormatSafely } from '@/lib/response-format'
|
||||
@@ -14,7 +14,7 @@ import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
import type { BlockState } from '@/stores/workflows/workflow/types'
|
||||
import { getTool } from '@/tools/utils'
|
||||
import { getTriggersByProvider } from '@/triggers'
|
||||
import { getTrigger, getTriggersByProvider } from '@/triggers'
|
||||
|
||||
interface BlockTagGroup {
|
||||
blockName: string
|
||||
@@ -104,8 +104,8 @@ const getOutputTypeForPath = (
|
||||
outputPath: string
|
||||
): string => {
|
||||
if (block?.triggerMode && blockConfig?.triggers?.enabled) {
|
||||
const triggers = getTriggersByProvider(block.type)
|
||||
const firstTrigger = triggers[0]
|
||||
const triggerId = blockConfig?.triggers?.available?.[0]
|
||||
const firstTrigger = triggerId ? getTrigger(triggerId) : getTriggersByProvider(block.type)[0]
|
||||
|
||||
if (firstTrigger?.outputs) {
|
||||
const pathParts = outputPath.split('.')
|
||||
@@ -283,6 +283,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
onClose,
|
||||
style,
|
||||
}) => {
|
||||
const containerRef = useRef<HTMLDivElement>(null)
|
||||
const [selectedIndex, setSelectedIndex] = useState(0)
|
||||
const [hoveredNested, setHoveredNested] = useState<{ tag: string; index: number } | null>(null)
|
||||
const [inSubmenu, setInSubmenu] = useState(false)
|
||||
@@ -417,8 +418,10 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
}
|
||||
} else {
|
||||
if (sourceBlock?.triggerMode && blockConfig.triggers?.enabled) {
|
||||
const triggers = getTriggersByProvider(sourceBlock.type)
|
||||
const firstTrigger = triggers[0]
|
||||
const triggerId = blockConfig?.triggers?.available?.[0]
|
||||
const firstTrigger = triggerId
|
||||
? getTrigger(triggerId)
|
||||
: getTriggersByProvider(sourceBlock.type)[0]
|
||||
|
||||
if (firstTrigger?.outputs) {
|
||||
// Use trigger outputs instead of block outputs
|
||||
@@ -686,8 +689,10 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
} else {
|
||||
const blockState = blocks[accessibleBlockId]
|
||||
if (blockState?.triggerMode && blockConfig.triggers?.enabled) {
|
||||
const triggers = getTriggersByProvider(blockState.type) // Use block type as provider
|
||||
const firstTrigger = triggers[0]
|
||||
const triggerId = blockConfig?.triggers?.available?.[0]
|
||||
const firstTrigger = triggerId
|
||||
? getTrigger(triggerId)
|
||||
: getTriggersByProvider(blockState.type)[0]
|
||||
|
||||
if (firstTrigger?.outputs) {
|
||||
// Use trigger outputs instead of block outputs
|
||||
@@ -949,11 +954,54 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
}
|
||||
}, [orderedTags.length, selectedIndex])
|
||||
|
||||
// Close on outside click/touch when dropdown is visible
|
||||
useEffect(() => {
|
||||
if (!visible) return
|
||||
|
||||
const handlePointerDown = (e: MouseEvent | TouchEvent) => {
|
||||
const el = containerRef.current
|
||||
if (!el) return
|
||||
const target = e.target as Node
|
||||
if (!el.contains(target)) {
|
||||
onClose?.()
|
||||
}
|
||||
}
|
||||
|
||||
// Use capture phase to detect before child handlers potentially stop propagation
|
||||
document.addEventListener('mousedown', handlePointerDown, true)
|
||||
document.addEventListener('touchstart', handlePointerDown, true)
|
||||
return () => {
|
||||
document.removeEventListener('mousedown', handlePointerDown, true)
|
||||
document.removeEventListener('touchstart', handlePointerDown, true)
|
||||
}
|
||||
}, [visible, onClose])
|
||||
|
||||
useEffect(() => {
|
||||
if (visible) {
|
||||
const handleKeyboardEvent = (e: KeyboardEvent) => {
|
||||
if (!orderedTags.length) return
|
||||
|
||||
const canEnterSubmenuForSelected = (): {
|
||||
groupIndex: number
|
||||
nestedTagIndex: number
|
||||
} | null => {
|
||||
if (selectedIndex < 0 || selectedIndex >= orderedTags.length) return null
|
||||
const selectedTag = orderedTags[selectedIndex]
|
||||
for (let gi = 0; gi < nestedBlockTagGroups.length; gi++) {
|
||||
const group = nestedBlockTagGroups[gi]
|
||||
for (let ni = 0; ni < group.nestedTags.length; ni++) {
|
||||
const nestedTag = group.nestedTags[ni]
|
||||
if (nestedTag.children && nestedTag.children.length > 0) {
|
||||
const firstChild = nestedTag.children[0]
|
||||
if (firstChild.fullTag === selectedTag) {
|
||||
return { groupIndex: gi, nestedTagIndex: ni }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
if (inSubmenu) {
|
||||
const currentHovered = hoveredNested
|
||||
if (!currentHovered) {
|
||||
@@ -1082,31 +1130,22 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
})
|
||||
break
|
||||
case 'ArrowRight':
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
if (selectedIndex >= 0 && selectedIndex < orderedTags.length) {
|
||||
const selectedTag = orderedTags[selectedIndex]
|
||||
for (const group of nestedBlockTagGroups) {
|
||||
for (
|
||||
let nestedTagIndex = 0;
|
||||
nestedTagIndex < group.nestedTags.length;
|
||||
nestedTagIndex++
|
||||
) {
|
||||
const nestedTag = group.nestedTags[nestedTagIndex]
|
||||
if (nestedTag.children && nestedTag.children.length > 0) {
|
||||
const firstChild = nestedTag.children[0]
|
||||
if (firstChild.fullTag === selectedTag) {
|
||||
setInSubmenu(true)
|
||||
setSubmenuIndex(0)
|
||||
setHoveredNested({
|
||||
tag: `${group.blockId}-${nestedTag.key}`,
|
||||
index: nestedTagIndex,
|
||||
})
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
{
|
||||
const targetLocation = canEnterSubmenuForSelected()
|
||||
if (!targetLocation) {
|
||||
// No submenu action for current selection; allow caret move
|
||||
return
|
||||
}
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
const group = nestedBlockTagGroups[targetLocation.groupIndex]
|
||||
const nestedTag = group.nestedTags[targetLocation.nestedTagIndex]
|
||||
setInSubmenu(true)
|
||||
setSubmenuIndex(0)
|
||||
setHoveredNested({
|
||||
tag: `${group.blockId}-${nestedTag.key}`,
|
||||
index: targetLocation.nestedTagIndex,
|
||||
})
|
||||
}
|
||||
break
|
||||
case 'Enter':
|
||||
@@ -1173,6 +1212,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
|
||||
return (
|
||||
<div
|
||||
ref={containerRef}
|
||||
className={cn(
|
||||
'absolute z-[9999] mt-1 w-full overflow-visible rounded-md border bg-popover shadow-md',
|
||||
className
|
||||
|
||||
@@ -23,6 +23,33 @@ const DEFAULT_FUNCTION_TIMEOUT = 5000
|
||||
const REQUEST_TIMEOUT = 120000
|
||||
const CUSTOM_TOOL_PREFIX = 'custom_'
|
||||
|
||||
/**
|
||||
* Helper function to collect runtime block outputs and name mappings
|
||||
* for tag resolution in custom tools and prompts
|
||||
*/
|
||||
function collectBlockData(context: ExecutionContext): {
|
||||
blockData: Record<string, any>
|
||||
blockNameMapping: Record<string, string>
|
||||
} {
|
||||
const blockData: Record<string, any> = {}
|
||||
const blockNameMapping: Record<string, string> = {}
|
||||
|
||||
for (const [id, state] of context.blockStates.entries()) {
|
||||
if (state.output !== undefined) {
|
||||
blockData[id] = state.output
|
||||
const workflowBlock = context.workflow?.blocks?.find((b) => b.id === id)
|
||||
if (workflowBlock?.metadata?.name) {
|
||||
// Map both the display name and normalized form
|
||||
blockNameMapping[workflowBlock.metadata.name] = id
|
||||
const normalized = workflowBlock.metadata.name.replace(/\s+/g, '').toLowerCase()
|
||||
blockNameMapping[normalized] = id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { blockData, blockNameMapping }
|
||||
}
|
||||
|
||||
/**
|
||||
* Handler for Agent blocks that process LLM requests with optional tools.
|
||||
*/
|
||||
@@ -172,6 +199,9 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
// Merge user-provided parameters with LLM-generated parameters
|
||||
const mergedParams = mergeToolParameters(userProvidedParams, callParams)
|
||||
|
||||
// Collect block outputs for tag resolution
|
||||
const { blockData, blockNameMapping } = collectBlockData(context)
|
||||
|
||||
const result = await executeTool(
|
||||
'function_execute',
|
||||
{
|
||||
@@ -179,6 +209,9 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
...mergedParams,
|
||||
timeout: tool.timeout ?? DEFAULT_FUNCTION_TIMEOUT,
|
||||
envVars: context.environmentVariables || {},
|
||||
workflowVariables: context.workflowVariables || {},
|
||||
blockData,
|
||||
blockNameMapping,
|
||||
isCustomTool: true,
|
||||
_context: { workflowId: context.workflowId },
|
||||
},
|
||||
@@ -352,6 +385,9 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
|
||||
const validMessages = this.validateMessages(messages)
|
||||
|
||||
// Collect block outputs for runtime resolution
|
||||
const { blockData, blockNameMapping } = collectBlockData(context)
|
||||
|
||||
return {
|
||||
provider: providerId,
|
||||
model,
|
||||
@@ -368,6 +404,9 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
stream: streaming,
|
||||
messages,
|
||||
environmentVariables: context.environmentVariables || {},
|
||||
workflowVariables: context.workflowVariables || {},
|
||||
blockData,
|
||||
blockNameMapping,
|
||||
reasoningEffort: inputs.reasoningEffort,
|
||||
verbosity: inputs.verbosity,
|
||||
}
|
||||
@@ -457,6 +496,9 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
|
||||
const finalApiKey = this.getApiKey(providerId, model, providerRequest.apiKey)
|
||||
|
||||
// Collect block outputs for runtime resolution
|
||||
const { blockData, blockNameMapping } = collectBlockData(context)
|
||||
|
||||
const response = await executeProviderRequest(providerId, {
|
||||
model,
|
||||
systemPrompt: 'systemPrompt' in providerRequest ? providerRequest.systemPrompt : undefined,
|
||||
@@ -472,6 +514,9 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
stream: providerRequest.stream,
|
||||
messages: 'messages' in providerRequest ? providerRequest.messages : undefined,
|
||||
environmentVariables: context.environmentVariables || {},
|
||||
workflowVariables: context.workflowVariables || {},
|
||||
blockData,
|
||||
blockNameMapping,
|
||||
})
|
||||
|
||||
this.logExecutionSuccess(providerId, model, context, block, providerStartTime, response)
|
||||
|
||||
@@ -77,6 +77,7 @@ describe('FunctionBlockHandler', () => {
|
||||
code: inputs.code,
|
||||
timeout: inputs.timeout,
|
||||
envVars: {},
|
||||
workflowVariables: {},
|
||||
blockData: {},
|
||||
blockNameMapping: {},
|
||||
_context: { workflowId: mockContext.workflowId },
|
||||
@@ -108,6 +109,7 @@ describe('FunctionBlockHandler', () => {
|
||||
code: expectedCode,
|
||||
timeout: inputs.timeout,
|
||||
envVars: {},
|
||||
workflowVariables: {},
|
||||
blockData: {},
|
||||
blockNameMapping: {},
|
||||
_context: { workflowId: mockContext.workflowId },
|
||||
@@ -132,6 +134,7 @@ describe('FunctionBlockHandler', () => {
|
||||
code: inputs.code,
|
||||
timeout: 5000, // Default timeout
|
||||
envVars: {},
|
||||
workflowVariables: {},
|
||||
blockData: {},
|
||||
blockNameMapping: {},
|
||||
_context: { workflowId: mockContext.workflowId },
|
||||
|
||||
@@ -6,6 +6,33 @@ import { executeTool } from '@/tools'
|
||||
|
||||
const logger = createLogger('FunctionBlockHandler')
|
||||
|
||||
/**
|
||||
* Helper function to collect runtime block outputs and name mappings
|
||||
* for tag resolution in function execution
|
||||
*/
|
||||
function collectBlockData(context: ExecutionContext): {
|
||||
blockData: Record<string, any>
|
||||
blockNameMapping: Record<string, string>
|
||||
} {
|
||||
const blockData: Record<string, any> = {}
|
||||
const blockNameMapping: Record<string, string> = {}
|
||||
|
||||
for (const [id, state] of context.blockStates.entries()) {
|
||||
if (state.output !== undefined) {
|
||||
blockData[id] = state.output
|
||||
const workflowBlock = context.workflow?.blocks?.find((b) => b.id === id)
|
||||
if (workflowBlock?.metadata?.name) {
|
||||
// Map both the display name and normalized form
|
||||
blockNameMapping[workflowBlock.metadata.name] = id
|
||||
const normalized = workflowBlock.metadata.name.replace(/\s+/g, '').toLowerCase()
|
||||
blockNameMapping[normalized] = id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { blockData, blockNameMapping }
|
||||
}
|
||||
|
||||
/**
|
||||
* Handler for Function blocks that execute custom code.
|
||||
*/
|
||||
@@ -24,20 +51,7 @@ export class FunctionBlockHandler implements BlockHandler {
|
||||
: inputs.code
|
||||
|
||||
// Extract block data for variable resolution
|
||||
const blockData: Record<string, any> = {}
|
||||
const blockNameMapping: Record<string, string> = {}
|
||||
|
||||
for (const [blockId, blockState] of context.blockStates.entries()) {
|
||||
if (blockState.output) {
|
||||
blockData[blockId] = blockState.output
|
||||
|
||||
// Try to find the block name from the workflow
|
||||
const workflowBlock = context.workflow?.blocks?.find((b) => b.id === blockId)
|
||||
if (workflowBlock?.metadata?.name) {
|
||||
blockNameMapping[workflowBlock.metadata.name] = blockId
|
||||
}
|
||||
}
|
||||
}
|
||||
const { blockData, blockNameMapping } = collectBlockData(context)
|
||||
|
||||
// Directly use the function_execute tool which calls the API route
|
||||
const result = await executeTool(
|
||||
@@ -46,6 +60,7 @@ export class FunctionBlockHandler implements BlockHandler {
|
||||
code: codeContent,
|
||||
timeout: inputs.timeout || 5000,
|
||||
envVars: context.environmentVariables || {},
|
||||
workflowVariables: context.workflowVariables || {},
|
||||
blockData: blockData, // Pass block data for variable resolution
|
||||
blockNameMapping: blockNameMapping, // Pass block name to ID mapping
|
||||
_context: { workflowId: context.workflowId },
|
||||
|
||||
@@ -222,9 +222,10 @@ export class LoopBlockHandler implements BlockHandler {
|
||||
}
|
||||
}
|
||||
|
||||
// If we have a resolver, use it to resolve any block references in the expression
|
||||
// If we have a resolver, use it to resolve any variable references first, then block references
|
||||
if (this.resolver) {
|
||||
const resolved = this.resolver.resolveBlockReferences(forEachItems, context, block)
|
||||
const resolvedVars = this.resolver.resolveVariableReferences(forEachItems, block)
|
||||
const resolved = this.resolver.resolveBlockReferences(resolvedVars, context, block)
|
||||
|
||||
// Try to parse the resolved value
|
||||
try {
|
||||
|
||||
@@ -413,9 +413,10 @@ export class ParallelBlockHandler implements BlockHandler {
|
||||
}
|
||||
}
|
||||
|
||||
// If we have a resolver, use it to resolve any block references in the expression
|
||||
// If we have a resolver, use it to resolve any variable references first, then block references
|
||||
if (this.resolver) {
|
||||
const resolved = this.resolver.resolveBlockReferences(distribution, context, block)
|
||||
const resolvedVars = this.resolver.resolveVariableReferences(distribution, block)
|
||||
const resolved = this.resolver.resolveBlockReferences(resolvedVars, context, block)
|
||||
|
||||
// Try to parse the resolved value
|
||||
try {
|
||||
|
||||
@@ -63,6 +63,18 @@ export class TriggerBlockHandler implements BlockHandler {
|
||||
}
|
||||
}
|
||||
|
||||
if (provider === 'microsoftteams') {
|
||||
const providerData = (starterOutput as any)[provider] || webhookData[provider] || {}
|
||||
// Expose the raw Teams message payload at the root for easy indexing
|
||||
const payloadSource = providerData?.message?.raw || webhookData.payload || {}
|
||||
return {
|
||||
...payloadSource,
|
||||
// Keep nested copy for backwards compatibility with existing workflows
|
||||
[provider]: providerData,
|
||||
webhook: starterOutput.webhook,
|
||||
}
|
||||
}
|
||||
|
||||
// Provider-specific early return for Airtable: preserve raw shape entirely
|
||||
if (provider === 'airtable') {
|
||||
return starterOutput
|
||||
|
||||
@@ -119,23 +119,12 @@ export class Executor {
|
||||
if (options.contextExtensions) {
|
||||
this.contextExtensions = options.contextExtensions
|
||||
this.isChildExecution = options.contextExtensions.isChildExecution || false
|
||||
|
||||
if (this.contextExtensions.stream) {
|
||||
logger.info('Executor initialized with streaming enabled', {
|
||||
hasSelectedOutputIds: Array.isArray(this.contextExtensions.selectedOutputIds),
|
||||
selectedOutputCount: Array.isArray(this.contextExtensions.selectedOutputIds)
|
||||
? this.contextExtensions.selectedOutputIds.length
|
||||
: 0,
|
||||
selectedOutputIds: this.contextExtensions.selectedOutputIds || [],
|
||||
})
|
||||
}
|
||||
}
|
||||
} else {
|
||||
this.actualWorkflow = workflowParam
|
||||
|
||||
if (workflowInput) {
|
||||
this.workflowInput = workflowInput
|
||||
logger.info('[Executor] Using workflow input:', JSON.stringify(this.workflowInput, null, 2))
|
||||
} else {
|
||||
this.workflowInput = {}
|
||||
}
|
||||
@@ -400,8 +389,7 @@ export class Executor {
|
||||
try {
|
||||
reader.releaseLock()
|
||||
} catch (releaseError: any) {
|
||||
// Reader might already be released
|
||||
logger.debug('Reader already released:', releaseError)
|
||||
// Reader might already be released - this is expected and safe to ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -641,15 +629,12 @@ export class Executor {
|
||||
* @throws Error if workflow validation fails
|
||||
*/
|
||||
private validateWorkflow(startBlockId?: string): void {
|
||||
let validationBlock: SerializedBlock | undefined
|
||||
|
||||
if (startBlockId) {
|
||||
// If starting from a specific block (webhook trigger or schedule trigger), validate that block exists
|
||||
const startBlock = this.actualWorkflow.blocks.find((block) => block.id === startBlockId)
|
||||
if (!startBlock || !startBlock.enabled) {
|
||||
throw new Error(`Start block ${startBlockId} not found or disabled`)
|
||||
}
|
||||
validationBlock = startBlock
|
||||
// Trigger blocks (webhook and schedule) can have incoming connections, so no need to check that
|
||||
} else {
|
||||
// Default validation for starter block
|
||||
@@ -659,7 +644,6 @@ export class Executor {
|
||||
if (!starterBlock || !starterBlock.enabled) {
|
||||
throw new Error('Workflow must have an enabled starter block')
|
||||
}
|
||||
validationBlock = starterBlock
|
||||
|
||||
const incomingToStarter = this.actualWorkflow.connections.filter(
|
||||
(conn) => conn.target === starterBlock.id
|
||||
@@ -741,6 +725,7 @@ export class Executor {
|
||||
duration: 0, // Initialize with zero, will be updated throughout execution
|
||||
},
|
||||
environmentVariables: this.environmentVariables,
|
||||
workflowVariables: this.workflowVariables,
|
||||
decisions: {
|
||||
router: new Map(),
|
||||
condition: new Map(),
|
||||
@@ -808,11 +793,6 @@ export class Executor {
|
||||
? this.workflowInput.input[field.name] // Try to get from input.field
|
||||
: this.workflowInput?.[field.name] // Fallback to direct field access
|
||||
|
||||
logger.info(
|
||||
`[Executor] Processing input field ${field.name} (${field.type}):`,
|
||||
inputValue !== undefined ? JSON.stringify(inputValue) : 'undefined'
|
||||
)
|
||||
|
||||
if (inputValue === undefined || inputValue === null) {
|
||||
if (Object.hasOwn(field, 'value')) {
|
||||
inputValue = (field as any).value
|
||||
@@ -873,8 +853,6 @@ export class Executor {
|
||||
blockOutput.files = this.workflowInput.files
|
||||
}
|
||||
|
||||
logger.info(`[Executor] Starting block output:`, JSON.stringify(blockOutput, null, 2))
|
||||
|
||||
context.blockStates.set(initBlock.id, {
|
||||
output: blockOutput,
|
||||
executed: true,
|
||||
@@ -967,11 +945,6 @@ export class Executor {
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(
|
||||
'[Executor] Fallback starting block output:',
|
||||
JSON.stringify(blockOutput, null, 2)
|
||||
)
|
||||
|
||||
context.blockStates.set(initBlock.id, {
|
||||
output: blockOutput,
|
||||
executed: true,
|
||||
@@ -1342,7 +1315,7 @@ export class Executor {
|
||||
const results: (NormalizedBlockOutput | StreamingExecution)[] = []
|
||||
const errors: Error[] = []
|
||||
|
||||
settledResults.forEach((result, index) => {
|
||||
settledResults.forEach((result) => {
|
||||
if (result.status === 'fulfilled') {
|
||||
results.push(result.value)
|
||||
} else {
|
||||
@@ -1443,7 +1416,6 @@ export class Executor {
|
||||
}
|
||||
|
||||
const addConsole = useConsoleStore.getState().addConsole
|
||||
const { setActiveBlocks } = useExecutionStore.getState()
|
||||
|
||||
try {
|
||||
if (block.enabled === false) {
|
||||
|
||||
@@ -107,6 +107,7 @@ export interface ExecutionContext {
|
||||
blockLogs: BlockLog[] // Chronological log of block executions
|
||||
metadata: ExecutionMetadata // Timing metadata for the execution
|
||||
environmentVariables: Record<string, string> // Environment variables available during execution
|
||||
workflowVariables?: Record<string, any> // Workflow variables available during execution
|
||||
|
||||
// Routing decisions for path determination
|
||||
decisions: {
|
||||
|
||||
@@ -145,12 +145,9 @@ export async function initializeBillingPeriod(
|
||||
end = billingPeriod.end
|
||||
}
|
||||
|
||||
// Update user stats with billing period info
|
||||
await db
|
||||
.update(userStats)
|
||||
.set({
|
||||
billingPeriodStart: start,
|
||||
billingPeriodEnd: end,
|
||||
currentPeriodCost: '0',
|
||||
})
|
||||
.where(eq(userStats.userId, userId))
|
||||
@@ -212,14 +209,12 @@ export async function resetUserBillingPeriod(userId: string): Promise<void> {
|
||||
newPeriodEnd = billingPeriod.end
|
||||
}
|
||||
|
||||
// Archive current period cost and reset for new period
|
||||
// Archive current period cost and reset for new period (no longer updating period dates in user_stats)
|
||||
await db
|
||||
.update(userStats)
|
||||
.set({
|
||||
lastPeriodCost: currentPeriodCost, // Archive previous period
|
||||
currentPeriodCost: '0', // Reset to zero for new period
|
||||
billingPeriodStart: newPeriodStart,
|
||||
billingPeriodEnd: newPeriodEnd,
|
||||
lastPeriodCost: currentPeriodCost,
|
||||
currentPeriodCost: '0',
|
||||
})
|
||||
.where(eq(userStats.userId, userId))
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ import { getUserUsageData } from '@/lib/billing/core/usage'
|
||||
import { requireStripeClient } from '@/lib/billing/stripe-client'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { db } from '@/db'
|
||||
import { member, organization, subscription, user, userStats } from '@/db/schema'
|
||||
import { member, organization, subscription, user } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('Billing')
|
||||
|
||||
@@ -673,15 +673,14 @@ export async function getUsersAndOrganizationsForOverageBilling(): Promise<{
|
||||
continue // Skip free plans
|
||||
}
|
||||
|
||||
// Check if subscription period ends today
|
||||
// Check if subscription period ends today (range-based, inclusive of day)
|
||||
let shouldBillToday = false
|
||||
|
||||
if (sub.periodEnd) {
|
||||
const periodEnd = new Date(sub.periodEnd)
|
||||
periodEnd.setUTCHours(0, 0, 0, 0) // Normalize to start of day
|
||||
const endsToday = periodEnd >= today && periodEnd < tomorrow
|
||||
|
||||
// Bill if the subscription period ends today
|
||||
if (periodEnd.getTime() === today.getTime()) {
|
||||
if (endsToday) {
|
||||
shouldBillToday = true
|
||||
logger.info('Subscription period ends today', {
|
||||
referenceId: sub.referenceId,
|
||||
@@ -689,29 +688,6 @@ export async function getUsersAndOrganizationsForOverageBilling(): Promise<{
|
||||
periodEnd: sub.periodEnd,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
// Fallback: Check userStats billing period for users
|
||||
const userStatsRecord = await db
|
||||
.select({
|
||||
billingPeriodEnd: userStats.billingPeriodEnd,
|
||||
})
|
||||
.from(userStats)
|
||||
.where(eq(userStats.userId, sub.referenceId))
|
||||
.limit(1)
|
||||
|
||||
if (userStatsRecord.length > 0 && userStatsRecord[0].billingPeriodEnd) {
|
||||
const billingPeriodEnd = new Date(userStatsRecord[0].billingPeriodEnd)
|
||||
billingPeriodEnd.setUTCHours(0, 0, 0, 0) // Normalize to start of day
|
||||
|
||||
if (billingPeriodEnd.getTime() === today.getTime()) {
|
||||
shouldBillToday = true
|
||||
logger.info('User billing period ends today (from userStats)', {
|
||||
userId: sub.referenceId,
|
||||
plan: sub.plan,
|
||||
billingPeriodEnd: userStatsRecord[0].billingPeriodEnd,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldBillToday) {
|
||||
|
||||
@@ -94,8 +94,6 @@ export async function getOrganizationBillingData(
|
||||
// User stats fields
|
||||
currentPeriodCost: userStats.currentPeriodCost,
|
||||
currentUsageLimit: userStats.currentUsageLimit,
|
||||
billingPeriodStart: userStats.billingPeriodStart,
|
||||
billingPeriodEnd: userStats.billingPeriodEnd,
|
||||
lastActive: userStats.lastActive,
|
||||
})
|
||||
.from(member)
|
||||
@@ -151,10 +149,9 @@ export async function getOrganizationBillingData(
|
||||
|
||||
const averageUsagePerMember = members.length > 0 ? totalCurrentUsage / members.length : 0
|
||||
|
||||
// Get billing period from first member (should be consistent across org)
|
||||
const firstMember = membersWithUsage[0]
|
||||
const billingPeriodStart = firstMember?.billingPeriodStart || null
|
||||
const billingPeriodEnd = firstMember?.billingPeriodEnd || null
|
||||
// Billing period comes from the organization's subscription
|
||||
const billingPeriodStart = subscription.periodStart || null
|
||||
const billingPeriodEnd = subscription.periodEnd || null
|
||||
|
||||
return {
|
||||
organizationId,
|
||||
|
||||
@@ -41,6 +41,7 @@ export async function getUserUsageData(userId: string): Promise<UsageData> {
|
||||
}
|
||||
|
||||
const stats = userStatsData[0]
|
||||
const subscription = await getHighestPrioritySubscription(userId)
|
||||
const currentUsage = Number.parseFloat(
|
||||
stats.currentPeriodCost?.toString() ?? stats.totalCost.toString()
|
||||
)
|
||||
@@ -49,14 +50,19 @@ export async function getUserUsageData(userId: string): Promise<UsageData> {
|
||||
const isWarning = percentUsed >= 80
|
||||
const isExceeded = currentUsage >= limit
|
||||
|
||||
// Derive billing period dates from subscription (source of truth).
|
||||
// For free users or missing dates, expose nulls.
|
||||
const billingPeriodStart = subscription?.periodStart ?? null
|
||||
const billingPeriodEnd = subscription?.periodEnd ?? null
|
||||
|
||||
return {
|
||||
currentUsage,
|
||||
limit,
|
||||
percentUsed,
|
||||
isWarning,
|
||||
isExceeded,
|
||||
billingPeriodStart: stats.billingPeriodStart,
|
||||
billingPeriodEnd: stats.billingPeriodEnd,
|
||||
billingPeriodStart,
|
||||
billingPeriodEnd,
|
||||
lastPeriodCost: Number.parseFloat(stats.lastPeriodCost?.toString() || '0'),
|
||||
}
|
||||
} catch (error) {
|
||||
|
||||
@@ -1,5 +1,12 @@
|
||||
import { eq } from 'drizzle-orm'
|
||||
import type Stripe from 'stripe'
|
||||
import {
|
||||
resetOrganizationBillingPeriod,
|
||||
resetUserBillingPeriod,
|
||||
} from '@/lib/billing/core/billing-periods'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { db } from '@/db'
|
||||
import { subscription as subscriptionTable } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('StripeInvoiceWebhooks')
|
||||
|
||||
@@ -11,27 +18,75 @@ export async function handleInvoicePaymentSucceeded(event: Stripe.Event) {
|
||||
try {
|
||||
const invoice = event.data.object as Stripe.Invoice
|
||||
|
||||
// Check if this is an overage billing invoice
|
||||
if (invoice.metadata?.type !== 'overage_billing') {
|
||||
logger.info('Ignoring non-overage billing invoice', { invoiceId: invoice.id })
|
||||
// Case 1: Overage invoices (metadata.type === 'overage_billing')
|
||||
if (invoice.metadata?.type === 'overage_billing') {
|
||||
const customerId = invoice.customer as string
|
||||
const chargedAmount = invoice.amount_paid / 100
|
||||
const billingPeriod = invoice.metadata?.billingPeriod || 'unknown'
|
||||
|
||||
logger.info('Overage billing invoice payment succeeded', {
|
||||
invoiceId: invoice.id,
|
||||
customerId,
|
||||
chargedAmount,
|
||||
billingPeriod,
|
||||
customerEmail: invoice.customer_email,
|
||||
hostedInvoiceUrl: invoice.hosted_invoice_url,
|
||||
})
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
const customerId = invoice.customer as string
|
||||
const chargedAmount = invoice.amount_paid / 100 // Convert from cents to dollars
|
||||
const billingPeriod = invoice.metadata?.billingPeriod || 'unknown'
|
||||
// Case 2: Subscription renewal invoice paid (primary period rollover)
|
||||
// Only reset on successful payment to avoid granting a new period while in dunning
|
||||
if (invoice.subscription) {
|
||||
// Filter to subscription-cycle renewals; ignore updates/off-cycle charges
|
||||
const reason = invoice.billing_reason
|
||||
const isCycle = reason === 'subscription_cycle'
|
||||
if (!isCycle) {
|
||||
logger.info('Ignoring non-cycle subscription invoice on payment_succeeded', {
|
||||
invoiceId: invoice.id,
|
||||
billingReason: reason,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
logger.info('Overage billing invoice payment succeeded', {
|
||||
invoiceId: invoice.id,
|
||||
customerId,
|
||||
chargedAmount,
|
||||
billingPeriod,
|
||||
customerEmail: invoice.customer_email,
|
||||
hostedInvoiceUrl: invoice.hosted_invoice_url,
|
||||
})
|
||||
const stripeSubscriptionId = String(invoice.subscription)
|
||||
const records = await db
|
||||
.select()
|
||||
.from(subscriptionTable)
|
||||
.where(eq(subscriptionTable.stripeSubscriptionId, stripeSubscriptionId))
|
||||
.limit(1)
|
||||
|
||||
// Additional payment success logic can be added here
|
||||
// For example: update internal billing status, trigger analytics events, etc.
|
||||
if (records.length === 0) {
|
||||
logger.warn('No matching internal subscription for paid Stripe invoice', {
|
||||
invoiceId: invoice.id,
|
||||
stripeSubscriptionId,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const sub = records[0]
|
||||
|
||||
if (sub.plan === 'team' || sub.plan === 'enterprise') {
|
||||
await resetOrganizationBillingPeriod(sub.referenceId)
|
||||
logger.info('Reset organization billing period on subscription invoice payment', {
|
||||
invoiceId: invoice.id,
|
||||
organizationId: sub.referenceId,
|
||||
plan: sub.plan,
|
||||
})
|
||||
} else {
|
||||
await resetUserBillingPeriod(sub.referenceId)
|
||||
logger.info('Reset user billing period on subscription invoice payment', {
|
||||
invoiceId: invoice.id,
|
||||
userId: sub.referenceId,
|
||||
plan: sub.plan,
|
||||
})
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
logger.info('Ignoring non-subscription invoice payment', { invoiceId: invoice.id })
|
||||
} catch (error) {
|
||||
logger.error('Failed to handle invoice payment succeeded', {
|
||||
eventId: event.id,
|
||||
@@ -98,28 +153,23 @@ export async function handleInvoicePaymentFailed(event: Stripe.Event) {
|
||||
export async function handleInvoiceFinalized(event: Stripe.Event) {
|
||||
try {
|
||||
const invoice = event.data.object as Stripe.Invoice
|
||||
|
||||
// Check if this is an overage billing invoice
|
||||
if (invoice.metadata?.type !== 'overage_billing') {
|
||||
logger.info('Ignoring non-overage billing invoice finalization', { invoiceId: invoice.id })
|
||||
// Do not reset usage on finalized; wait for payment success to avoid granting new period during dunning
|
||||
if (invoice.metadata?.type === 'overage_billing') {
|
||||
const customerId = invoice.customer as string
|
||||
const invoiceAmount = invoice.amount_due / 100
|
||||
const billingPeriod = invoice.metadata?.billingPeriod || 'unknown'
|
||||
logger.info('Overage billing invoice finalized', {
|
||||
invoiceId: invoice.id,
|
||||
customerId,
|
||||
invoiceAmount,
|
||||
billingPeriod,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const customerId = invoice.customer as string
|
||||
const invoiceAmount = invoice.amount_due / 100 // Convert from cents to dollars
|
||||
const billingPeriod = invoice.metadata?.billingPeriod || 'unknown'
|
||||
|
||||
logger.info('Overage billing invoice finalized', {
|
||||
logger.info('Ignoring subscription invoice finalization; will act on payment_succeeded', {
|
||||
invoiceId: invoice.id,
|
||||
customerId,
|
||||
invoiceAmount,
|
||||
billingPeriod,
|
||||
customerEmail: invoice.customer_email,
|
||||
hostedInvoiceUrl: invoice.hosted_invoice_url,
|
||||
billingReason: invoice.billing_reason,
|
||||
})
|
||||
|
||||
// Additional invoice finalization logic can be added here
|
||||
// For example: update internal records, trigger notifications, etc.
|
||||
} catch (error) {
|
||||
logger.error('Failed to handle invoice finalized', {
|
||||
eventId: event.id,
|
||||
|
||||
@@ -6,17 +6,11 @@ import { apiKey as apiKeyTable } from '@/db/schema'
|
||||
|
||||
export type { NotificationStatus } from '@/lib/copilot/types'
|
||||
|
||||
/**
|
||||
* Authentication result for copilot API routes
|
||||
*/
|
||||
export interface CopilotAuthResult {
|
||||
userId: string | null
|
||||
isAuthenticated: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Standard error response helpers for copilot API routes
|
||||
*/
|
||||
export function createUnauthorizedResponse(): NextResponse {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
@@ -33,9 +27,6 @@ export function createInternalServerErrorResponse(message: string): NextResponse
|
||||
return NextResponse.json({ error: message }, { status: 500 })
|
||||
}
|
||||
|
||||
/**
|
||||
* Request tracking helpers for copilot API routes
|
||||
*/
|
||||
export function createRequestId(): string {
|
||||
return crypto.randomUUID()
|
||||
}
|
||||
@@ -63,20 +54,13 @@ export function createRequestTracker(short = true): RequestTracker {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Authenticate request using session or API key fallback
|
||||
* Returns userId if authenticated, null otherwise
|
||||
*/
|
||||
export async function authenticateCopilotRequest(req: NextRequest): Promise<CopilotAuthResult> {
|
||||
// Try session authentication first
|
||||
const session = await getSession()
|
||||
let userId: string | null = session?.user?.id || null
|
||||
|
||||
// If no session, check for API key auth
|
||||
if (!userId) {
|
||||
const apiKeyHeader = req.headers.get('x-api-key')
|
||||
if (apiKeyHeader) {
|
||||
// Verify API key
|
||||
const [apiKeyRecord] = await db
|
||||
.select({ userId: apiKeyTable.userId })
|
||||
.from(apiKeyTable)
|
||||
@@ -95,10 +79,6 @@ export async function authenticateCopilotRequest(req: NextRequest): Promise<Copi
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Authenticate request using session only (no API key fallback)
|
||||
* Returns userId if authenticated, null otherwise
|
||||
*/
|
||||
export async function authenticateCopilotRequestSessionOnly(): Promise<CopilotAuthResult> {
|
||||
const session = await getSession()
|
||||
const userId = session?.user?.id || null
|
||||
|
||||
@@ -74,17 +74,11 @@ export interface CopilotConfig {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate and return a ProviderId if valid, otherwise return null
|
||||
*/
|
||||
function validateProviderId(value: string | undefined): ProviderId | null {
|
||||
if (!value) return null
|
||||
return VALID_PROVIDER_IDS.includes(value as ProviderId) ? (value as ProviderId) : null
|
||||
}
|
||||
|
||||
/**
|
||||
* Safely parse a float from environment variable with validation
|
||||
*/
|
||||
function parseFloatEnv(value: string | undefined, name: string): number | null {
|
||||
if (!value) return null
|
||||
const parsed = Number.parseFloat(value)
|
||||
@@ -95,9 +89,6 @@ function parseFloatEnv(value: string | undefined, name: string): number | null {
|
||||
return parsed
|
||||
}
|
||||
|
||||
/**
|
||||
* Safely parse an integer from environment variable with validation
|
||||
*/
|
||||
function parseIntEnv(value: string | undefined, name: string): number | null {
|
||||
if (!value) return null
|
||||
const parsed = Number.parseInt(value, 10)
|
||||
@@ -108,18 +99,11 @@ function parseIntEnv(value: string | undefined, name: string): number | null {
|
||||
return parsed
|
||||
}
|
||||
|
||||
/**
|
||||
* Safely parse a boolean from environment variable
|
||||
*/
|
||||
function parseBooleanEnv(value: string | undefined): boolean | null {
|
||||
if (!value) return null
|
||||
return value.toLowerCase() === 'true'
|
||||
}
|
||||
|
||||
/**
|
||||
* Default copilot configuration
|
||||
* Uses Claude 4 Sonnet
|
||||
*/
|
||||
export const DEFAULT_COPILOT_CONFIG: CopilotConfig = {
|
||||
chat: {
|
||||
defaultProvider: 'anthropic',
|
||||
@@ -144,11 +128,7 @@ export const DEFAULT_COPILOT_CONFIG: CopilotConfig = {
|
||||
},
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply environment variable overrides to configuration
|
||||
*/
|
||||
function applyEnvironmentOverrides(config: CopilotConfig): void {
|
||||
// Chat configuration overrides
|
||||
const chatProvider = validateProviderId(process.env.COPILOT_CHAT_PROVIDER)
|
||||
if (chatProvider) {
|
||||
config.chat.defaultProvider = chatProvider
|
||||
@@ -175,7 +155,6 @@ function applyEnvironmentOverrides(config: CopilotConfig): void {
|
||||
config.chat.maxTokens = chatMaxTokens
|
||||
}
|
||||
|
||||
// RAG configuration overrides
|
||||
const ragProvider = validateProviderId(process.env.COPILOT_RAG_PROVIDER)
|
||||
if (ragProvider) {
|
||||
config.rag.defaultProvider = ragProvider
|
||||
@@ -215,7 +194,6 @@ function applyEnvironmentOverrides(config: CopilotConfig): void {
|
||||
config.rag.similarityThreshold = ragSimilarityThreshold
|
||||
}
|
||||
|
||||
// General configuration overrides
|
||||
const streamingEnabled = parseBooleanEnv(process.env.COPILOT_STREAMING_ENABLED)
|
||||
if (streamingEnabled !== null) {
|
||||
config.general.streamingEnabled = streamingEnabled
|
||||
@@ -234,9 +212,6 @@ function applyEnvironmentOverrides(config: CopilotConfig): void {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get copilot configuration with environment variable overrides
|
||||
*/
|
||||
export function getCopilotConfig(): CopilotConfig {
|
||||
const config = structuredClone(DEFAULT_COPILOT_CONFIG)
|
||||
|
||||
@@ -257,9 +232,6 @@ export function getCopilotConfig(): CopilotConfig {
|
||||
return config
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the model to use for a specific copilot function
|
||||
*/
|
||||
export function getCopilotModel(type: CopilotModelType): {
|
||||
provider: ProviderId
|
||||
model: string
|
||||
@@ -287,9 +259,6 @@ export function getCopilotModel(type: CopilotModelType): {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a numeric value against constraints
|
||||
*/
|
||||
function validateNumericValue(
|
||||
value: number,
|
||||
constraint: { min: number; max: number },
|
||||
@@ -301,13 +270,9 @@ function validateNumericValue(
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate that a provider/model combination is available
|
||||
*/
|
||||
export function validateCopilotConfig(config: CopilotConfig): ValidationResult {
|
||||
const errors: string[] = []
|
||||
|
||||
// Validate chat provider/model
|
||||
try {
|
||||
const chatDefaultModel = getProviderDefaultModel(config.chat.defaultProvider)
|
||||
if (!chatDefaultModel) {
|
||||
@@ -317,7 +282,6 @@ export function validateCopilotConfig(config: CopilotConfig): ValidationResult {
|
||||
errors.push(`Invalid chat provider: ${config.chat.defaultProvider}`)
|
||||
}
|
||||
|
||||
// Validate RAG provider/model
|
||||
try {
|
||||
const ragDefaultModel = getProviderDefaultModel(config.rag.defaultProvider)
|
||||
if (!ragDefaultModel) {
|
||||
@@ -327,7 +291,6 @@ export function validateCopilotConfig(config: CopilotConfig): ValidationResult {
|
||||
errors.push(`Invalid RAG provider: ${config.rag.defaultProvider}`)
|
||||
}
|
||||
|
||||
// Validate configuration values using constraints
|
||||
const validationChecks = [
|
||||
{
|
||||
value: config.chat.temperature,
|
||||
|
||||
@@ -1,385 +0,0 @@
|
||||
/**
|
||||
* YAML Workflow Examples for Copilot
|
||||
*
|
||||
* This file contains example YAML workflows that the copilot can reference
|
||||
* when helping users build workflows.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Map of workflow examples with human-readable IDs to YAML content
|
||||
*/
|
||||
export const WORKFLOW_EXAMPLES: Record<string, string> = {
|
||||
'basic-agent': `version: '1.0'
|
||||
blocks:
|
||||
start:
|
||||
type: starter
|
||||
name: Start
|
||||
inputs:
|
||||
startWorkflow: chat
|
||||
connections:
|
||||
success: greeting-agent
|
||||
greeting-agent:
|
||||
type: agent
|
||||
name: Greeting Agent
|
||||
inputs:
|
||||
systemPrompt: be nice
|
||||
userPrompt: <start.input>
|
||||
model: gpt-4o
|
||||
apiKey: '{{OPENAI_API_KEY}}'`,
|
||||
|
||||
tool_call_agent: `version: '1.0'
|
||||
blocks:
|
||||
start:
|
||||
type: starter
|
||||
name: Start
|
||||
inputs:
|
||||
startWorkflow: chat
|
||||
connections:
|
||||
success: research-agent
|
||||
research-agent:
|
||||
type: agent
|
||||
name: Greeting Agent
|
||||
inputs:
|
||||
systemPrompt: research the topic the user provides
|
||||
userPrompt: <start.input>
|
||||
model: gpt-4o
|
||||
apiKey: '{{OPENAI_API_KEY}}'
|
||||
tools:
|
||||
- type: exa
|
||||
title: Exa
|
||||
toolId: exa_search
|
||||
params:
|
||||
type: auto
|
||||
apiKey: '{{EXA_API_KEY}}'
|
||||
isExpanded: true
|
||||
operation: exa_search
|
||||
usageControl: auto`,
|
||||
|
||||
'basic-api': `version: '1.0'
|
||||
blocks:
|
||||
start:
|
||||
type: starter
|
||||
name: Start
|
||||
inputs:
|
||||
startWorkflow: chat
|
||||
connections:
|
||||
success: api-call
|
||||
api-call:
|
||||
type: api
|
||||
name: API 1
|
||||
inputs:
|
||||
url: https://url
|
||||
method: POST
|
||||
params:
|
||||
- id: param-1
|
||||
cells:
|
||||
Key: queryparam1
|
||||
Value: queryval1
|
||||
- id: param-2
|
||||
cells:
|
||||
Key: queryparam2
|
||||
Value: queryval2
|
||||
headers:
|
||||
- id: header-1
|
||||
cells:
|
||||
Key: X-CSRF-HEADER
|
||||
Value: '-'
|
||||
- id: header-2
|
||||
cells:
|
||||
Key: Authorization
|
||||
Value: Bearer {{API_KEY}}
|
||||
body: |-
|
||||
{
|
||||
body
|
||||
}`,
|
||||
|
||||
'multi-agent': `version: '1.0'
|
||||
blocks:
|
||||
start:
|
||||
type: starter
|
||||
name: Start
|
||||
inputs:
|
||||
startWorkflow: chat
|
||||
connections:
|
||||
success: agent-1
|
||||
agent-1:
|
||||
type: agent
|
||||
name: Agent 1
|
||||
inputs:
|
||||
systemPrompt: agent1 sys
|
||||
userPrompt: agent 1 user
|
||||
model: gpt-4o
|
||||
apiKey: '{{OPENAI_API_KEY}}'
|
||||
connections:
|
||||
success:
|
||||
- agent-2
|
||||
- agent-3
|
||||
agent-2:
|
||||
type: agent
|
||||
name: Agent 2
|
||||
inputs:
|
||||
systemPrompt: agent2sys
|
||||
userPrompt: agent2 user
|
||||
model: gpt-4o
|
||||
apiKey: '{{OPENAI_API_KEY}}'
|
||||
agent-3:
|
||||
type: agent
|
||||
name: Agent 3
|
||||
inputs:
|
||||
systemPrompt: agent3 sys
|
||||
userPrompt: agent3 user
|
||||
model: gpt-4o
|
||||
apiKey: '{{OPENAI_API_KEY}}'`,
|
||||
|
||||
'iter-loop': `version: '1.0'
|
||||
blocks:
|
||||
start:
|
||||
type: starter
|
||||
name: Start
|
||||
inputs:
|
||||
startWorkflow: chat
|
||||
connections:
|
||||
success: count-loop
|
||||
count-loop:
|
||||
type: loop
|
||||
name: Loop 1
|
||||
inputs:
|
||||
count: 5
|
||||
loopType: for
|
||||
connections:
|
||||
loop:
|
||||
start: loop-processor
|
||||
end: summary-agent
|
||||
summary-agent:
|
||||
type: agent
|
||||
name: Agent 2
|
||||
inputs:
|
||||
systemPrompt: outside agent sys prompt
|
||||
userPrompt: |-
|
||||
outside agent user prompt:
|
||||
<loop1.results>
|
||||
model: gpt-4o
|
||||
apiKey: '{{OPENAI_API_KEY}}'
|
||||
loop-processor:
|
||||
type: agent
|
||||
name: Agent 1
|
||||
inputs:
|
||||
systemPrompt: loop agent sys prompt
|
||||
userPrompt: |-
|
||||
loop agent user prompt
|
||||
<loop.index>
|
||||
<loop.results>
|
||||
model: gpt-4o
|
||||
apiKey: '{{OPENAI_API_KEY}}'
|
||||
parentId: count-loop`,
|
||||
|
||||
'for-each-loop': `version: '1.0'
|
||||
blocks:
|
||||
start:
|
||||
type: starter
|
||||
name: Start
|
||||
inputs:
|
||||
startWorkflow: chat
|
||||
connections:
|
||||
success: foreach-loop
|
||||
foreach-loop:
|
||||
type: loop
|
||||
name: Loop 1
|
||||
inputs:
|
||||
loopType: forEach
|
||||
collection: '[''item 1'', ''item 2'', ''item 3'']'
|
||||
connections:
|
||||
loop:
|
||||
start: item-processor
|
||||
end: results-summarizer
|
||||
item-processor:
|
||||
type: agent
|
||||
name: Agent 1
|
||||
inputs:
|
||||
systemPrompt: loop agent sys prompt
|
||||
userPrompt: |-
|
||||
loop agent user prompt
|
||||
${'<'}loop.index${'>'}
|
||||
${'<'}loop.currentItem${'>'}
|
||||
${'<'}loop.items${'>'}
|
||||
${'<'}loop1.results${'>'}
|
||||
model: gpt-4o
|
||||
apiKey: '{{OPENAI_API_KEY}}'
|
||||
parentId: foreach-loop
|
||||
results-summarizer:
|
||||
type: agent
|
||||
name: Agent 2
|
||||
inputs:
|
||||
systemPrompt: outside agent sys prompt
|
||||
userPrompt: |-
|
||||
outside agent user prompt:
|
||||
<loop1.results>
|
||||
model: gpt-4o
|
||||
apiKey: '{{OPENAI_API_KEY}}'`,
|
||||
|
||||
// Targeted Update Examples - for demonstrating edit_workflow tool usage patterns
|
||||
targeted_add_block: `// Example: Adding a new agent block to an existing workflow
|
||||
// Operation: Add a new block after an existing agent
|
||||
{
|
||||
"operations": [
|
||||
{
|
||||
"operation_type": "add",
|
||||
"block_id": "summary-agent",
|
||||
"params": {
|
||||
"type": "agent",
|
||||
"name": "Summary Agent",
|
||||
"inputs": {
|
||||
"systemPrompt": "Summarize the conversation",
|
||||
"userPrompt": "<research-agent.response>",
|
||||
"model": "gpt-4o",
|
||||
"apiKey": "{{OPENAI_API_KEY}}"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"operation_type": "edit",
|
||||
"block_id": "research-agent",
|
||||
"params": {
|
||||
"connections": {
|
||||
"success": "summary-agent"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`,
|
||||
|
||||
targeted_edit_block: `// Example: Modifying an existing block's configuration
|
||||
// Operation: Update system prompt and add tools to an agent
|
||||
{
|
||||
"operations": [
|
||||
{
|
||||
"operation_type": "edit",
|
||||
"block_id": "research-agent",
|
||||
"params": {
|
||||
"inputs": {
|
||||
"systemPrompt": "You are a research assistant. Use web search to find current information.",
|
||||
"tools": [
|
||||
{
|
||||
"type": "exa",
|
||||
"title": "Exa Search",
|
||||
"toolId": "exa_search",
|
||||
"params": {
|
||||
"type": "auto",
|
||||
"apiKey": "{{EXA_API_KEY}}"
|
||||
},
|
||||
"isExpanded": true,
|
||||
"operation": "exa_search",
|
||||
"usageControl": "auto"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`,
|
||||
|
||||
targeted_delete_block: `// Example: Removing a block and updating connections
|
||||
// Operation: Delete a block and redirect its connections
|
||||
{
|
||||
"operations": [
|
||||
{
|
||||
"operation_type": "edit",
|
||||
"block_id": "start",
|
||||
"params": {
|
||||
"connections": {
|
||||
"success": "final-agent"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"operation_type": "delete",
|
||||
"block_id": "intermediate-agent"
|
||||
}
|
||||
]
|
||||
}`,
|
||||
|
||||
targeted_add_connection: `// Example: Adding new parallel connections
|
||||
// Operation: Make one block connect to multiple agents
|
||||
{
|
||||
"operations": [
|
||||
{
|
||||
"operation_type": "add",
|
||||
"block_id": "analysis-agent",
|
||||
"params": {
|
||||
"type": "agent",
|
||||
"name": "Analysis Agent",
|
||||
"inputs": {
|
||||
"systemPrompt": "Analyze the provided data",
|
||||
"userPrompt": "<research-agent.response>",
|
||||
"model": "gpt-4o",
|
||||
"apiKey": "{{OPENAI_API_KEY}}"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"operation_type": "edit",
|
||||
"block_id": "research-agent",
|
||||
"params": {
|
||||
"connections": {
|
||||
"success": ["summary-agent", "analysis-agent"]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`,
|
||||
|
||||
targeted_batch_operations: `// Example: Multiple operations in one targeted update
|
||||
// Operation: Add API block, update agent, and create new connections
|
||||
{
|
||||
"operations": [
|
||||
{
|
||||
"operation_type": "add",
|
||||
"block_id": "data-api",
|
||||
"params": {
|
||||
"type": "api",
|
||||
"name": "Data API",
|
||||
"inputs": {
|
||||
"url": "https://api.example.com/data",
|
||||
"method": "GET",
|
||||
"headers": [
|
||||
{
|
||||
"id": "auth-header",
|
||||
"cells": {
|
||||
"Key": "Authorization",
|
||||
"Value": "Bearer {{API_TOKEN}}"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"operation_type": "edit",
|
||||
"block_id": "processing-agent",
|
||||
"params": {
|
||||
"inputs": {
|
||||
"userPrompt": "Process this data: <data-api.response>"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"operation_type": "edit",
|
||||
"block_id": "start",
|
||||
"params": {
|
||||
"connections": {
|
||||
"success": "data-api"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"operation_type": "add",
|
||||
"block_id": "data-connection",
|
||||
"params": {
|
||||
"connections": {
|
||||
"success": "processing-agent"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`,
|
||||
}
|
||||
407
apps/sim/lib/copilot/inline-tool-call.tsx
Normal file
407
apps/sim/lib/copilot/inline-tool-call.tsx
Normal file
@@ -0,0 +1,407 @@
|
||||
'use client'
|
||||
|
||||
import { useState } from 'react'
|
||||
import { Loader2 } from 'lucide-react'
|
||||
import useDrivePicker from 'react-google-drive-picker'
|
||||
import { GoogleDriveIcon } from '@/components/icons'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Card, CardContent } from '@/components/ui/card'
|
||||
import { ClientToolCallState } from '@/lib/copilot/tools/client/base-tool'
|
||||
import { getClientTool } from '@/lib/copilot/tools/client/manager'
|
||||
import { getRegisteredTools } from '@/lib/copilot/tools/client/registry'
|
||||
import { getEnv } from '@/lib/env'
|
||||
import { CLASS_TOOL_METADATA, useCopilotStore } from '@/stores/copilot/store'
|
||||
import type { CopilotToolCall } from '@/stores/copilot/types'
|
||||
|
||||
/**
 * Props for `InlineToolCall`.
 * Either a full `toolCall` snapshot or a `toolCallId` (resolved live from the
 * copilot store) must be provided; the live store entry wins when both exist.
 */
interface InlineToolCallProps {
  // Static tool-call snapshot to render (fallback when no live store entry).
  toolCall?: CopilotToolCall
  // Id used to look up the live tool call in the copilot store.
  toolCallId?: string
  // Invoked with the new state string after user actions (run/skip/background).
  onStateChange?: (state: any) => void
  // Extra context; currently unused by this component — TODO confirm callers rely on it.
  context?: Record<string, any>
}
|
||||
|
||||
function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
|
||||
const instance = getClientTool(toolCall.id)
|
||||
let hasInterrupt = !!instance?.getInterruptDisplays?.()
|
||||
if (!hasInterrupt) {
|
||||
try {
|
||||
const def = getRegisteredTools()[toolCall.name]
|
||||
if (def) {
|
||||
hasInterrupt =
|
||||
typeof def.hasInterrupt === 'function'
|
||||
? !!def.hasInterrupt(toolCall.params || {})
|
||||
: !!def.hasInterrupt
|
||||
}
|
||||
} catch {}
|
||||
}
|
||||
return hasInterrupt && toolCall.state === 'pending'
|
||||
}
|
||||
|
||||
async function handleRun(toolCall: CopilotToolCall, setToolCallState: any, onStateChange?: any) {
|
||||
const instance = getClientTool(toolCall.id)
|
||||
if (!instance) return
|
||||
try {
|
||||
const mergedParams =
|
||||
(toolCall as any).params || (toolCall as any).parameters || (toolCall as any).input || {}
|
||||
await instance.handleAccept?.(mergedParams)
|
||||
onStateChange?.('executing')
|
||||
} catch (e) {
|
||||
setToolCallState(toolCall, 'errored', { error: e instanceof Error ? e.message : String(e) })
|
||||
}
|
||||
}
|
||||
|
||||
async function handleSkip(toolCall: CopilotToolCall, setToolCallState: any, onStateChange?: any) {
|
||||
const instance = getClientTool(toolCall.id)
|
||||
if (instance) {
|
||||
try {
|
||||
await instance.handleReject?.()
|
||||
} catch {}
|
||||
}
|
||||
setToolCallState(toolCall, 'rejected')
|
||||
onStateChange?.('rejected')
|
||||
}
|
||||
|
||||
function getDisplayName(toolCall: CopilotToolCall): string {
|
||||
// Prefer display resolved in the copilot store (SSOT)
|
||||
const fromStore = (toolCall as any).display?.text
|
||||
if (fromStore) return fromStore
|
||||
try {
|
||||
const def = getRegisteredTools()[toolCall.name] as any
|
||||
const byState = def?.metadata?.displayNames?.[toolCall.state]
|
||||
if (byState?.text) return byState.text
|
||||
} catch {}
|
||||
return toolCall.name
|
||||
}
|
||||
|
||||
/**
 * Accept/reject button pair for a pending tool call.
 *
 * Generic tools get Run/Skip buttons (labels overridable via the tool
 * instance's interrupt displays). The `gdrive_request_access` tool gets a
 * special "Select" button that opens the Google Drive picker and resolves
 * the accept handler with the pick result.
 */
function RunSkipButtons({
  toolCall,
  onStateChange,
}: {
  toolCall: CopilotToolCall
  onStateChange?: (state: any) => void
}) {
  const [isProcessing, setIsProcessing] = useState(false)
  // Once a choice is made the buttons disappear for good on this render path.
  const [buttonsHidden, setButtonsHidden] = useState(false)
  const { setToolCallState } = useCopilotStore()
  const [openPicker] = useDrivePicker()

  const instance = getClientTool(toolCall.id)
  const interruptDisplays = instance?.getInterruptDisplays?.()
  const acceptLabel = interruptDisplays?.accept?.text || 'Run'
  const rejectLabel = interruptDisplays?.reject?.text || 'Skip'

  // Hide the buttons immediately, then delegate acceptance to handleRun.
  const onRun = async () => {
    setIsProcessing(true)
    setButtonsHidden(true)
    try {
      await handleRun(toolCall, setToolCallState, onStateChange)
    } finally {
      setIsProcessing(false)
    }
  }

  // NOTE(review): defined but never referenced in this component — presumably
  // an alternate Drive flow (credentials → token → picker → run); confirm
  // whether it is dead code before relying on it.
  const handleOpenDriveAccess = async () => {
    try {
      const providerId = 'google-drive'
      // Look up stored OAuth credentials for Google Drive.
      const credsRes = await fetch(`/api/auth/oauth/credentials?provider=${providerId}`)
      if (!credsRes.ok) return
      const credsData = await credsRes.json()
      const creds = Array.isArray(credsData.credentials) ? credsData.credentials : []
      if (creds.length === 0) return
      const defaultCred = creds.find((c: any) => c.isDefault) || creds[0]

      // Exchange the credential for a short-lived access token.
      const tokenRes = await fetch('/api/auth/oauth/token', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ credentialId: defaultCred.id }),
      })
      if (!tokenRes.ok) return
      const { accessToken } = await tokenRes.json()
      if (!accessToken) return

      const clientId = getEnv('NEXT_PUBLIC_GOOGLE_CLIENT_ID') || ''
      const apiKey = getEnv('NEXT_PUBLIC_GOOGLE_API_KEY') || ''
      const projectNumber = getEnv('NEXT_PUBLIC_GOOGLE_PROJECT_NUMBER') || ''

      // Open the picker; a successful pick runs the tool call.
      openPicker({
        clientId,
        developerKey: apiKey,
        viewId: 'DOCS',
        token: accessToken,
        showUploadView: true,
        showUploadFolders: true,
        supportDrives: true,
        multiselect: false,
        appId: projectNumber,
        setSelectFolderEnabled: false,
        callbackFunction: async (data) => {
          if (data.action === 'picked') {
            await onRun()
          }
        },
      })
    } catch {}
  }

  if (buttonsHidden) return null

  // Special-case UI: Google Drive access request shows Select/Skip.
  if (toolCall.name === 'gdrive_request_access' && toolCall.state === 'pending') {
    return (
      <div className='flex items-center gap-2'>
        <Button
          onClick={async () => {
            const instance = getClientTool(toolCall.id)
            if (!instance) return
            // The tool drives the picker itself via this callback; it resolves
            // true on 'picked' and false on 'cancel'.
            await instance.handleAccept?.({
              openDrivePicker: async (accessToken: string) => {
                try {
                  const clientId = getEnv('NEXT_PUBLIC_GOOGLE_CLIENT_ID') || ''
                  const apiKey = getEnv('NEXT_PUBLIC_GOOGLE_API_KEY') || ''
                  const projectNumber = getEnv('NEXT_PUBLIC_GOOGLE_PROJECT_NUMBER') || ''
                  return await new Promise<boolean>((resolve) => {
                    openPicker({
                      clientId,
                      developerKey: apiKey,
                      viewId: 'DOCS',
                      token: accessToken,
                      showUploadView: true,
                      showUploadFolders: true,
                      supportDrives: true,
                      multiselect: false,
                      appId: projectNumber,
                      setSelectFolderEnabled: false,
                      callbackFunction: async (data) => {
                        if (data.action === 'picked') resolve(true)
                        else if (data.action === 'cancel') resolve(false)
                      },
                    })
                  })
                } catch {
                  // Any picker failure is reported as "not granted".
                  return false
                }
              },
            })
          }}
          size='sm'
          className='h-6 bg-gray-900 px-2 font-medium text-white text-xs hover:bg-gray-800 disabled:opacity-50 dark:bg-gray-100 dark:text-gray-900 dark:hover:bg-gray-200'
          title='Grant Google Drive access'
        >
          <GoogleDriveIcon className='mr-0.5 h-4 w-4' />
          Select
        </Button>
        <Button
          onClick={async () => {
            setButtonsHidden(true)
            await handleSkip(toolCall, setToolCallState, onStateChange)
          }}
          size='sm'
          className='h-6 bg-gray-200 px-2 font-medium text-gray-700 text-xs hover:bg-gray-300 disabled:opacity-50 dark:bg-gray-700 dark:text-gray-300 dark:hover:bg-gray-600'
        >
          Skip
        </Button>
      </div>
    )
  }

  // Default UI: Run (with spinner while processing) and Skip.
  return (
    <div className='flex items-center gap-1.5'>
      <Button
        onClick={onRun}
        disabled={isProcessing}
        size='sm'
        className='h-6 bg-gray-900 px-2 font-medium text-white text-xs hover:bg-gray-800 disabled:opacity-50 dark:bg-gray-100 dark:text-gray-900 dark:hover:bg-gray-200'
      >
        {isProcessing ? <Loader2 className='mr-1 h-3 w-3 animate-spin' /> : null}
        {acceptLabel}
      </Button>
      <Button
        onClick={async () => {
          setButtonsHidden(true)
          await handleSkip(toolCall, setToolCallState, onStateChange)
        }}
        disabled={isProcessing}
        size='sm'
        className='h-6 bg-gray-200 px-2 font-medium text-gray-700 text-xs hover:bg-gray-300 disabled:opacity-50 dark:bg-gray-700 dark:text-gray-300 dark:hover:bg-gray-600'
      >
        {rejectLabel}
      </Button>
    </div>
  )
}
|
||||
|
||||
export function InlineToolCall({
|
||||
toolCall: toolCallProp,
|
||||
toolCallId,
|
||||
onStateChange,
|
||||
context,
|
||||
}: InlineToolCallProps) {
|
||||
const [, forceUpdate] = useState({})
|
||||
const liveToolCall = useCopilotStore((s) =>
|
||||
toolCallId ? s.toolCallsById[toolCallId] : undefined
|
||||
)
|
||||
const toolCall = liveToolCall || toolCallProp
|
||||
|
||||
// Guard: nothing to render without a toolCall
|
||||
if (!toolCall) return null
|
||||
|
||||
// Skip rendering tools that are not in the registry or are explicitly omitted
|
||||
try {
|
||||
if (toolCall.name === 'checkoff_todo' || toolCall.name === 'mark_todo_in_progress') return null
|
||||
// Allow if tool id exists in CLASS_TOOL_METADATA (client tools)
|
||||
if (!CLASS_TOOL_METADATA[toolCall.name]) return null
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
|
||||
const isExpandablePending =
|
||||
toolCall.state === 'pending' &&
|
||||
(toolCall.name === 'make_api_request' || toolCall.name === 'set_environment_variables')
|
||||
|
||||
const [expanded, setExpanded] = useState(isExpandablePending)
|
||||
const isExpandableTool =
|
||||
toolCall.name === 'make_api_request' || toolCall.name === 'set_environment_variables'
|
||||
|
||||
const showButtons = shouldShowRunSkipButtons(toolCall)
|
||||
const showMoveToBackground =
|
||||
toolCall.name === 'run_workflow' &&
|
||||
(toolCall.state === (ClientToolCallState.executing as any) ||
|
||||
toolCall.state === ('executing' as any))
|
||||
|
||||
const handleStateChange = (state: any) => {
|
||||
forceUpdate({})
|
||||
onStateChange?.(state)
|
||||
}
|
||||
|
||||
const displayName = getDisplayName(toolCall)
|
||||
const params = (toolCall as any).parameters || (toolCall as any).input || toolCall.params || {}
|
||||
|
||||
const Section = ({ title, children }: { title: string; children: any }) => (
|
||||
<Card className='mt-1.5'>
|
||||
<CardContent className='p-3'>
|
||||
<div className='mb-1 font-medium text-[11px] text-muted-foreground uppercase tracking-wide'>
|
||||
{title}
|
||||
</div>
|
||||
{children}
|
||||
</CardContent>
|
||||
</Card>
|
||||
)
|
||||
|
||||
const renderPendingDetails = () => {
|
||||
if (toolCall.name === 'make_api_request') {
|
||||
const url = params.url || ''
|
||||
const method = (params.method || '').toUpperCase()
|
||||
return (
|
||||
<div className='mt-0.5 flex items-center gap-2'>
|
||||
<span className='truncate text-foreground text-xs' title={url}>
|
||||
{method ? `${method} ` : ''}
|
||||
{url || 'URL not provided'}
|
||||
</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (toolCall.name === 'set_environment_variables') {
|
||||
const variables =
|
||||
params.variables && typeof params.variables === 'object' ? params.variables : {}
|
||||
const entries = Object.entries(variables)
|
||||
return (
|
||||
<div className='mt-0.5'>
|
||||
{entries.length === 0 ? (
|
||||
<span className='text-muted-foreground text-xs'>No variables provided</span>
|
||||
) : (
|
||||
<div className='space-y-0.5'>
|
||||
{entries.map(([k, v]) => (
|
||||
<div key={k} className='flex items-center gap-0.5'>
|
||||
<span className='font-medium text-muted-foreground text-xs'>{k}</span>
|
||||
<span className='mx-1 font-medium text-muted-foreground text-xs'>:</span>
|
||||
<span className='truncate font-medium text-foreground text-xs'>{String(v)}</span>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
// Compute icon element from tool's display metadata (fallback to Loader2)
|
||||
const renderDisplayIcon = () => {
|
||||
try {
|
||||
// Determine the icon component (prefer store, then registry, else Loader2)
|
||||
const IconFromStore = (toolCall as any).display?.icon
|
||||
let IconComp: any | undefined = IconFromStore
|
||||
if (!IconComp) {
|
||||
try {
|
||||
const def = getRegisteredTools()[toolCall.name] as any
|
||||
IconComp = def?.metadata?.displayNames?.[toolCall.state]?.icon
|
||||
} catch {}
|
||||
}
|
||||
if (!IconComp) IconComp = Loader2
|
||||
|
||||
// Color by state
|
||||
let colorClass = ''
|
||||
const state = toolCall.state as any
|
||||
if (state === (ClientToolCallState as any).aborted || state === 'aborted') {
|
||||
colorClass = 'text-amber-500'
|
||||
} else if (state === (ClientToolCallState as any).error || state === 'error') {
|
||||
colorClass = 'text-red-500'
|
||||
} else if (state === (ClientToolCallState as any).success || state === 'success') {
|
||||
const isBuildOrEdit =
|
||||
toolCall.name === 'build_workflow' || toolCall.name === 'edit_workflow'
|
||||
colorClass = isBuildOrEdit ? 'text-[var(--brand-primary-hover-hex)]' : 'text-green-600'
|
||||
}
|
||||
|
||||
// Only Loader2 should spin
|
||||
const spinClass = IconComp === Loader2 ? 'animate-spin' : ''
|
||||
|
||||
return <IconComp className={`h-3 w-3 ${spinClass} ${colorClass}`} />
|
||||
} catch {
|
||||
return <Loader2 className='h-3 w-3 animate-spin' />
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='flex w-full flex-col gap-1 py-1'>
|
||||
<div
|
||||
className={`flex items-center justify-between gap-2 ${isExpandableTool ? 'cursor-pointer' : ''}`}
|
||||
onClick={() => {
|
||||
if (isExpandableTool) setExpanded((e) => !e)
|
||||
}}
|
||||
>
|
||||
<div className='flex items-center gap-2 text-muted-foreground'>
|
||||
<div className='flex-shrink-0'>{renderDisplayIcon()}</div>
|
||||
<span className='text-base'>{displayName}</span>
|
||||
</div>
|
||||
{showButtons ? (
|
||||
<RunSkipButtons toolCall={toolCall} onStateChange={handleStateChange} />
|
||||
) : showMoveToBackground ? (
|
||||
<Button
|
||||
// Intentionally minimal wiring per requirements
|
||||
onClick={async () => {
|
||||
try {
|
||||
const instance = getClientTool(toolCall.id)
|
||||
// Transition to background state locally so UI updates immediately
|
||||
instance?.setState?.((ClientToolCallState as any).background)
|
||||
await instance?.markToolComplete?.(
|
||||
200,
|
||||
'The user has chosen to move the workflow execution to the background. Check back with them later to know when the workflow execution is complete'
|
||||
)
|
||||
// Optionally force a re-render; store should sync state from server
|
||||
forceUpdate({})
|
||||
onStateChange?.('background')
|
||||
} catch {}
|
||||
}}
|
||||
size='sm'
|
||||
className='h-6 bg-blue-600 px-2 font-medium text-white text-xs hover:bg-blue-500 disabled:opacity-50 dark:bg-blue-400 dark:text-gray-900 dark:hover:bg-blue-300'
|
||||
title='Move to Background'
|
||||
>
|
||||
Move to Background
|
||||
</Button>
|
||||
) : null}
|
||||
</div>
|
||||
{isExpandableTool && expanded && <div className='pr-1 pl-5'>{renderPendingDetails()}</div>}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
310
apps/sim/lib/copilot/registry.ts
Normal file
310
apps/sim/lib/copilot/registry.ts
Normal file
@@ -0,0 +1,310 @@
|
||||
import { z } from 'zod'
|
||||
|
||||
// Tool IDs supported by the new Copilot runtime.
// This enum is the single source of truth: arg schemas, SSE schemas, and
// result schemas below are all keyed by these ids.
export const ToolIds = z.enum([
  'get_user_workflow',
  'build_workflow',
  'edit_workflow',
  'run_workflow',
  'get_workflow_console',
  'get_blocks_and_tools',
  'get_blocks_metadata',
  'get_block_best_practices',
  'get_build_workflow_examples',
  'get_edit_workflow_examples',
  'search_documentation',
  'search_online',
  'make_api_request',
  'get_environment_variables',
  'set_environment_variables',
  'get_oauth_credentials',
  'gdrive_request_access',
  'list_gdrive_files',
  'read_gdrive_file',
  'reason',
])
// Union of the literal tool-id strings above.
export type ToolId = z.infer<typeof ToolIds>
|
||||
|
||||
// Base SSE wrapper for tool_call events emitted by the LLM.
// `partial` defaults to false; streaming argument chunks presumably set it
// true until the call is complete — TODO confirm against the emitter.
const ToolCallSSEBase = z.object({
  type: z.literal('tool_call'),
  data: z.object({
    id: z.string(),
    name: ToolIds,
    arguments: z.record(z.any()),
    partial: z.boolean().default(false),
  }),
})
export type ToolCallSSE = z.infer<typeof ToolCallSSEBase>
|
||||
|
||||
// Reusable small schemas shared by several tool argument definitions below.
const StringArray = z.array(z.string())
const BooleanOptional = z.boolean().optional()
const NumberOptional = z.number().optional()
|
||||
|
||||
// Tool argument schemas (per SSE examples provided).
// One zod schema per ToolId describing the `arguments` payload the LLM sends.
export const ToolArgSchemas = {
  // No arguments: returns the current workflow.
  get_user_workflow: z.object({}),

  build_workflow: z.object({
    yamlContent: z.string(),
  }),

  // Targeted edits: at least one add/edit/delete operation keyed by block id.
  edit_workflow: z.object({
    operations: z
      .array(
        z.object({
          operation_type: z.enum(['add', 'edit', 'delete']),
          block_id: z.string(),
          params: z.record(z.any()).optional(),
        })
      )
      .min(1),
  }),

  run_workflow: z.object({
    workflow_input: z.string(),
  }),

  get_workflow_console: z.object({
    limit: NumberOptional,
    includeDetails: BooleanOptional,
  }),

  get_blocks_and_tools: z.object({}),

  get_blocks_metadata: z.object({
    blockIds: StringArray.min(1),
  }),

  get_block_best_practices: z.object({
    blockIds: StringArray.min(1),
  }),

  get_build_workflow_examples: z.object({
    exampleIds: StringArray.min(1),
  }),

  get_edit_workflow_examples: z.object({
    exampleIds: StringArray.min(1),
  }),

  search_documentation: z.object({
    query: z.string(),
    topK: NumberOptional,
  }),

  // Serper-style web search; `gl`/`hl` are locale hints — TODO confirm provider.
  search_online: z.object({
    query: z.string(),
    num: z.number().optional().default(10),
    type: z.enum(['search', 'news', 'places', 'images']).optional().default('search'),
    gl: z.string().optional(),
    hl: z.string().optional(),
  }),

  make_api_request: z.object({
    url: z.string(),
    method: z.enum(['GET', 'POST', 'PUT']),
    queryParams: z.record(z.union([z.string(), z.number(), z.boolean()])).optional(),
    headers: z.record(z.string()).optional(),
    body: z.union([z.record(z.any()), z.string()]).optional(),
  }),

  get_environment_variables: z.object({}),

  set_environment_variables: z.object({
    variables: z.record(z.string()),
  }),

  get_oauth_credentials: z.object({}),

  gdrive_request_access: z.object({}),

  list_gdrive_files: z.object({
    search_query: z.string().optional(),
    num_results: z.number().optional().default(50),
  }),

  read_gdrive_file: z.object({
    fileId: z.string(),
    type: z.enum(['doc', 'sheet']),
    range: z.string().optional(),
  }),

  reason: z.object({
    reasoning: z.string(),
  }),
} as const
export type ToolArgSchemaMap = typeof ToolArgSchemas
|
||||
|
||||
// Tool-specific SSE schemas (tool_call with typed arguments)
|
||||
function toolCallSSEFor<TName extends ToolId, TArgs extends z.ZodTypeAny>(
|
||||
name: TName,
|
||||
argsSchema: TArgs
|
||||
) {
|
||||
return ToolCallSSEBase.extend({
|
||||
data: ToolCallSSEBase.shape.data.extend({
|
||||
name: z.literal(name),
|
||||
arguments: argsSchema,
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
// Typed SSE schema per tool id; each entry pairs the base envelope with the
// matching argument schema from ToolArgSchemas via toolCallSSEFor.
export const ToolSSESchemas = {
  get_user_workflow: toolCallSSEFor('get_user_workflow', ToolArgSchemas.get_user_workflow),
  build_workflow: toolCallSSEFor('build_workflow', ToolArgSchemas.build_workflow),
  edit_workflow: toolCallSSEFor('edit_workflow', ToolArgSchemas.edit_workflow),
  run_workflow: toolCallSSEFor('run_workflow', ToolArgSchemas.run_workflow),
  get_workflow_console: toolCallSSEFor('get_workflow_console', ToolArgSchemas.get_workflow_console),
  get_blocks_and_tools: toolCallSSEFor('get_blocks_and_tools', ToolArgSchemas.get_blocks_and_tools),
  get_blocks_metadata: toolCallSSEFor('get_blocks_metadata', ToolArgSchemas.get_blocks_metadata),
  get_block_best_practices: toolCallSSEFor(
    'get_block_best_practices',
    ToolArgSchemas.get_block_best_practices
  ),
  get_build_workflow_examples: toolCallSSEFor(
    'get_build_workflow_examples',
    ToolArgSchemas.get_build_workflow_examples
  ),
  get_edit_workflow_examples: toolCallSSEFor(
    'get_edit_workflow_examples',
    ToolArgSchemas.get_edit_workflow_examples
  ),
  search_documentation: toolCallSSEFor('search_documentation', ToolArgSchemas.search_documentation),
  search_online: toolCallSSEFor('search_online', ToolArgSchemas.search_online),
  make_api_request: toolCallSSEFor('make_api_request', ToolArgSchemas.make_api_request),
  get_environment_variables: toolCallSSEFor(
    'get_environment_variables',
    ToolArgSchemas.get_environment_variables
  ),
  set_environment_variables: toolCallSSEFor(
    'set_environment_variables',
    ToolArgSchemas.set_environment_variables
  ),
  get_oauth_credentials: toolCallSSEFor(
    'get_oauth_credentials',
    ToolArgSchemas.get_oauth_credentials
  ),
  gdrive_request_access: toolCallSSEFor(
    'gdrive_request_access',
    ToolArgSchemas.gdrive_request_access
  ),
  list_gdrive_files: toolCallSSEFor('list_gdrive_files', ToolArgSchemas.list_gdrive_files),
  read_gdrive_file: toolCallSSEFor('read_gdrive_file', ToolArgSchemas.read_gdrive_file),
  reason: toolCallSSEFor('reason', ToolArgSchemas.reason),
} as const
export type ToolSSESchemaMap = typeof ToolSSESchemas
|
||||
|
||||
// Known result schemas per tool (what tool_result.result should conform to)
// Note: Where legacy variability exists, schema captures the common/expected shape for new runtime.
// Shared result shape for build_workflow and edit_workflow.
const BuildOrEditWorkflowResult = z.object({
  yamlContent: z.string(),
  description: z.string().optional(),
  // Opaque serialized workflow state; shape not validated here.
  workflowState: z.unknown().optional(),
  data: z
    .object({
      blocksCount: z.number(),
      edgesCount: z.number(),
    })
    .optional(),
})
|
||||
|
||||
// One console/log entry for a workflow execution, as returned by
// get_workflow_console. Timestamps are strings; cost/token fields are
// nullable when the execution has not finished or was not metered.
const ExecutionEntry = z.object({
  id: z.string(),
  executionId: z.string(),
  level: z.string(),
  trigger: z.string(),
  startedAt: z.string(),
  endedAt: z.string().nullable(),
  durationMs: z.number().nullable(),
  totalCost: z.number().nullable(),
  totalTokens: z.number().nullable(),
  blockExecutions: z.array(z.any()), // can be detailed per need
  output: z.any().optional(),
})
|
||||
|
||||
// Expected result shape per tool id. `.or(...)` unions cover known legacy
// variability (e.g. get_user_workflow may return a bare YAML string).
export const ToolResultSchemas = {
  get_user_workflow: z.object({ yamlContent: z.string() }).or(z.string()),
  build_workflow: BuildOrEditWorkflowResult,
  edit_workflow: BuildOrEditWorkflowResult,
  run_workflow: z.object({
    executionId: z.string().optional(),
    message: z.any().optional(),
    data: z.any().optional(),
  }),
  get_workflow_console: z.object({ entries: z.array(ExecutionEntry) }),
  get_blocks_and_tools: z.object({ blocks: z.array(z.any()), tools: z.array(z.any()) }),
  get_blocks_metadata: z.object({ metadata: z.record(z.any()) }),
  get_block_best_practices: z.object({ bestPractices: z.array(z.any()) }),
  get_build_workflow_examples: z.object({
    examples: z.array(
      z.object({ id: z.string(), title: z.string().optional(), yamlContent: z.string().optional() })
    ),
  }),
  get_edit_workflow_examples: z.object({
    examples: z.array(
      z.object({
        id: z.string(),
        title: z.string().optional(),
        operations: z.array(z.any()).optional(),
      })
    ),
  }),
  search_documentation: z.object({ results: z.array(z.any()) }),
  search_online: z.object({ results: z.array(z.any()) }),
  make_api_request: z.object({
    status: z.number(),
    statusText: z.string().optional(),
    headers: z.record(z.string()).optional(),
    body: z.any().optional(),
  }),
  get_environment_variables: z.object({ variables: z.record(z.string()) }),
  // Either the echoed variables map or a generic message/data envelope.
  set_environment_variables: z
    .object({ variables: z.record(z.string()) })
    .or(z.object({ message: z.any().optional(), data: z.any().optional() })),
  get_oauth_credentials: z.object({
    credentials: z.array(
      z.object({ id: z.string(), provider: z.string(), isDefault: z.boolean().optional() })
    ),
  }),
  gdrive_request_access: z.object({
    granted: z.boolean().optional(),
    message: z.string().optional(),
  }),
  list_gdrive_files: z.object({
    files: z.array(
      z.object({
        id: z.string(),
        name: z.string().optional(),
        mimeType: z.string().optional(),
        size: z.number().optional(),
      })
    ),
  }),
  read_gdrive_file: z.object({ content: z.string().optional(), data: z.any().optional() }),
  reason: z.object({ reasoning: z.string() }),
} as const
export type ToolResultSchemaMap = typeof ToolResultSchemas
|
||||
|
||||
// Consolidated registry entry per tool
|
||||
export const ToolRegistry = Object.freeze(
|
||||
(Object.keys(ToolArgSchemas) as ToolId[]).reduce(
|
||||
(acc, toolId) => {
|
||||
const args = (ToolArgSchemas as any)[toolId] as z.ZodTypeAny
|
||||
const sse = (ToolSSESchemas as any)[toolId] as z.ZodTypeAny
|
||||
const result = (ToolResultSchemas as any)[toolId] as z.ZodTypeAny
|
||||
acc[toolId] = { id: toolId, args, sse, result }
|
||||
return acc
|
||||
},
|
||||
{} as Record<
|
||||
ToolId,
|
||||
{ id: ToolId; args: z.ZodTypeAny; sse: z.ZodTypeAny; result: z.ZodTypeAny }
|
||||
>
|
||||
)
|
||||
)
|
||||
export type ToolRegistryMap = typeof ToolRegistry
|
||||
|
||||
// Convenience helper types inferred from schemas.
// InferArgs/InferResult give the runtime TypeScript type of a tool's
// arguments/result; InferToolCallSSE gives the full typed SSE event.
export type InferArgs<T extends ToolId> = z.infer<(typeof ToolArgSchemas)[T]>
export type InferResult<T extends ToolId> = z.infer<(typeof ToolResultSchemas)[T]>
export type InferToolCallSSE<T extends ToolId> = z.infer<(typeof ToolSSESchemas)[T]>
|
||||
@@ -1,144 +0,0 @@
|
||||
/**
|
||||
* Base class for all copilot tools
|
||||
*/
|
||||
|
||||
import type {
|
||||
CopilotToolCall,
|
||||
Tool,
|
||||
ToolConfirmResponse,
|
||||
ToolExecuteResult,
|
||||
ToolExecutionOptions,
|
||||
ToolMetadata,
|
||||
ToolState,
|
||||
} from '@/lib/copilot/tools/types'
|
||||
|
||||
/**
 * Abstract base for copilot client tools: backend notification, display-name
 * and icon resolution from metadata, and run/skip/background action handling.
 */
export abstract class BaseTool implements Tool {
  // Static property for tool ID - must be overridden by each tool
  static readonly id: string

  // Instance property for metadata (display config, schema, interrupt flag)
  abstract metadata: ToolMetadata

  /**
   * Notify the backend about the tool state change.
   * POSTs to /api/copilot/confirm; 'errored' is mapped to the API's 'error'
   * status. Never throws — failures are returned as { success: false }.
   */
  protected async notify(
    toolCallId: string,
    state: ToolState,
    message?: string
  ): Promise<ToolConfirmResponse> {
    try {
      // Map ToolState to NotificationStatus for API
      const notificationStatus = state === 'errored' ? 'error' : state

      const response = await fetch('/api/copilot/confirm', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          toolCallId,
          status: notificationStatus,
          message,
        }),
      })

      if (!response.ok) {
        const error = await response.json()
        console.error(`Failed to confirm tool ${toolCallId}:`, error)
        return { success: false, message: error.error || 'Failed to confirm tool' }
      }

      const result = await response.json()
      return { success: true, message: result.message }
    } catch (error) {
      console.error('Error confirming tool:', error)
      return { success: false, message: error instanceof Error ? error.message : 'Unknown error' }
    }
  }

  /**
   * Execute the tool - must be implemented by each tool.
   */
  abstract execute(
    toolCall: CopilotToolCall,
    options?: ToolExecutionOptions
  ): Promise<ToolExecuteResult>

  /**
   * Get the display name for the current state.
   * Precedence: dynamic name fn → per-state configured name → "<id> (<state>)".
   */
  getDisplayName(toolCall: CopilotToolCall): string {
    const { state, parameters = {} } = toolCall
    const { displayConfig } = this.metadata

    // First try dynamic display name if available
    if (displayConfig.getDynamicDisplayName) {
      const dynamicName = displayConfig.getDynamicDisplayName(state, parameters)
      if (dynamicName) return dynamicName
    }

    // Then try state-specific display name
    const stateConfig = displayConfig.states[state]
    if (stateConfig?.displayName) {
      return stateConfig.displayName
    }

    // Fallback to a generic state name
    return `${this.metadata.id} (${state})`
  }

  /**
   * Get the icon identifier for the current state ('default' if unconfigured).
   */
  getIcon(toolCall: CopilotToolCall): string {
    const { state } = toolCall
    const stateConfig = this.metadata.displayConfig.states[state]

    // Return state-specific icon or default
    return stateConfig?.icon || 'default'
  }

  /**
   * Check if tool requires confirmation in current state.
   */
  requiresConfirmation(toolCall: CopilotToolCall): boolean {
    // Only show confirmation UI if tool requires interrupt and is in pending state
    return this.metadata.requiresInterrupt && toolCall.state === 'pending'
  }

  /**
   * Handle user action (run/skip/background).
   * 'run' delegates to execute(); 'skip'/'background' only notify the backend.
   */
  async handleUserAction(
    toolCall: CopilotToolCall,
    action: 'run' | 'skip' | 'background',
    options?: ToolExecutionOptions
  ): Promise<void> {
    // Map actions to states
    const actionToState: Record<string, ToolState> = {
      run: 'executing', // Changed from 'accepted' to 'executing'
      skip: 'rejected',
      background: 'background',
    }

    const newState = actionToState[action]

    // Update state locally
    options?.onStateChange?.(newState)

    // Special handling for run action
    if (action === 'run') {
      // Directly call execute method - no wrapper
      await this.execute(toolCall, options)
    } else {
      // For skip/background, just notify
      const message =
        action === 'skip'
          ? this.getDisplayName({ ...toolCall, state: 'rejected' })
          : 'The user moved execution to the background'

      await this.notify(toolCall.id, newState, message)
    }
  }
}
|
||||
@@ -1,72 +0,0 @@
|
||||
import { BaseTool } from '@/lib/copilot/tools/base-tool'
|
||||
import type {
|
||||
CopilotToolCall,
|
||||
ToolExecuteResult,
|
||||
ToolExecutionOptions,
|
||||
ToolMetadata,
|
||||
} from '@/lib/copilot/tools/types'
|
||||
|
||||
/**
 * Tool that prompts the user to grant Google Drive file access via the
 * picker UI. Marked `requiresInterrupt`, so the UI shows a confirmation
 * before execution; execute() only confirms completion to the backend —
 * the actual picking happens in the UI layer.
 */
export class GDriveRequestAccessTool extends BaseTool {
  static readonly id = 'gdrive_request_access'

  metadata: ToolMetadata = {
    id: GDriveRequestAccessTool.id,
    displayConfig: {
      states: {
        pending: {
          displayName: 'Select Google Drive files',
          icon: 'googleDrive',
        },
        executing: {
          displayName: 'Requesting Google Drive access',
          icon: 'spinner',
        },
        accepted: {
          displayName: 'Requesting Google Drive access',
          icon: 'spinner',
        },
        success: {
          displayName: 'Selected Google Drive files',
          icon: 'googleDrive',
        },
        rejected: {
          displayName: 'Skipped Google Drive access request',
          icon: 'skip',
        },
        errored: {
          displayName: 'Failed to request Google Drive access',
          icon: 'error',
        },
      },
    },
    schema: {
      name: GDriveRequestAccessTool.id,
      description: 'Prompt the user to grant Google Drive file access via the picker',
      parameters: {
        type: 'object',
        properties: {
          // Accepts arbitrary context but no required params
        },
        required: [],
      },
    },
    requiresInterrupt: true,
  }

  async execute(
    toolCall: CopilotToolCall,
    options?: ToolExecutionOptions
  ): Promise<ToolExecuteResult> {
    // Execution is trivial: we only notify the server that the user completed the action.
    // Any data transfer happens via the picker; if needed later, it can be included in the message.
    await this.notify(toolCall.id, 'success', 'User completed Google Drive access picker')
    options?.onStateChange?.('success')

    return {
      success: true,
      data: {
        message: 'Google Drive access confirmed by user',
      },
    }
  }
}
||||
@@ -1,306 +0,0 @@
|
||||
/**
|
||||
* Get User Workflow Tool - Client-side implementation
|
||||
*/
|
||||
|
||||
import { BaseTool } from '@/lib/copilot/tools/base-tool'
|
||||
import type {
|
||||
CopilotToolCall,
|
||||
ToolExecuteResult,
|
||||
ToolExecutionOptions,
|
||||
ToolMetadata,
|
||||
} from '@/lib/copilot/tools/types'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { mergeSubblockState } from '@/stores/workflows/utils'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
// Parameters accepted by get_user_workflow (both optional).
interface GetUserWorkflowParams {
  workflowId?: string
  includeMetadata?: boolean
}

/**
 * Client tool that reads the current workflow state from client-side stores
 * and reports it to the server as JSON.
 *
 * Resolution order for the workflow state:
 *   1. diff/preview store (if it holds a non-empty workflow),
 *   2. the main workflow store,
 *   3. registry metadata as a last resort (metadata only, no block state).
 */
export class GetUserWorkflowTool extends BaseTool {
  static readonly id = 'get_user_workflow'

  metadata: ToolMetadata = {
    id: GetUserWorkflowTool.id,
    displayConfig: {
      states: {
        executing: {
          displayName: 'Analyzing your workflow',
          icon: 'spinner',
        },
        accepted: {
          displayName: 'Analyzing your workflow',
          icon: 'spinner',
        },
        success: {
          displayName: 'Workflow analyzed',
          icon: 'workflow',
        },
        rejected: {
          displayName: 'Skipped workflow analysis',
          icon: 'skip',
        },
        errored: {
          displayName: 'Failed to analyze workflow',
          icon: 'error',
        },
        aborted: {
          displayName: 'Aborted workflow analysis',
          icon: 'abort',
        },
      },
    },
    schema: {
      name: GetUserWorkflowTool.id,
      description: 'Get the current workflow state as JSON',
      parameters: {
        type: 'object',
        properties: {
          workflowId: {
            type: 'string',
            description:
              'The ID of the workflow to fetch (optional, uses active workflow if not provided)',
          },
          includeMetadata: {
            type: 'boolean',
            description: 'Whether to include workflow metadata',
          },
        },
        required: [],
      },
    },
    requiresInterrupt: false, // Client tools handle their own interrupts
    stateMessages: {
      success: 'Successfully retrieved workflow',
      error: 'Failed to retrieve workflow',
      rejected: 'User chose to skip workflow retrieval',
    },
  }

  /**
   * Execute the tool - fetch the workflow from stores and write to Redis.
   *
   * Serializes the resolved workflow state to JSON, wraps it in a
   * `{ userWorkflow }` envelope, and sends it to the server via notify().
   * Errors are reported both to the server and to the caller via the
   * returned ToolExecuteResult.
   */
  async execute(
    toolCall: CopilotToolCall,
    options?: ToolExecutionOptions
  ): Promise<ToolExecuteResult> {
    const logger = createLogger('GetUserWorkflowTool')

    logger.info('Starting client tool execution', {
      toolCallId: toolCall.id,
      toolName: toolCall.name,
    })

    try {
      // Parse parameters (may arrive on either .parameters or .input)
      const rawParams = toolCall.parameters || toolCall.input || {}
      const params = rawParams as GetUserWorkflowParams

      // Get workflow ID - use provided or active workflow
      let workflowId = params.workflowId
      if (!workflowId) {
        const { activeWorkflowId } = useWorkflowRegistry.getState()
        if (!activeWorkflowId) {
          options?.onStateChange?.('errored')
          return {
            success: false,
            error: 'No active workflow found',
          }
        }
        workflowId = activeWorkflowId
      }

      logger.info('Fetching user workflow from stores', {
        workflowId,
        includeMetadata: params.includeMetadata,
      })

      // Try to get workflow from diff/preview store first, then main store
      let workflowState: any = null

      // Check diff store first
      const diffStore = useWorkflowDiffStore.getState()
      if (diffStore.diffWorkflow && Object.keys(diffStore.diffWorkflow.blocks || {}).length > 0) {
        workflowState = diffStore.diffWorkflow
        logger.info('Using workflow from diff/preview store', { workflowId })
      } else {
        // Get the actual workflow state from the workflow store
        const workflowStore = useWorkflowStore.getState()
        const fullWorkflowState = workflowStore.getWorkflowState()

        if (!fullWorkflowState || !fullWorkflowState.blocks) {
          // Fallback to workflow registry metadata if no workflow state
          const workflowRegistry = useWorkflowRegistry.getState()
          const workflow = workflowRegistry.workflows[workflowId]

          if (!workflow) {
            options?.onStateChange?.('errored')
            return {
              success: false,
              error: `Workflow ${workflowId} not found in any store`,
            }
          }

          logger.warn('No workflow state found, using workflow metadata only', { workflowId })
          workflowState = workflow
        } else {
          workflowState = fullWorkflowState
          logger.info('Using workflow state from workflow store', {
            workflowId,
            blockCount: Object.keys(fullWorkflowState.blocks || {}).length,
          })
        }
      }

      // Ensure workflow state has all required properties with proper defaults
      if (workflowState) {
        if (!workflowState.loops) {
          workflowState.loops = {}
        }
        if (!workflowState.parallels) {
          workflowState.parallels = {}
        }
        if (!workflowState.edges) {
          workflowState.edges = []
        }
        if (!workflowState.blocks) {
          workflowState.blocks = {}
        }
      }

      // Merge latest subblock values from the subblock store so subblock edits are reflected
      try {
        if (workflowState?.blocks) {
          workflowState = {
            ...workflowState,
            blocks: mergeSubblockState(workflowState.blocks, workflowId),
          }
          logger.info('Merged subblock values into workflow state', {
            workflowId,
            blockCount: Object.keys(workflowState.blocks || {}).length,
          })
        }
      } catch (mergeError) {
        // Best-effort merge: a failure here degrades freshness, not correctness
        logger.warn('Failed to merge subblock values; proceeding with raw workflow state', {
          workflowId,
          error: mergeError instanceof Error ? mergeError.message : String(mergeError),
        })
      }

      logger.info('Validating workflow state', {
        workflowId,
        hasWorkflowState: !!workflowState,
        hasBlocks: !!workflowState?.blocks,
        workflowStateType: typeof workflowState,
      })

      if (!workflowState || !workflowState.blocks) {
        logger.error('Workflow state validation failed', {
          workflowId,
          workflowState: workflowState,
          hasBlocks: !!workflowState?.blocks,
        })
        options?.onStateChange?.('errored')
        return {
          success: false,
          error: 'Workflow state is empty or invalid',
        }
      }

      // Include metadata if requested and available
      if (params.includeMetadata && workflowState.metadata) {
        // Metadata is already included in the workflow state
      }

      logger.info('Successfully fetched user workflow from stores', {
        workflowId,
        blockCount: Object.keys(workflowState.blocks || {}).length,
        fromDiffStore:
          !!diffStore.diffWorkflow && Object.keys(diffStore.diffWorkflow.blocks || {}).length > 0,
      })

      logger.info('About to stringify workflow state', {
        workflowId,
        workflowStateKeys: Object.keys(workflowState),
      })

      // Convert workflow state to JSON string
      let workflowJson: string
      try {
        workflowJson = JSON.stringify(workflowState, null, 2)
        logger.info('Successfully stringified workflow state', {
          workflowId,
          jsonLength: workflowJson.length,
        })
      } catch (stringifyError) {
        // JSON.stringify can throw (e.g. circular references)
        logger.error('Error stringifying workflow state', {
          workflowId,
          error: stringifyError,
        })
        options?.onStateChange?.('errored')
        return {
          success: false,
          error: `Failed to convert workflow to JSON: ${stringifyError instanceof Error ? stringifyError.message : 'Unknown error'}`,
        }
      }
      logger.info('About to notify server with workflow data', {
        workflowId,
        toolCallId: toolCall.id,
        dataLength: workflowJson.length,
      })

      // Notify server of success with structured data containing userWorkflow
      const structuredData = JSON.stringify({
        userWorkflow: workflowJson,
      })

      logger.info('Calling notify with structured data', {
        toolCallId: toolCall.id,
        structuredDataLength: structuredData.length,
      })

      await this.notify(toolCall.id, 'success', structuredData)

      logger.info('Successfully notified server of success', {
        toolCallId: toolCall.id,
      })

      options?.onStateChange?.('success')

      return {
        success: true,
        data: workflowJson, // Return the same data that goes to Redis
      }
    } catch (error: any) {
      logger.error('Error in client tool execution:', {
        toolCallId: toolCall.id,
        error: error,
        stack: error instanceof Error ? error.stack : undefined,
        message: error instanceof Error ? error.message : String(error),
      })

      try {
        // Notify server of error
        await this.notify(toolCall.id, 'errored', error.message || 'Failed to fetch workflow')
        logger.info('Successfully notified server of error', {
          toolCallId: toolCall.id,
        })
      } catch (notifyError) {
        // Notification itself failed; log but still surface the original error
        logger.error('Failed to notify server of error:', {
          toolCallId: toolCall.id,
          notifyError: notifyError,
        })
      }

      options?.onStateChange?.('errored')

      return {
        success: false,
        error: error.message || 'Failed to fetch workflow',
      }
    }
  }
}
|
||||
@@ -1,218 +0,0 @@
|
||||
/**
|
||||
* Run Workflow Tool
|
||||
*/
|
||||
|
||||
import { BaseTool } from '@/lib/copilot/tools/base-tool'
|
||||
import type {
|
||||
CopilotToolCall,
|
||||
ToolExecuteResult,
|
||||
ToolExecutionOptions,
|
||||
ToolMetadata,
|
||||
} from '@/lib/copilot/tools/types'
|
||||
import { executeWorkflowWithFullLogging } from '@/app/workspace/[workspaceId]/w/[workflowId]/lib/workflow-execution-utils'
|
||||
import { useExecutionStore } from '@/stores/execution/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
interface RunWorkflowParams {
|
||||
workflowId?: string
|
||||
description?: string
|
||||
workflow_input?: string
|
||||
}
|
||||
|
||||
export class RunWorkflowTool extends BaseTool {
|
||||
static readonly id = 'run_workflow'
|
||||
|
||||
metadata: ToolMetadata = {
|
||||
id: RunWorkflowTool.id,
|
||||
displayConfig: {
|
||||
states: {
|
||||
pending: {
|
||||
displayName: 'Run workflow?',
|
||||
icon: 'play',
|
||||
},
|
||||
executing: {
|
||||
displayName: 'Executing workflow',
|
||||
icon: 'spinner',
|
||||
},
|
||||
accepted: {
|
||||
displayName: 'Executing workflow',
|
||||
icon: 'spinner',
|
||||
},
|
||||
success: {
|
||||
displayName: 'Executed workflow',
|
||||
icon: 'play',
|
||||
},
|
||||
rejected: {
|
||||
displayName: 'Skipped workflow execution',
|
||||
icon: 'skip',
|
||||
},
|
||||
errored: {
|
||||
displayName: 'Failed to execute workflow',
|
||||
icon: 'error',
|
||||
},
|
||||
background: {
|
||||
displayName: 'Workflow execution moved to background',
|
||||
icon: 'play',
|
||||
},
|
||||
aborted: {
|
||||
displayName: 'Aborted stream',
|
||||
icon: 'abort',
|
||||
},
|
||||
},
|
||||
},
|
||||
schema: {
|
||||
name: RunWorkflowTool.id,
|
||||
description: 'Execute a workflow with optional input',
|
||||
parameters: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
workflowId: {
|
||||
type: 'string',
|
||||
description: 'The ID of the workflow to run',
|
||||
},
|
||||
description: {
|
||||
type: 'string',
|
||||
description: 'Description of what the workflow does',
|
||||
},
|
||||
workflow_input: {
|
||||
type: 'string',
|
||||
description: 'Input text to pass to the workflow chat',
|
||||
},
|
||||
},
|
||||
required: [],
|
||||
},
|
||||
},
|
||||
requiresInterrupt: true,
|
||||
allowBackgroundExecution: true,
|
||||
stateMessages: {
|
||||
success: 'Workflow successfully executed',
|
||||
background:
|
||||
'User moved workflow exectuion to background. The workflow execution is not complete, but will continue to run in the background.',
|
||||
error: 'Error during workflow execution',
|
||||
rejected: 'The user chose to skip the workflow execution',
|
||||
},
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute the tool - run the workflow
|
||||
* This includes showing a background prompt and handling background vs foreground execution
|
||||
*/
|
||||
async execute(
|
||||
toolCall: CopilotToolCall,
|
||||
options?: ToolExecutionOptions
|
||||
): Promise<ToolExecuteResult> {
|
||||
try {
|
||||
// Parse parameters from either toolCall.parameters or toolCall.input
|
||||
const rawParams = toolCall.parameters || toolCall.input || {}
|
||||
const params = rawParams as RunWorkflowParams
|
||||
|
||||
// Check if workflow is already executing
|
||||
const { isExecuting } = useExecutionStore.getState()
|
||||
if (isExecuting) {
|
||||
options?.onStateChange?.('errored')
|
||||
return {
|
||||
success: false,
|
||||
error: 'The workflow is already in the middle of an execution. Try again later',
|
||||
}
|
||||
}
|
||||
|
||||
// Get current workflow and execution context
|
||||
const { activeWorkflowId } = useWorkflowRegistry.getState()
|
||||
if (!activeWorkflowId) {
|
||||
options?.onStateChange?.('errored')
|
||||
return {
|
||||
success: false,
|
||||
error: 'No active workflow found',
|
||||
}
|
||||
}
|
||||
|
||||
// Prepare workflow input - if workflow_input is provided, pass it to the execution
|
||||
const workflowInput = params.workflow_input
|
||||
? {
|
||||
input: params.workflow_input,
|
||||
}
|
||||
: undefined
|
||||
|
||||
// Set execution state
|
||||
const { setIsExecuting } = useExecutionStore.getState()
|
||||
setIsExecuting(true)
|
||||
|
||||
// Note: toolCall.state is already set to 'executing' by clientAcceptTool
|
||||
|
||||
// Capture the execution timestamp
|
||||
const executionStartTime = new Date().toISOString()
|
||||
|
||||
// Store execution start time in context for background notifications
|
||||
if (options?.context) {
|
||||
options.context.executionStartTime = executionStartTime
|
||||
}
|
||||
|
||||
// Use the standalone execution utility with full logging support
|
||||
// This works for both deployed and non-deployed workflows
|
||||
const result = await executeWorkflowWithFullLogging({
|
||||
workflowInput,
|
||||
executionId: toolCall.id, // Use tool call ID as execution ID
|
||||
})
|
||||
|
||||
// Reset execution state
|
||||
setIsExecuting(false)
|
||||
|
||||
// Check if execution was successful
|
||||
if (result && (!('success' in result) || result.success !== false)) {
|
||||
// Notify server of success with execution timestamp
|
||||
await this.notify(
|
||||
toolCall.id,
|
||||
'success',
|
||||
`Workflow execution completed successfully. Started at: ${executionStartTime}`
|
||||
)
|
||||
|
||||
options?.onStateChange?.('success')
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: {
|
||||
workflowId: params.workflowId || activeWorkflowId,
|
||||
description: params.description,
|
||||
message: 'Workflow execution finished successfully',
|
||||
},
|
||||
}
|
||||
}
|
||||
// Execution failed
|
||||
const errorMessage = (result as any)?.error || 'Workflow execution failed'
|
||||
const failedDependency = (result as any)?.failedDependency
|
||||
|
||||
// Check if failedDependency is true to notify 'rejected' instead of 'errored'
|
||||
const targetState = failedDependency === true ? 'rejected' : 'errored'
|
||||
const message =
|
||||
targetState === 'rejected'
|
||||
? `Workflow execution skipped (failed dependency): ${errorMessage}`
|
||||
: `Workflow execution failed: ${errorMessage}`
|
||||
await this.notify(toolCall.id, targetState, message)
|
||||
|
||||
options?.onStateChange?.(targetState)
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: errorMessage,
|
||||
}
|
||||
} catch (error: any) {
|
||||
// Reset execution state in case of error
|
||||
const { setIsExecuting } = useExecutionStore.getState()
|
||||
setIsExecuting(false)
|
||||
|
||||
const errorMessage = error?.message || 'An unknown error occurred'
|
||||
const failedDependency = error?.failedDependency
|
||||
|
||||
// Check if failedDependency is true to notify 'rejected' instead of 'errored'
|
||||
const targetState = failedDependency === true ? 'rejected' : 'errored'
|
||||
await this.notify(toolCall.id, targetState, `Workflow execution failed: ${errorMessage}`)
|
||||
|
||||
options?.onStateChange?.(targetState)
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: errorMessage,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
148
apps/sim/lib/copilot/tools/client/base-tool.ts
Normal file
148
apps/sim/lib/copilot/tools/client/base-tool.ts
Normal file
@@ -0,0 +1,148 @@
|
||||
import type { LucideIcon } from 'lucide-react'
|
||||
// Lazy require in setState to avoid circular init issues
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const baseToolLogger = createLogger('BaseClientTool')
|
||||
|
||||
// Client tool call states used by the new runtime
export enum ClientToolCallState {
  generating = 'generating', // initial state (set by the BaseClientTool constructor)
  pending = 'pending', // awaiting user accept/reject in interrupt flows
  executing = 'executing', // set by handleAccept / tool execute()
  aborted = 'aborted',
  rejected = 'rejected', // set by handleReject after the user skips
  success = 'success',
  error = 'error',
  review = 'review',
  background = 'background', // execution continues in the background
}

// Display configuration for a given state
export interface ClientToolDisplay {
  text: string // label shown for the tool call in this state
  icon: LucideIcon
}

export interface BaseClientToolMetadata {
  // Per-state display config; states without an entry yield undefined
  // from getDisplayState().
  displayNames: Partial<Record<ClientToolCallState, ClientToolDisplay>>
  // Present only for interrupt flows: labels/icons for accept and reject.
  interrupt?: {
    accept: ClientToolDisplay
    reject: ClientToolDisplay
  }
}
|
||||
|
||||
export class BaseClientTool {
|
||||
readonly toolCallId: string
|
||||
readonly name: string
|
||||
protected state: ClientToolCallState
|
||||
protected metadata: BaseClientToolMetadata
|
||||
|
||||
constructor(toolCallId: string, name: string, metadata: BaseClientToolMetadata) {
|
||||
this.toolCallId = toolCallId
|
||||
this.name = name
|
||||
this.metadata = metadata
|
||||
this.state = ClientToolCallState.generating
|
||||
}
|
||||
|
||||
// Intentionally left empty - specific tools can override
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
async execute(_args?: Record<string, any>): Promise<void> {
|
||||
return
|
||||
}
|
||||
|
||||
// Mark a tool as complete on the server (proxies to server-side route)
|
||||
async markToolComplete(status: number, message?: any, data?: any): Promise<boolean> {
|
||||
try {
|
||||
baseToolLogger.info('markToolComplete called', {
|
||||
toolCallId: this.toolCallId,
|
||||
toolName: this.name,
|
||||
state: this.state,
|
||||
status,
|
||||
hasMessage: message !== undefined,
|
||||
hasData: data !== undefined,
|
||||
})
|
||||
} catch {}
|
||||
try {
|
||||
const res = await fetch('/api/copilot/tools/mark-complete', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
id: this.toolCallId,
|
||||
name: this.name,
|
||||
status,
|
||||
message,
|
||||
data,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
// Try to surface server error
|
||||
let errorText = `Failed to mark tool complete (status ${res.status})`
|
||||
try {
|
||||
const { error } = await res.json()
|
||||
if (error) errorText = String(error)
|
||||
} catch {}
|
||||
throw new Error(errorText)
|
||||
}
|
||||
|
||||
const json = (await res.json()) as { success?: boolean }
|
||||
return json?.success === true
|
||||
} catch (e) {
|
||||
// Default failure path
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// Accept (continue) for interrupt flows: move pending -> executing
|
||||
async handleAccept(): Promise<void> {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
}
|
||||
|
||||
// Reject (skip) for interrupt flows: mark complete with a standard skip message
|
||||
async handleReject(): Promise<void> {
|
||||
await this.markToolComplete(200, 'Tool execution was skipped by the user')
|
||||
this.setState(ClientToolCallState.rejected)
|
||||
}
|
||||
|
||||
// Return the display configuration for the current state
|
||||
getDisplayState(): ClientToolDisplay | undefined {
|
||||
return this.metadata.displayNames[this.state]
|
||||
}
|
||||
|
||||
// Return interrupt display config (labels/icons) if defined
|
||||
getInterruptDisplays(): BaseClientToolMetadata['interrupt'] | undefined {
|
||||
return this.metadata.interrupt
|
||||
}
|
||||
|
||||
// Transition to a new state (also sync to Copilot store)
|
||||
setState(next: ClientToolCallState, options?: { result?: any }): void {
|
||||
const prev = this.state
|
||||
this.state = next
|
||||
|
||||
// Notify store via manager to avoid import cycles
|
||||
try {
|
||||
const { syncToolState } = require('@/lib/copilot/tools/client/manager')
|
||||
syncToolState(this.toolCallId, next, options)
|
||||
} catch {}
|
||||
|
||||
// Log transition after syncing
|
||||
try {
|
||||
baseToolLogger.info('setState transition', {
|
||||
toolCallId: this.toolCallId,
|
||||
toolName: this.name,
|
||||
prev,
|
||||
next,
|
||||
hasResult: options?.result !== undefined,
|
||||
})
|
||||
} catch {}
|
||||
}
|
||||
|
||||
// Expose current state
|
||||
getState(): ClientToolCallState {
|
||||
return this.state
|
||||
}
|
||||
|
||||
hasInterrupt(): boolean {
|
||||
return !!this.metadata.interrupt
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,59 @@
|
||||
import { Blocks, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import {
|
||||
ExecuteResponseSuccessSchema,
|
||||
GetBlocksAndToolsResult,
|
||||
} from '@/lib/copilot/tools/shared/schemas'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
export class GetBlocksAndToolsClientTool extends BaseClientTool {
|
||||
static readonly id = 'get_blocks_and_tools'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, GetBlocksAndToolsClientTool.id, GetBlocksAndToolsClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Exploring available options', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Exploring available options', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Exploring available options', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Explored available options', icon: Blocks },
|
||||
[ClientToolCallState.error]: { text: 'Failed to explore options', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted exploring options', icon: MinusCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped exploring options', icon: MinusCircle },
|
||||
},
|
||||
interrupt: undefined,
|
||||
}
|
||||
|
||||
async execute(): Promise<void> {
|
||||
const logger = createLogger('GetBlocksAndToolsClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
|
||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ toolName: 'get_blocks_and_tools', payload: {} }),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const errorText = await res.text().catch(() => '')
|
||||
throw new Error(errorText || `Server error (${res.status})`)
|
||||
}
|
||||
const json = await res.json()
|
||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
||||
const result = GetBlocksAndToolsResult.parse(parsed.result)
|
||||
|
||||
await this.markToolComplete(200, 'Successfully retrieved blocks and tools', result)
|
||||
this.setState(ClientToolCallState.success)
|
||||
} catch (error: any) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
await this.markToolComplete(500, message)
|
||||
this.setState(ClientToolCallState.error)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,68 @@
|
||||
import { ListFilter, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import {
|
||||
ExecuteResponseSuccessSchema,
|
||||
GetBlocksMetadataInput,
|
||||
GetBlocksMetadataResult,
|
||||
} from '@/lib/copilot/tools/shared/schemas'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
interface GetBlocksMetadataArgs {
|
||||
blockIds: string[]
|
||||
}
|
||||
|
||||
export class GetBlocksMetadataClientTool extends BaseClientTool {
|
||||
static readonly id = 'get_blocks_metadata'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, GetBlocksMetadataClientTool.id, GetBlocksMetadataClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Evaluating block choices', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Evaluating block choices', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Evaluating block choices', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Evaluated block choices', icon: ListFilter },
|
||||
[ClientToolCallState.error]: { text: 'Failed to evaluate block choices', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted evaluating block choices', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped evaluating block choices',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
async execute(args?: GetBlocksMetadataArgs): Promise<void> {
|
||||
const logger = createLogger('GetBlocksMetadataClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
|
||||
const { blockIds } = GetBlocksMetadataInput.parse(args || {})
|
||||
|
||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ toolName: 'get_blocks_metadata', payload: { blockIds } }),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const errorText = await res.text().catch(() => '')
|
||||
throw new Error(errorText || `Server error (${res.status})`)
|
||||
}
|
||||
const json = await res.json()
|
||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
||||
const result = GetBlocksMetadataResult.parse(parsed.result)
|
||||
|
||||
await this.markToolComplete(200, { retrieved: Object.keys(result.metadata).length }, result)
|
||||
this.setState(ClientToolCallState.success)
|
||||
} catch (error: any) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
await this.markToolComplete(500, message)
|
||||
this.setState(ClientToolCallState.error)
|
||||
}
|
||||
}
|
||||
}
|
||||
69
apps/sim/lib/copilot/tools/client/gdrive/list-files.ts
Normal file
69
apps/sim/lib/copilot/tools/client/gdrive/list-files.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import { FolderOpen, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
interface ListGDriveFilesArgs {
|
||||
userId?: string
|
||||
workflowId?: string
|
||||
search_query?: string
|
||||
searchQuery?: string
|
||||
num_results?: number
|
||||
}
|
||||
|
||||
export class ListGDriveFilesClientTool extends BaseClientTool {
|
||||
static readonly id = 'list_gdrive_files'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, ListGDriveFilesClientTool.id, ListGDriveFilesClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Listing GDrive files', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Listing GDrive files', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Listing GDrive files', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Listed GDrive files', icon: FolderOpen },
|
||||
[ClientToolCallState.error]: { text: 'Failed to list GDrive files', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped listing GDrive files', icon: MinusCircle },
|
||||
},
|
||||
}
|
||||
|
||||
async execute(args?: ListGDriveFilesArgs): Promise<void> {
|
||||
const logger = createLogger('ListGDriveFilesClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
|
||||
// Ensure server can resolve userId via workflowId if userId not provided
|
||||
const payload: ListGDriveFilesArgs = { ...(args || {}) }
|
||||
if (!payload.userId && !payload.workflowId) {
|
||||
const { activeWorkflowId } = useWorkflowRegistry.getState()
|
||||
if (activeWorkflowId) payload.workflowId = activeWorkflowId
|
||||
}
|
||||
|
||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ toolName: 'list_gdrive_files', payload }),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const txt = await res.text().catch(() => '')
|
||||
throw new Error(txt || `Server error (${res.status})`)
|
||||
}
|
||||
const json = await res.json()
|
||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, 'Listed Google Drive files', parsed.result)
|
||||
this.setState(ClientToolCallState.success)
|
||||
} catch (e: any) {
|
||||
logger.error('execute failed', { message: e?.message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, e?.message || 'Failed to list Google Drive files')
|
||||
}
|
||||
}
|
||||
}
|
||||
63
apps/sim/lib/copilot/tools/client/gdrive/read-file.ts
Normal file
63
apps/sim/lib/copilot/tools/client/gdrive/read-file.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { FileText, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
interface ReadGDriveFileArgs {
|
||||
userId: string
|
||||
fileId: string
|
||||
type: 'doc' | 'sheet'
|
||||
range?: string
|
||||
}
|
||||
|
||||
export class ReadGDriveFileClientTool extends BaseClientTool {
|
||||
static readonly id = 'read_gdrive_file'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, ReadGDriveFileClientTool.id, ReadGDriveFileClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Reading Google Drive file', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Reading Google Drive file', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Reading Google Drive file', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Read Google Drive file', icon: FileText },
|
||||
[ClientToolCallState.error]: { text: 'Failed to read Google Drive file', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted reading Google Drive file', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped reading Google Drive file',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
async execute(args?: ReadGDriveFileArgs): Promise<void> {
|
||||
const logger = createLogger('ReadGDriveFileClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ toolName: 'read_gdrive_file', payload: args || {} }),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const txt = await res.text().catch(() => '')
|
||||
throw new Error(txt || `Server error (${res.status})`)
|
||||
}
|
||||
const json = await res.json()
|
||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, 'Read Google Drive file', parsed.result)
|
||||
this.setState(ClientToolCallState.success)
|
||||
} catch (e: any) {
|
||||
logger.error('execute failed', { message: e?.message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, e?.message || 'Failed to read Google Drive file')
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,104 @@
|
||||
import { CheckCircle, FolderOpen, Loader2, MinusCircle, X, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
// Context supplied by the UI layer: a callback that opens the Google Drive
// picker with the given OAuth access token and resolves to true when the user
// completed a selection, false when they canceled.
interface GDriveAcceptContext {
  openDrivePicker: (accessToken: string) => Promise<boolean>
}

/**
 * Interrupt-style client tool that asks the user to grant Google Drive access.
 * On accept, it loads the user's google-drive OAuth credentials, exchanges the
 * default credential for an access token, and hands that token to the UI's
 * drive-picker callback. Completion status (granted / skipped / error) is
 * reported back via markToolComplete.
 */
export class GDriveRequestAccessClientTool extends BaseClientTool {
  static readonly id = 'gdrive_request_access'

  constructor(toolCallId: string) {
    super(toolCallId, GDriveRequestAccessClientTool.id, GDriveRequestAccessClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Requesting GDrive access', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Requesting GDrive access', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Requesting GDrive access', icon: Loader2 },
      [ClientToolCallState.rejected]: { text: 'Skipped GDrive access', icon: MinusCircle },
      [ClientToolCallState.success]: { text: 'GDrive access granted', icon: CheckCircle },
      [ClientToolCallState.error]: { text: 'Failed to request GDrive access', icon: X },
      [ClientToolCallState.aborted]: { text: 'Aborted GDrive access request', icon: XCircle },
    },
    interrupt: {
      accept: { text: 'Select', icon: FolderOpen },
      reject: { text: 'Skip', icon: MinusCircle },
    },
  }

  // Accept flow: fetch creds/token, then call provided openDrivePicker to get grant
  async handleAccept(ctx?: GDriveAcceptContext): Promise<void> {
    const logger = createLogger('GDriveRequestAccessClientTool')
    logger.debug('handleAccept() called', { toolCallId: this.toolCallId })

    // Without the UI callback there is no way to show the picker — fail fast.
    if (!ctx?.openDrivePicker) {
      logger.error('openDrivePicker callback not provided')
      this.setState(ClientToolCallState.error)
      await this.markToolComplete(400, 'Missing drive picker context')
      return
    }

    try {
      this.setState(ClientToolCallState.executing)

      // Fetch credentials list
      const credsRes = await fetch(`/api/auth/oauth/credentials?provider=google-drive`)
      if (!credsRes.ok) {
        throw new Error(`Failed to load OAuth credentials (${credsRes.status})`)
      }
      const credsData = await credsRes.json()
      const creds = Array.isArray(credsData.credentials) ? credsData.credentials : []
      if (creds.length === 0) {
        throw new Error('No OAuth credentials found')
      }
      // Prefer the credential flagged as default; otherwise fall back to the first.
      const defaultCred = creds.find((c: any) => c.isDefault) || creds[0]

      // Exchange for access token
      const tokenRes = await fetch('/api/auth/oauth/token', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ credentialId: defaultCred.id }),
      })
      if (!tokenRes.ok) {
        throw new Error(`Failed to fetch access token (${tokenRes.status})`)
      }
      const { accessToken } = await tokenRes.json()
      if (!accessToken) {
        throw new Error('Missing access token in response')
      }

      // Open picker using provided UI callback
      const picked = await ctx.openDrivePicker(accessToken)
      if (!picked) {
        // User canceled
        await this.markToolComplete(200, 'Tool execution was skipped by the user')
        this.setState(ClientToolCallState.rejected)
        return
      }

      // Mark success
      await this.markToolComplete(200, { granted: true })
      this.setState(ClientToolCallState.success)
    } catch (error: any) {
      const message = error instanceof Error ? error.message : String(error)
      await this.markToolComplete(500, message)
      this.setState(ClientToolCallState.error)
    }
  }

  // Reject flow: defer to base-class bookkeeping, then pin the rejected state.
  async handleReject(): Promise<void> {
    await super.handleReject()
    this.setState(ClientToolCallState.rejected)
  }

  // Plain execute is treated as an implicit accept with the same context arg.
  async execute(args?: any): Promise<void> {
    await this.handleAccept(args)
  }
}
|
||||
28
apps/sim/lib/copilot/tools/client/manager.ts
Normal file
28
apps/sim/lib/copilot/tools/client/manager.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
const instances: Record<string, any> = {}
|
||||
|
||||
let syncStateFn: ((toolCallId: string, nextState: any, options?: { result?: any }) => void) | null =
|
||||
null
|
||||
|
||||
export function registerClientTool(toolCallId: string, instance: any) {
|
||||
instances[toolCallId] = instance
|
||||
}
|
||||
|
||||
export function getClientTool(toolCallId: string): any | undefined {
|
||||
return instances[toolCallId]
|
||||
}
|
||||
|
||||
export function unregisterClientTool(toolCallId: string) {
|
||||
delete instances[toolCallId]
|
||||
}
|
||||
|
||||
export function registerToolStateSync(
|
||||
fn: (toolCallId: string, nextState: any, options?: { result?: any }) => void
|
||||
) {
|
||||
syncStateFn = fn
|
||||
}
|
||||
|
||||
export function syncToolState(toolCallId: string, nextState: any, options?: { result?: any }) {
|
||||
try {
|
||||
syncStateFn?.(toolCallId, nextState, options)
|
||||
} catch {}
|
||||
}
|
||||
61
apps/sim/lib/copilot/tools/client/other/checkoff-todo.ts
Normal file
61
apps/sim/lib/copilot/tools/client/other/checkoff-todo.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
import { Check, Loader2, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
interface CheckoffTodoArgs {
|
||||
id?: string
|
||||
todoId?: string
|
||||
}
|
||||
|
||||
export class CheckoffTodoClientTool extends BaseClientTool {
|
||||
static readonly id = 'checkoff_todo'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, CheckoffTodoClientTool.id, CheckoffTodoClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Marking todo', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Marking todo', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Todo marked complete', icon: Check },
|
||||
[ClientToolCallState.error]: { text: 'Failed to mark todo', icon: XCircle },
|
||||
},
|
||||
}
|
||||
|
||||
async execute(args?: CheckoffTodoArgs): Promise<void> {
|
||||
const logger = createLogger('CheckoffTodoClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
|
||||
const todoId = args?.id || args?.todoId
|
||||
if (!todoId) {
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(400, 'Missing todo id')
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
const { useCopilotStore } = await import('@/stores/copilot/store')
|
||||
const store = useCopilotStore.getState()
|
||||
if (store.updatePlanTodoStatus) {
|
||||
store.updatePlanTodoStatus(todoId, 'completed')
|
||||
}
|
||||
} catch (e) {
|
||||
logger.warn('Failed to update todo status in store', { message: (e as any)?.message })
|
||||
}
|
||||
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, 'Todo checked off', { todoId })
|
||||
this.setState(ClientToolCallState.success)
|
||||
} catch (e: any) {
|
||||
logger.error('execute failed', { message: e?.message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, e?.message || 'Failed to check off todo')
|
||||
}
|
||||
}
|
||||
}
|
||||
74
apps/sim/lib/copilot/tools/client/other/make-api-request.ts
Normal file
74
apps/sim/lib/copilot/tools/client/other/make-api-request.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
import { Globe2, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
interface MakeApiRequestArgs {
|
||||
url: string
|
||||
method: 'GET' | 'POST' | 'PUT'
|
||||
queryParams?: Record<string, string | number | boolean>
|
||||
headers?: Record<string, string>
|
||||
body?: any
|
||||
}
|
||||
|
||||
export class MakeApiRequestClientTool extends BaseClientTool {
|
||||
static readonly id = 'make_api_request'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, MakeApiRequestClientTool.id, MakeApiRequestClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Preparing API request', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Review API request', icon: Globe2 },
|
||||
[ClientToolCallState.executing]: { text: 'Executing API request', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'API request complete', icon: Globe2 },
|
||||
[ClientToolCallState.error]: { text: 'Failed to execute API request', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped API request', icon: MinusCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted API request', icon: XCircle },
|
||||
},
|
||||
interrupt: {
|
||||
accept: { text: 'Execute', icon: Globe2 },
|
||||
reject: { text: 'Skip', icon: MinusCircle },
|
||||
},
|
||||
}
|
||||
|
||||
async handleReject(): Promise<void> {
|
||||
await super.handleReject()
|
||||
this.setState(ClientToolCallState.rejected)
|
||||
}
|
||||
|
||||
async handleAccept(args?: MakeApiRequestArgs): Promise<void> {
|
||||
const logger = createLogger('MakeApiRequestClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ toolName: 'make_api_request', payload: args || {} }),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const txt = await res.text().catch(() => '')
|
||||
throw new Error(txt || `Server error (${res.status})`)
|
||||
}
|
||||
const json = await res.json()
|
||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, 'API request executed', parsed.result)
|
||||
this.setState(ClientToolCallState.success)
|
||||
} catch (e: any) {
|
||||
logger.error('execute failed', { message: e?.message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, e?.message || 'API request failed')
|
||||
}
|
||||
}
|
||||
|
||||
async execute(args?: MakeApiRequestArgs): Promise<void> {
|
||||
await this.handleAccept(args)
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,64 @@
|
||||
import { Loader2, MinusCircle, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
interface MarkTodoInProgressArgs {
|
||||
id?: string
|
||||
todoId?: string
|
||||
}
|
||||
|
||||
export class MarkTodoInProgressClientTool extends BaseClientTool {
|
||||
static readonly id = 'mark_todo_in_progress'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, MarkTodoInProgressClientTool.id, MarkTodoInProgressClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Marking todo in progress', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Marking todo in progress', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Marking todo in progress', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Todo marked in progress', icon: Loader2 },
|
||||
[ClientToolCallState.error]: { text: 'Failed to mark in progress', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted marking in progress', icon: MinusCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped marking in progress', icon: MinusCircle },
|
||||
},
|
||||
}
|
||||
|
||||
async execute(args?: MarkTodoInProgressArgs): Promise<void> {
|
||||
const logger = createLogger('MarkTodoInProgressClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
|
||||
const todoId = args?.id || args?.todoId
|
||||
if (!todoId) {
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(400, 'Missing todo id')
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
const { useCopilotStore } = await import('@/stores/copilot/store')
|
||||
const store = useCopilotStore.getState()
|
||||
if (store.updatePlanTodoStatus) {
|
||||
store.updatePlanTodoStatus(todoId, 'executing')
|
||||
}
|
||||
} catch (e) {
|
||||
logger.warn('Failed to update todo status in store', { message: (e as any)?.message })
|
||||
}
|
||||
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, 'Todo marked in progress', { todoId })
|
||||
this.setState(ClientToolCallState.success)
|
||||
} catch (e: any) {
|
||||
logger.error('execute failed', { message: e?.message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, e?.message || 'Failed to mark todo in progress')
|
||||
}
|
||||
}
|
||||
}
|
||||
69
apps/sim/lib/copilot/tools/client/other/plan.ts
Normal file
69
apps/sim/lib/copilot/tools/client/other/plan.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import { ListTodo, Loader2, X, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
interface PlanArgs {
|
||||
objective?: string
|
||||
todoList?: Array<{ id?: string; content: string } | string>
|
||||
}
|
||||
|
||||
export class PlanClientTool extends BaseClientTool {
|
||||
static readonly id = 'plan'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, PlanClientTool.id, PlanClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Crafting an approach', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Crafting an approach', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Crafting an approach', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Crafted an approach', icon: ListTodo },
|
||||
[ClientToolCallState.error]: { text: 'Failed to craft an approach', icon: X },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted planning', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped planning approach', icon: XCircle },
|
||||
},
|
||||
}
|
||||
|
||||
async execute(args?: PlanArgs): Promise<void> {
|
||||
const logger = createLogger('PlanClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
|
||||
// Update store todos from args if present (client-side only)
|
||||
try {
|
||||
const todoList = args?.todoList
|
||||
if (Array.isArray(todoList)) {
|
||||
const todos = todoList.map((item: any, index: number) => ({
|
||||
id: (item && (item.id || item.todoId)) || `todo-${index}`,
|
||||
content: typeof item === 'string' ? item : item.content,
|
||||
completed: false,
|
||||
executing: false,
|
||||
}))
|
||||
const { useCopilotStore } = await import('@/stores/copilot/store')
|
||||
const store = useCopilotStore.getState()
|
||||
if (store.setPlanTodos) {
|
||||
store.setPlanTodos(todos)
|
||||
useCopilotStore.setState({ showPlanTodos: true })
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
logger.warn('Failed to update plan todos in store', { message: (e as any)?.message })
|
||||
}
|
||||
|
||||
this.setState(ClientToolCallState.success)
|
||||
// Echo args back so store/tooling can parse todoList if needed
|
||||
await this.markToolComplete(200, 'Plan ready', args || {})
|
||||
this.setState(ClientToolCallState.success)
|
||||
} catch (e: any) {
|
||||
logger.error('execute failed', { message: e?.message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, e?.message || 'Failed to plan')
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,59 @@
|
||||
import { BookOpen, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
interface SearchDocumentationArgs {
|
||||
query: string
|
||||
topK?: number
|
||||
threshold?: number
|
||||
}
|
||||
|
||||
export class SearchDocumentationClientTool extends BaseClientTool {
|
||||
static readonly id = 'search_documentation'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, SearchDocumentationClientTool.id, SearchDocumentationClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Searching documentation', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Searching documentation', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Searching documentation', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Documentation search complete', icon: BookOpen },
|
||||
[ClientToolCallState.error]: { text: 'Failed to search docs', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted documentation search', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped documentation search', icon: MinusCircle },
|
||||
},
|
||||
}
|
||||
|
||||
async execute(args?: SearchDocumentationArgs): Promise<void> {
|
||||
const logger = createLogger('SearchDocumentationClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ toolName: 'search_documentation', payload: args || {} }),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const txt = await res.text().catch(() => '')
|
||||
throw new Error(txt || `Server error (${res.status})`)
|
||||
}
|
||||
const json = await res.json()
|
||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, 'Documentation search complete', parsed.result)
|
||||
this.setState(ClientToolCallState.success)
|
||||
} catch (e: any) {
|
||||
logger.error('execute failed', { message: e?.message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, e?.message || 'Documentation search failed')
|
||||
}
|
||||
}
|
||||
}
|
||||
61
apps/sim/lib/copilot/tools/client/other/search-online.ts
Normal file
61
apps/sim/lib/copilot/tools/client/other/search-online.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
import { Globe, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
interface SearchOnlineArgs {
|
||||
query: string
|
||||
num?: number
|
||||
type?: string
|
||||
gl?: string
|
||||
hl?: string
|
||||
}
|
||||
|
||||
export class SearchOnlineClientTool extends BaseClientTool {
|
||||
static readonly id = 'search_online'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, SearchOnlineClientTool.id, SearchOnlineClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Searching online', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Searching online', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Searching online', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Online search complete', icon: Globe },
|
||||
[ClientToolCallState.error]: { text: 'Failed to search online', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped online search', icon: MinusCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted online search', icon: XCircle },
|
||||
},
|
||||
}
|
||||
|
||||
async execute(args?: SearchOnlineArgs): Promise<void> {
|
||||
const logger = createLogger('SearchOnlineClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ toolName: 'search_online', payload: args || {} }),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const txt = await res.text().catch(() => '')
|
||||
throw new Error(txt || `Server error (${res.status})`)
|
||||
}
|
||||
const json = await res.json()
|
||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, 'Online search complete', parsed.result)
|
||||
this.setState(ClientToolCallState.success)
|
||||
} catch (e: any) {
|
||||
logger.error('execute failed', { message: e?.message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, e?.message || 'Search failed')
|
||||
}
|
||||
}
|
||||
}
|
||||
34
apps/sim/lib/copilot/tools/client/registry.ts
Normal file
34
apps/sim/lib/copilot/tools/client/registry.ts
Normal file
@@ -0,0 +1,34 @@
|
||||
import type { ClientToolDefinition, ToolExecutionContext } from '@/lib/copilot/tools/client/types'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('ClientToolRegistry')
|
||||
|
||||
const tools: Record<string, ClientToolDefinition<any>> = {}
|
||||
|
||||
export function registerTool(def: ClientToolDefinition<any>) {
|
||||
tools[def.name] = def
|
||||
}
|
||||
|
||||
export function getTool(name: string): ClientToolDefinition<any> | undefined {
|
||||
return tools[name]
|
||||
}
|
||||
|
||||
export function createExecutionContext(params: {
|
||||
toolCallId: string
|
||||
toolName: string
|
||||
}): ToolExecutionContext {
|
||||
const { toolCallId, toolName } = params
|
||||
return {
|
||||
toolCallId,
|
||||
toolName,
|
||||
log: (level, message, extra) => {
|
||||
try {
|
||||
logger[level](message, { toolCallId, toolName, ...(extra || {}) })
|
||||
} catch {}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export function getRegisteredTools(): Record<string, ClientToolDefinition<any>> {
|
||||
return { ...tools }
|
||||
}
|
||||
33
apps/sim/lib/copilot/tools/client/types.ts
Normal file
33
apps/sim/lib/copilot/tools/client/types.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import type { BaseClientToolMetadata } from '@/lib/copilot/tools/client/base-tool'
|
||||
import { ClientToolCallState } from '@/lib/copilot/tools/client/base-tool'
|
||||
|
||||
/** Per-invocation context handed to a client tool definition when it runs. */
export interface ToolExecutionContext {
  toolCallId: string
  toolName: string
  // Logging only; tools must not mutate store state directly
  log: (
    level: 'debug' | 'info' | 'warn' | 'error',
    message: string,
    extra?: Record<string, any>
  ) => void
}

/** Outcome of a tool run: HTTP-like status plus optional message and payload. */
export interface ToolRunResult {
  status: number
  message?: any
  data?: any
}

/**
 * Declarative description of a client tool: its name, optional display
 * metadata, whether it interrupts for user confirmation, and its handlers.
 */
export interface ClientToolDefinition<Args = any> {
  name: string
  metadata?: BaseClientToolMetadata
  // Return true if this tool requires user confirmation before execution
  hasInterrupt?: boolean | ((args?: Args) => boolean)
  // Main execution entry point. Returns a result for the store to handle.
  execute: (ctx: ToolExecutionContext, args?: Args) => Promise<ToolRunResult | undefined>
  // Optional accept/reject handlers for interrupt flows
  accept?: (ctx: ToolExecutionContext, args?: Args) => Promise<ToolRunResult | undefined>
  reject?: (ctx: ToolExecutionContext, args?: Args) => Promise<ToolRunResult | undefined>
}

// Re-exported so consumers of these types need only one import path.
export { ClientToolCallState }
|
||||
@@ -0,0 +1,77 @@
|
||||
import { KeyRound, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
interface GetEnvArgs {
|
||||
userId?: string
|
||||
workflowId?: string
|
||||
}
|
||||
|
||||
export class GetEnvironmentVariablesClientTool extends BaseClientTool {
|
||||
static readonly id = 'get_environment_variables'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(
|
||||
toolCallId,
|
||||
GetEnvironmentVariablesClientTool.id,
|
||||
GetEnvironmentVariablesClientTool.metadata
|
||||
)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: {
|
||||
text: 'Reading environment variables',
|
||||
icon: Loader2,
|
||||
},
|
||||
[ClientToolCallState.pending]: { text: 'Reading environment variables', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Reading environment variables', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Read environment variables', icon: KeyRound },
|
||||
[ClientToolCallState.error]: { text: 'Failed to read environment variables', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: {
|
||||
text: 'Aborted reading environment variables',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped reading environment variables',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
async execute(args?: GetEnvArgs): Promise<void> {
|
||||
const logger = createLogger('GetEnvironmentVariablesClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
const payload: GetEnvArgs = { ...(args || {}) }
|
||||
if (!payload.workflowId) {
|
||||
const { activeWorkflowId } = useWorkflowRegistry.getState()
|
||||
if (activeWorkflowId) payload.workflowId = activeWorkflowId
|
||||
}
|
||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ toolName: 'get_environment_variables', payload }),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const txt = await res.text().catch(() => '')
|
||||
throw new Error(txt || `Server error (${res.status})`)
|
||||
}
|
||||
const json = await res.json()
|
||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, 'Environment variables fetched', parsed.result)
|
||||
this.setState(ClientToolCallState.success)
|
||||
} catch (e: any) {
|
||||
logger.error('execute failed', { message: e?.message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, e?.message || 'Failed to get environment variables')
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,70 @@
|
||||
import { Key, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
interface GetOAuthCredentialsArgs {
|
||||
userId?: string
|
||||
workflowId?: string
|
||||
}
|
||||
|
||||
export class GetOAuthCredentialsClientTool extends BaseClientTool {
|
||||
static readonly id = 'get_oauth_credentials'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, GetOAuthCredentialsClientTool.id, GetOAuthCredentialsClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Fetching OAuth credentials', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Fetching OAuth credentials', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Retrieving login IDs', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Retrieved login IDs', icon: Key },
|
||||
[ClientToolCallState.error]: { text: 'Failed to retrieve login IDs', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: {
|
||||
text: 'Aborted fetching OAuth credentials',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped fetching OAuth credentials',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
async execute(args?: GetOAuthCredentialsArgs): Promise<void> {
|
||||
const logger = createLogger('GetOAuthCredentialsClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
const payload: GetOAuthCredentialsArgs = { ...(args || {}) }
|
||||
if (!payload.workflowId && !payload.userId) {
|
||||
const { activeWorkflowId } = useWorkflowRegistry.getState()
|
||||
if (activeWorkflowId) payload.workflowId = activeWorkflowId
|
||||
}
|
||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ toolName: 'get_oauth_credentials', payload }),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const txt = await res.text().catch(() => '')
|
||||
throw new Error(txt || `Server error (${res.status})`)
|
||||
}
|
||||
const json = await res.json()
|
||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, 'Retrieved login IDs', parsed.result)
|
||||
this.setState(ClientToolCallState.success)
|
||||
} catch (e: any) {
|
||||
logger.error('execute failed', { message: e?.message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, e?.message || 'Failed to retrieve login IDs')
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,90 @@
|
||||
import { Loader2, Settings2, X, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
/** Arguments accepted by the `set_environment_variables` copilot server tool. */
interface SetEnvArgs {
  /** Environment-variable name → value pairs to store. */
  variables: Record<string, string>
  /** Target workflow; when omitted, the active workflow is used. */
  workflowId?: string
}
|
||||
|
||||
export class SetEnvironmentVariablesClientTool extends BaseClientTool {
|
||||
static readonly id = 'set_environment_variables'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(
|
||||
toolCallId,
|
||||
SetEnvironmentVariablesClientTool.id,
|
||||
SetEnvironmentVariablesClientTool.metadata
|
||||
)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: {
|
||||
text: 'Setting environment variables',
|
||||
icon: Loader2,
|
||||
},
|
||||
[ClientToolCallState.pending]: { text: 'Setting environment variables', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Setting environment variables', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Set environment variables', icon: Settings2 },
|
||||
[ClientToolCallState.error]: { text: 'Failed to set environment variables', icon: X },
|
||||
[ClientToolCallState.aborted]: {
|
||||
text: 'Aborted setting environment variables',
|
||||
icon: XCircle,
|
||||
},
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped setting environment variables',
|
||||
icon: XCircle,
|
||||
},
|
||||
},
|
||||
interrupt: {
|
||||
accept: { text: 'Apply', icon: Settings2 },
|
||||
reject: { text: 'Skip', icon: XCircle },
|
||||
},
|
||||
}
|
||||
|
||||
async handleReject(): Promise<void> {
|
||||
await super.handleReject()
|
||||
this.setState(ClientToolCallState.rejected)
|
||||
}
|
||||
|
||||
async handleAccept(args?: SetEnvArgs): Promise<void> {
|
||||
const logger = createLogger('SetEnvironmentVariablesClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
const payload: SetEnvArgs = { ...(args || { variables: {} }) }
|
||||
if (!payload.workflowId) {
|
||||
const { activeWorkflowId } = useWorkflowRegistry.getState()
|
||||
if (activeWorkflowId) payload.workflowId = activeWorkflowId
|
||||
}
|
||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ toolName: 'set_environment_variables', payload }),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const txt = await res.text().catch(() => '')
|
||||
throw new Error(txt || `Server error (${res.status})`)
|
||||
}
|
||||
const json = await res.json()
|
||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, 'Environment variables updated', parsed.result)
|
||||
this.setState(ClientToolCallState.success)
|
||||
} catch (e: any) {
|
||||
logger.error('execute failed', { message: e?.message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, e?.message || 'Failed to set environment variables')
|
||||
}
|
||||
}
|
||||
|
||||
async execute(args?: SetEnvArgs): Promise<void> {
|
||||
await this.handleAccept(args)
|
||||
}
|
||||
}
|
||||
117
apps/sim/lib/copilot/tools/client/workflow/build-workflow.ts
Normal file
117
apps/sim/lib/copilot/tools/client/workflow/build-workflow.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
import { Grid2x2, Grid2x2Check, Grid2x2X, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import {
|
||||
BuildWorkflowInput,
|
||||
BuildWorkflowResult,
|
||||
ExecuteResponseSuccessSchema,
|
||||
} from '@/lib/copilot/tools/shared/schemas'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
||||
|
||||
/** Arguments accepted by the `build_workflow` copilot server tool. */
interface BuildWorkflowArgs {
  /** YAML definition of the workflow to build. */
  yamlContent: string
  /** Optional human-readable description of the requested change. */
  description?: string
}
|
||||
|
||||
export class BuildWorkflowClientTool extends BaseClientTool {
|
||||
static readonly id = 'build_workflow'
|
||||
private lastResult: any | undefined
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, BuildWorkflowClientTool.id, BuildWorkflowClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Building your workflow', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Building your workflow', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Built your workflow', icon: Grid2x2Check },
|
||||
[ClientToolCallState.error]: { text: 'Failed to build your workflow', icon: XCircle },
|
||||
[ClientToolCallState.review]: { text: 'Review your workflow', icon: Grid2x2 },
|
||||
[ClientToolCallState.rejected]: { text: 'Rejected workflow changes', icon: Grid2x2X },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted building your workflow', icon: MinusCircle },
|
||||
[ClientToolCallState.pending]: { text: 'Building your workflow', icon: Loader2 },
|
||||
},
|
||||
}
|
||||
|
||||
async handleAccept(): Promise<void> {
|
||||
const logger = createLogger('BuildWorkflowClientTool')
|
||||
logger.info('handleAccept called', {
|
||||
toolCallId: this.toolCallId,
|
||||
state: this.getState(),
|
||||
hasResult: this.lastResult !== undefined,
|
||||
})
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, 'Workflow accepted', this.lastResult)
|
||||
this.setState(ClientToolCallState.success)
|
||||
}
|
||||
|
||||
async handleReject(): Promise<void> {
|
||||
const logger = createLogger('BuildWorkflowClientTool')
|
||||
logger.info('handleReject called', {
|
||||
toolCallId: this.toolCallId,
|
||||
state: this.getState(),
|
||||
})
|
||||
this.setState(ClientToolCallState.rejected)
|
||||
await this.markToolComplete(200, 'Workflow rejected')
|
||||
}
|
||||
|
||||
async execute(args?: BuildWorkflowArgs): Promise<void> {
|
||||
const logger = createLogger('BuildWorkflowClientTool')
|
||||
try {
|
||||
logger.info('execute called', { toolCallId: this.toolCallId, argsProvided: !!args })
|
||||
this.setState(ClientToolCallState.executing)
|
||||
|
||||
const { yamlContent, description } = BuildWorkflowInput.parse(args || {})
|
||||
logger.info('parsed input', {
|
||||
yamlLength: yamlContent?.length || 0,
|
||||
hasDescription: !!description,
|
||||
})
|
||||
|
||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ toolName: 'build_workflow', payload: { yamlContent, description } }),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const errorText = await res.text().catch(() => '')
|
||||
throw new Error(errorText || `Server error (${res.status})`)
|
||||
}
|
||||
|
||||
const json = await res.json()
|
||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
||||
const result = BuildWorkflowResult.parse(parsed.result)
|
||||
this.lastResult = result
|
||||
logger.info('server result parsed', {
|
||||
success: result.success,
|
||||
hasWorkflowState: !!(result as any).workflowState,
|
||||
yamlLength: result.yamlContent?.length || 0,
|
||||
})
|
||||
|
||||
// Populate diff preview immediately (without marking complete yet)
|
||||
try {
|
||||
const diffStore = useWorkflowDiffStore.getState()
|
||||
await diffStore.setProposedChanges(result.yamlContent)
|
||||
logger.info('diff proposed changes set')
|
||||
} catch (e) {
|
||||
const logArg: any = e
|
||||
logger.warn('Failed to set proposed changes in diff store', logArg)
|
||||
}
|
||||
|
||||
// Mark complete as soon as the diff view is available so LLM stream continues
|
||||
await this.markToolComplete(200, 'Workflow diff ready for review', result)
|
||||
|
||||
// Move tool into review and stash the result on the tool instance
|
||||
logger.info('setting review state')
|
||||
this.setState(ClientToolCallState.review, { result })
|
||||
} catch (error: any) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
logger.error('execute error', { message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
}
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user