Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-10 23:48:09 -05:00)

Compare commits: 71 commits
Commits in this compare (only the abbreviated SHA1 of each commit was captured; the author, date, and message columns are not available):

ed9b9ad83f, 766279bb8b, 1038e148c3, 8b78200991, c8f4791582, 6c9e0ec88b, bbbf1c2941, efc487a845, 5786909c5e, 833c5fefd5,
79dd1ccb9f, 730164abee, 25b2c45ec0, 780870c48e, fdfa935a09, 917552f041, 4846f6c60d, be810013c7, 1ee4263e60, 60c4668682,
a268fb7c04, 6c606750f5, e13adab14f, 44bc12b474, 991f0442e9, 2ebfb576ae, 11a7be54f2, f5219d03c3, f0643e01b4, 77b0c5b9ed,
9dbd44e555, 9ea9f2d52e, 4cd707fadb, f0b07428bc, 8c9e182e10, 33dd59f7a7, 53ee9f99db, 0f2a125eae, e107363ea7, 7e364a7977,
35a37d8b45, 2b52d88cee, abad3620a3, a37c6bc812, cd1bd95952, 4c9fdbe7fb, 2c47cf4161, db1cf8a6db, c6912095f7, 154d9eef6a,
c2ded1f3e1, ff43528d35, 692ba69864, cb7ce8659b, 5caef3a37d, a6888da124, 07b0597f4f, 71e2994f9d, 9973b2c165, d9e5777538,
dd74267313, 1db72dc823, da707fa491, 9ffaf305bd, 26e6286fda, c795fc83aa, cea42f5135, 6fd6f921dc, 7530fb9a4e, 9a5b035822,
0c0b6bf967
.github/workflows/build.yml (vendored, 4 changes)
@@ -85,8 +85,8 @@ jobs:
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
-         cache-from: type=gha,scope=build-v2
-         cache-to: type=gha,mode=max,scope=build-v2
+         cache-from: type=gha,scope=build-v3
+         cache-to: type=gha,mode=max,scope=build-v3
          provenance: false
          sbom: false
.github/workflows/trigger-deploy.yml (vendored, new file, 44 lines)
@@ -0,0 +1,44 @@
name: Trigger.dev Deploy

on:
  push:
    branches:
      - main
      - staging

jobs:
  deploy:
    name: Trigger.dev Deploy
    runs-on: ubuntu-latest
    concurrency:
      group: trigger-deploy-${{ github.ref }}
      cancel-in-progress: false
    env:
      TRIGGER_ACCESS_TOKEN: ${{ secrets.TRIGGER_ACCESS_TOKEN }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 'lts/*'

      - name: Setup Bun
        uses: oven-sh/setup-bun@v2
        with:
          bun-version: latest

      - name: Install dependencies
        run: bun install

      - name: Deploy to Staging
        if: github.ref == 'refs/heads/staging'
        working-directory: ./apps/sim
        run: npx --yes trigger.dev@4.0.0 deploy -e staging

      - name: Deploy to Production
        if: github.ref == 'refs/heads/main'
        working-directory: ./apps/sim
        run: npx --yes trigger.dev@4.0.0 deploy
@@ -33,12 +33,15 @@
  "microsoft_planner",
  "microsoft_teams",
  "mistral_parse",
+ "mysql",
  "notion",
  "onedrive",
  "openai",
  "outlook",
+ "parallel_ai",
  "perplexity",
  "pinecone",
+ "postgresql",
  "qdrant",
  "reddit",
  "s3",
@@ -115,8 +115,7 @@ Read data from a Microsoft Excel spreadsheet

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Excel spreadsheet data and metadata |
| `data` | object | Range data from the spreadsheet |

### `microsoft_excel_write`

@@ -136,8 +135,11 @@ Write data to a Microsoft Excel spreadsheet

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Write operation results and metadata |
| `updatedRange` | string | The range that was updated |
| `updatedRows` | number | Number of rows that were updated |
| `updatedColumns` | number | Number of columns that were updated |
| `updatedCells` | number | Number of cells that were updated |
| `metadata` | object | Spreadsheet metadata |

### `microsoft_excel_table_add`

@@ -155,8 +157,9 @@ Add new rows to a Microsoft Excel table

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Table add operation results and metadata |
| `index` | number | Index of the first row that was added |
| `values` | array | Array of rows that were added to the table |
| `metadata` | object | Spreadsheet metadata |
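
For orientation, a `microsoft_excel_table_add` result matching the output table above might look like the following sketch; the concrete values, and the exact fields inside `metadata`, are assumptions for illustration.

```typescript
// Illustrative shape only; field names follow the Output table above.
const tableAddResult = {
  success: true,
  output: {
    index: 12,                                    // index of the first appended row
    values: [['2025-01-10', 'Widget', 3, 59.97]], // rows that were added to the table
    metadata: { spreadsheetId: 'abc123', tableName: 'Orders' }, // assumed metadata fields
  },
}
```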
apps/docs/content/docs/tools/mysql.mdx (new file, 180 lines)
@@ -0,0 +1,180 @@
---
title: MySQL
description: Connect to MySQL database
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="mysql"
  color="#E0E0E0"
  icon={true}
  iconSvg={`<svg className="block-icon"
    xmlns='http://www.w3.org/2000/svg'
    viewBox='0 0 25.6 25.6'
  >
    <path
d='M179.076 94.886c-3.568-.1-6.336.268-8.656 1.25-.668.27-1.74.27-1.828 1.116.357.355.4.936.713 1.428.535.893 1.473 2.096 2.32 2.72l2.855 2.053c1.74 1.07 3.703 1.695 5.398 2.766.982.625 1.963 1.428 2.945 2.098.5.357.803.938 1.428 1.16v-.135c-.312-.4-.402-.98-.713-1.428l-1.34-1.293c-1.293-1.74-2.9-3.258-4.64-4.506-1.428-.982-4.55-2.32-5.13-3.97l-.088-.1c.98-.1 2.14-.447 3.078-.715 1.518-.4 2.9-.312 4.46-.713l2.143-.625v-.4c-.803-.803-1.383-1.874-2.23-2.632-2.275-1.963-4.775-3.882-7.363-5.488-1.383-.892-3.168-1.473-4.64-2.23-.537-.268-1.428-.402-1.74-.848-.805-.98-1.25-2.275-1.83-3.436l-3.658-7.763c-.803-1.74-1.295-3.48-2.275-5.086-4.596-7.585-9.594-12.18-17.268-16.687-1.65-.937-3.613-1.34-5.7-1.83l-3.346-.18c-.715-.312-1.428-1.16-2.053-1.562-2.543-1.606-9.102-5.086-10.977-.5-1.205 2.9 1.785 5.755 2.8 7.228.76 1.026 1.74 2.186 2.277 3.346.3.758.4 1.562.713 2.365.713 1.963 1.383 4.15 2.32 5.98.5.937 1.025 1.92 1.65 2.767.357.5.982.714 1.115 1.517-.625.893-.668 2.23-1.025 3.347-1.607 5.042-.982 11.288 1.293 15 .715 1.115 2.4 3.57 4.686 2.632 2.008-.803 1.56-3.346 2.14-5.577.135-.535.045-.892.312-1.25v.1l1.83 3.703c1.383 2.186 3.793 4.462 5.8 5.98 1.07.803 1.918 2.187 3.256 2.677v-.135h-.088c-.268-.4-.67-.58-1.027-.892-.803-.803-1.695-1.785-2.32-2.677-1.873-2.498-3.523-5.265-4.996-8.12-.715-1.383-1.34-2.9-1.918-4.283-.27-.536-.27-1.34-.715-1.606-.67.98-1.65 1.83-2.143 3.034-.848 1.918-.936 4.283-1.248 6.737-.18.045-.1 0-.18.1-1.426-.356-1.918-1.83-2.453-3.078-1.338-3.168-1.562-8.254-.402-11.913.312-.937 1.652-3.882 1.117-4.774-.27-.848-1.16-1.338-1.652-2.008-.58-.848-1.203-1.918-1.605-2.855-1.07-2.5-1.605-5.265-2.766-7.764-.537-1.16-1.473-2.365-2.232-3.435-.848-1.205-1.783-2.053-2.453-3.48-.223-.5-.535-1.294-.178-1.83.088-.357.268-.5.623-.58.58-.5 2.232.134 2.812.4 1.65.67 3.033 1.294 4.416 2.23.625.446 1.295 1.294 2.098 1.518h.938c1.428.312 3.033.1 4.37.5 2.365.76 4.506 1.874 6.426 3.08 5.844 3.703 10.664 8.968 13.92 15.26.535 1.026.758 1.963 1.25 3.034.938 2.187 2.098 4.417 3.033 6.56.938 2.097 1.83 4.24 3.168 5.98.67.937 3.346 1.427 4.55 1.918.893.4 2.275.76 3.08 1.25 1.516.937 3.033 2.008 4.46 3.034.713.534 2.945 1.65 3.078 2.54zm-45.5-38.772a7.09 7.09 0 0 0-1.828.223v.1h.088c.357.714.982 1.205 1.428 1.83l1.027 2.142.088-.1c.625-.446.938-1.16.938-2.23-.268-.312-.312-.625-.535-.937-.268-.446-.848-.67-1.206-1.026z'
      transform='matrix(.390229 0 0 .38781 -46.300037 -16.856717)'
      fillRule='evenodd'
      fill='#00678c'
    />
  </svg>`}
/>

{/* MANUAL-CONTENT-START:intro */}
The [MySQL](https://www.mysql.com/) tool enables you to connect to any MySQL database and perform a wide range of database operations directly within your agentic workflows. With secure connection handling and flexible configuration, you can easily manage and interact with your data.

With the MySQL tool, you can:

- **Query data**: Execute SELECT queries to retrieve data from your MySQL tables using the `mysql_query` operation.
- **Insert records**: Add new rows to your tables with the `mysql_insert` operation by specifying the table and data to insert.
- **Update records**: Modify existing data in your tables using the `mysql_update` operation, providing the table, new data, and WHERE conditions.
- **Delete records**: Remove rows from your tables with the `mysql_delete` operation, specifying the table and WHERE conditions.
- **Execute raw SQL**: Run any custom SQL command using the `mysql_execute` operation for advanced use cases.

The MySQL tool is ideal for scenarios where your agents need to interact with structured data—such as automating reporting, syncing data between systems, or powering data-driven workflows. It streamlines database access, making it easy to read, write, and manage your MySQL data programmatically.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Connect to any MySQL database to execute queries, manage data, and perform database operations. Supports SELECT, INSERT, UPDATE, DELETE operations with secure connection handling.

## Tools

### `mysql_query`

Execute SELECT query on MySQL database

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MySQL server hostname or IP address |
| `port` | number | Yes | MySQL server port \(default: 3306\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `query` | string | Yes | SQL SELECT query to execute |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Array of rows returned from the query |
| `rowCount` | number | Number of rows returned |
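
As a concrete reference for the tables above, here is a sketch of a `mysql_query` call and the shape of its result. The connection details, table name, and the two interfaces are illustrative assumptions; in Sim these values are supplied through the MySQL block's fields.

```typescript
// Hypothetical input/output shapes mirroring the parameter tables above.
interface MySQLQueryInput {
  host: string
  port: number
  database: string
  username: string
  password: string
  ssl?: 'disabled' | 'required' | 'preferred'
  query: string
}

interface MySQLQueryOutput {
  message: string
  rows: Array<Record<string, unknown>>
  rowCount: number
}

const input: MySQLQueryInput = {
  host: 'db.example.com', // assumed hostname
  port: 3306,
  database: 'analytics',
  username: 'report_reader',
  password: process.env.MYSQL_PASSWORD ?? '',
  ssl: 'required',
  query: 'SELECT id, email FROM users WHERE created_at > NOW() - INTERVAL 7 DAY',
}

// Expected result shape, per the Output table above (values invented):
const example: MySQLQueryOutput = {
  message: 'Query executed successfully',
  rows: [{ id: 1, email: 'a@example.com' }],
  rowCount: 1,
}
```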
### `mysql_insert`

Insert new record into MySQL database

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MySQL server hostname or IP address |
| `port` | number | Yes | MySQL server port \(default: 3306\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `table` | string | Yes | Table name to insert into |
| `data` | object | Yes | Data to insert as key-value pairs |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Array of inserted rows |
| `rowCount` | number | Number of rows inserted |

### `mysql_update`

Update existing records in MySQL database

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MySQL server hostname or IP address |
| `port` | number | Yes | MySQL server port \(default: 3306\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `table` | string | Yes | Table name to update |
| `data` | object | Yes | Data to update as key-value pairs |
| `where` | string | Yes | WHERE clause condition \(without WHERE keyword\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Array of updated rows |
| `rowCount` | number | Number of rows updated |
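
The `data` and `where` parameters work together: `data` holds the column values to set, while `where` is the bare condition (no WHERE keyword) that limits which rows are updated. A small illustrative pairing, with a made-up table and columns:

```typescript
// Illustrative mysql_update arguments; only `table`, `data`, and `where` shown.
const updateArgs = {
  table: 'orders',
  data: { status: 'shipped', shipped_at: '2025-01-10' },
  // Passed without the WHERE keyword; the tool prepends it when building the statement.
  where: "id = 42 AND status = 'pending'",
}
```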
### `mysql_delete`

Delete records from MySQL database

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MySQL server hostname or IP address |
| `port` | number | Yes | MySQL server port \(default: 3306\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `table` | string | Yes | Table name to delete from |
| `where` | string | Yes | WHERE clause condition \(without WHERE keyword\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Array of deleted rows |
| `rowCount` | number | Number of rows deleted |

### `mysql_execute`

Execute raw SQL query on MySQL database

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MySQL server hostname or IP address |
| `port` | number | Yes | MySQL server port \(default: 3306\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `query` | string | Yes | Raw SQL query to execute |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Array of rows returned from the query |
| `rowCount` | number | Number of rows affected |

## Notes

- Category: `tools`
- Type: `mysql`
apps/docs/content/docs/tools/parallel_ai.mdx (new file, 106 lines)
@@ -0,0 +1,106 @@
---
title: Parallel AI
description: Search with Parallel AI
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="parallel_ai"
  color="#E0E0E0"
  icon={true}
  iconSvg={`<svg className="block-icon"
    fill='currentColor'
    viewBox='0 0 271 270'
    xmlns='http://www.w3.org/2000/svg'
  >
    <path
d='M267.804 105.65H193.828C194.026 106.814 194.187 107.996 194.349 109.178H76.6703C76.4546 110.736 76.2388 112.312 76.0591 113.87H1.63342C1.27387 116.198 0.950289 118.543 0.698608 120.925H75.3759C75.2501 122.483 75.1602 124.059 75.0703 125.617H195.949C196.003 126.781 196.057 127.962 196.093 129.144H270.68V125.384C270.195 118.651 269.242 112.061 267.804 105.65Z'
      fill='#1D1C1A'
    />
    <path
d='M195.949 144.401H75.0703C75.1422 145.977 75.2501 147.535 75.3759 149.093H0.698608C0.950289 151.457 1.2559 153.802 1.63342 156.148H76.0591C76.2388 157.724 76.4366 159.282 76.6703 160.84H194.349C194.187 162.022 194.008 163.186 193.828 164.367H267.804C269.242 157.957 270.195 151.367 270.68 144.634V140.874H196.093C196.057 142.055 196.003 143.219 195.949 144.401Z'
      fill='#1D1C1A'
    />
    <path
d='M190.628 179.642H80.3559C80.7514 181.218 81.1828 182.776 81.6143 184.334H9.30994C10.2448 186.715 11.2515 189.061 12.3121 191.389H83.7536C84.2749 192.965 84.7962 194.523 85.3535 196.08H185.594C185.163 197.262 184.732 198.426 184.282 199.608H254.519C258.6 192.177 261.98 184.316 264.604 176.114H191.455C191.185 177.296 190.898 178.46 190.61 179.642H190.628Z'
      fill='#1D1C1A'
    />
    <path
d='M177.666 214.883H93.3352C94.1082 216.458 94.9172 218.034 95.7441 219.574H29.8756C31.8351 221.992 33.8666 224.337 35.9699 226.63H99.6632C100.598 228.205 101.551 229.781 102.522 231.321H168.498C167.761 232.503 167.006 233.685 166.233 234.849H226.762C234.474 227.847 241.36 219.95 247.292 211.355H179.356C178.799 212.537 178.26 213.719 177.684 214.883H177.666Z'
      fill='#1D1C1A'
    />
    <path
d='M154.943 250.106H116.058C117.371 251.699 118.701 253.257 120.067 254.797H73.021C91.6094 264.431 112.715 269.946 135.096 270C135.24 270 135.366 270 135.492 270C135.618 270 135.761 270 135.887 270C164.04 269.911 190.178 261.28 211.805 246.56H157.748C156.813 247.742 155.878 248.924 154.925 250.088L154.943 250.106Z'
      fill='#1D1C1A'
    />
    <path
d='M116.059 19.9124H154.943C155.896 21.0764 156.831 22.2582 157.766 23.4401H211.823C190.179 8.72065 164.058 0.0895344 135.906 0C135.762 0 135.636 0 135.51 0C135.384 0 135.24 0 135.115 0C112.715 0.0716275 91.6277 5.56904 73.0393 15.2029H120.086C118.719 16.7429 117.389 18.3187 116.077 19.8945L116.059 19.9124Z'
      fill='#1D1C1A'
    />
    <path
d='M93.3356 55.1532H177.667C178.242 56.3171 178.799 57.499 179.339 58.6808H247.274C241.342 50.0855 234.457 42.1886 226.744 35.187H166.215C166.988 36.351 167.743 37.5328 168.48 38.7147H102.504C101.533 40.2726 100.58 41.8305 99.6456 43.4063H35.9523C33.831 45.6804 31.7996 48.0262 29.858 50.4616H95.7265C94.8996 52.0195 94.1086 53.5774 93.3176 55.1532H93.3356Z'
      fill='#1D1C1A'
    />
    <path
d='M80.3736 90.3758H190.646C190.933 91.5398 191.221 92.7216 191.491 93.9035H264.64C262.015 85.7021 258.636 77.841 254.555 70.4097H184.318C184.767 71.5736 185.199 72.7555 185.63 73.9373H85.3893C84.832 75.4952 84.2927 77.0531 83.7893 78.6289H12.3479C11.2872 80.9389 10.2805 83.2847 9.3457 85.6842H81.65C81.2186 87.2421 80.7871 88.8 80.3916 90.3758H80.3736Z'
      fill='#1D1C1A'
    />
  </svg>`}
/>

{/* MANUAL-CONTENT-START:intro */}
[Parallel AI](https://parallel.ai/) is an advanced web search and content extraction platform designed to deliver comprehensive, high-quality results for any query. By leveraging intelligent processing and large-scale data extraction, Parallel AI enables users and agents to access, analyze, and synthesize information from across the web with speed and accuracy.

With Parallel AI, you can:

- **Search the web intelligently**: Retrieve relevant, up-to-date information from a wide range of sources
- **Extract and summarize content**: Get concise, meaningful excerpts from web pages and documents
- **Customize search objectives**: Tailor queries to specific needs or questions for targeted results
- **Process results at scale**: Handle large volumes of search results with advanced processing options
- **Integrate with workflows**: Use Parallel AI within Sim to automate research, content gathering, and knowledge extraction
- **Control output granularity**: Specify the number of results and the amount of content per result
- **Secure API access**: Protect your searches and data with API key authentication

In Sim, the Parallel AI integration empowers your agents to perform web searches and extract content programmatically. This enables powerful automation scenarios such as real-time research, competitive analysis, content monitoring, and knowledge base creation. By connecting Sim with Parallel AI, you unlock the ability for agents to gather, process, and utilize web data as part of your automated workflows.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Search the web using Parallel AI's advanced search capabilities. Get comprehensive results with intelligent processing and content extraction.

## Tools

### `parallel_search`

Search the web using Parallel AI. Provides comprehensive search results with intelligent processing and content extraction.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `objective` | string | Yes | The search objective or question to answer |
| `search_queries` | string | No | Optional comma-separated list of search queries to execute |
| `processor` | string | No | Processing method: base or pro \(default: base\) |
| `max_results` | number | No | Maximum number of results to return \(default: 5\) |
| `max_chars_per_result` | number | No | Maximum characters per result \(default: 1500\) |
| `apiKey` | string | Yes | Parallel AI API Key |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `results` | array | Search results with excerpts from relevant pages |
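
For reference, a `parallel_search` input object matching the table above might look like the sketch below; the objective, queries, and environment variable name are invented for illustration.

```typescript
// Illustrative parallel_search input; field names follow the Input table above.
const searchInput = {
  objective: 'Summarize recent developments in serverless GPU pricing',
  search_queries: 'serverless GPU pricing 2024, GPU per-second billing comparison',
  processor: 'base', // or 'pro' for heavier processing
  max_results: 5,
  max_chars_per_result: 1500,
  apiKey: process.env.PARALLEL_AI_API_KEY ?? '',
}
```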

## Notes

- Category: `tools`
- Type: `parallel_ai`
apps/docs/content/docs/tools/postgresql.mdx (new file, 188 lines)
@@ -0,0 +1,188 @@
---
title: PostgreSQL
description: Connect to PostgreSQL database
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="postgresql"
  color="#336791"
  icon={true}
  iconSvg={`<svg className="block-icon"
    viewBox='-4 0 264 264'
    xmlns='http://www.w3.org/2000/svg'
    preserveAspectRatio='xMinYMin meet'
  >
<path d='M255.008 158.086c-1.535-4.649-5.556-7.887-10.756-8.664-2.452-.366-5.26-.21-8.583.475-5.792 1.195-10.089 1.65-13.225 1.738 11.837-19.985 21.462-42.775 27.003-64.228 8.96-34.689 4.172-50.492-1.423-57.64C233.217 10.847 211.614.683 185.552.372c-13.903-.17-26.108 2.575-32.475 4.549-5.928-1.046-12.302-1.63-18.99-1.738-12.537-.2-23.614 2.533-33.079 8.15-5.24-1.772-13.65-4.27-23.362-5.864-22.842-3.75-41.252-.828-54.718 8.685C6.622 25.672-.937 45.684.461 73.634c.444 8.874 5.408 35.874 13.224 61.48 4.492 14.718 9.282 26.94 14.237 36.33 7.027 13.315 14.546 21.156 22.987 23.972 4.731 1.576 13.327 2.68 22.368-4.85 1.146 1.388 2.675 2.767 4.704 4.048 2.577 1.625 5.728 2.953 8.875 3.74 11.341 2.835 21.964 2.126 31.027-1.848.056 1.612.099 3.152.135 4.482.06 2.157.12 4.272.199 6.25.537 13.374 1.447 23.773 4.143 31.049.148.4.347 1.01.557 1.657 1.345 4.118 3.594 11.012 9.316 16.411 5.925 5.593 13.092 7.308 19.656 7.308 3.292 0 6.433-.432 9.188-1.022 9.82-2.105 20.973-5.311 29.041-16.799 7.628-10.86 11.336-27.217 12.007-52.99.087-.729.167-1.425.244-2.088l.16-1.362 1.797.158.463.031c10.002.456 22.232-1.665 29.743-5.154 5.935-2.754 24.954-12.795 20.476-26.351' />
    <path
d='M237.906 160.722c-29.74 6.135-31.785-3.934-31.785-3.934 31.4-46.593 44.527-105.736 33.2-120.211-30.904-39.485-84.399-20.811-85.292-20.327l-.287.052c-5.876-1.22-12.451-1.946-19.842-2.067-13.456-.22-23.664 3.528-31.41 9.402 0 0-95.43-39.314-90.991 49.444.944 18.882 27.064 142.873 58.218 105.422 11.387-13.695 22.39-25.274 22.39-25.274 5.464 3.63 12.006 5.482 18.864 4.817l.533-.452c-.166 1.7-.09 3.363.213 5.332-8.026 8.967-5.667 10.541-21.711 13.844-16.235 3.346-6.698 9.302-.471 10.86 7.549 1.887 25.013 4.561 36.813-11.958l-.47 1.885c3.144 2.519 5.352 16.383 4.982 28.952-.37 12.568-.617 21.197 1.86 27.937 2.479 6.74 4.948 21.905 26.04 17.386 17.623-3.777 26.756-13.564 28.027-29.89.901-11.606 2.942-9.89 3.07-20.267l1.637-4.912c1.887-15.733.3-20.809 11.157-18.448l2.64.232c7.99.363 18.45-1.286 24.589-4.139 13.218-6.134 21.058-16.377 8.024-13.686h.002'
      fill='#336791'
    />
    <path
d='M108.076 81.525c-2.68-.373-5.107-.028-6.335.902-.69.523-.904 1.129-.962 1.546-.154 1.105.62 2.327 1.096 2.957 1.346 1.784 3.312 3.01 5.258 3.28.282.04.563.058.842.058 3.245 0 6.196-2.527 6.456-4.392.325-2.336-3.066-3.893-6.355-4.35M196.86 81.599c-.256-1.831-3.514-2.353-6.606-1.923-3.088.43-6.082 1.824-5.832 3.659.2 1.427 2.777 3.863 5.827 3.863.258 0 .518-.017.78-.054 2.036-.282 3.53-1.575 4.24-2.32 1.08-1.136 1.706-2.402 1.591-3.225'
      fill='#FFF'
    />
    <path
d='M247.802 160.025c-1.134-3.429-4.784-4.532-10.848-3.28-18.005 3.716-24.453 1.142-26.57-.417 13.995-21.32 25.508-47.092 31.719-71.137 2.942-11.39 4.567-21.968 4.7-30.59.147-9.463-1.465-16.417-4.789-20.665-13.402-17.125-33.072-26.311-56.882-26.563-16.369-.184-30.199 4.005-32.88 5.183-5.646-1.404-11.801-2.266-18.502-2.376-12.288-.199-22.91 2.743-31.704 8.74-3.82-1.422-13.692-4.811-25.765-6.756-20.872-3.36-37.458-.814-49.294 7.571-14.123 10.006-20.643 27.892-19.38 53.16.425 8.501 5.269 34.653 12.913 59.698 10.062 32.964 21 51.625 32.508 55.464 1.347.449 2.9.763 4.613.763 4.198 0 9.345-1.892 14.7-8.33a529.832 529.832 0 0 1 20.261-22.926c4.524 2.428 9.494 3.784 14.577 3.92.01.133.023.266.035.398a117.66 117.66 0 0 0-2.57 3.175c-3.522 4.471-4.255 5.402-15.592 7.736-3.225.666-11.79 2.431-11.916 8.435-.136 6.56 10.125 9.315 11.294 9.607 4.074 1.02 7.999 1.523 11.742 1.523 9.103 0 17.114-2.992 23.516-8.781-.197 23.386.778 46.43 3.586 53.451 2.3 5.748 7.918 19.795 25.664 19.794 2.604 0 5.47-.303 8.623-.979 18.521-3.97 26.564-12.156 29.675-30.203 1.665-9.645 4.522-32.676 5.866-45.03 2.836.885 6.487 1.29 10.434 1.289 8.232 0 17.731-1.749 23.688-4.514 6.692-3.108 18.768-10.734 16.578-17.36zm-44.106-83.48c-.061 3.647-.563 6.958-1.095 10.414-.573 3.717-1.165 7.56-1.314 12.225-.147 4.54.42 9.26.968 13.825 1.108 9.22 2.245 18.712-2.156 28.078a36.508 36.508 0 0 1-1.95-4.009c-.547-1.326-1.735-3.456-3.38-6.404-6.399-11.476-21.384-38.35-13.713-49.316 2.285-3.264 8.084-6.62 22.64-4.813zm-17.644-61.787c21.334.471 38.21 8.452 50.158 23.72 9.164 11.711-.927 64.998-30.14 110.969a171.33 171.33 0 0 0-.886-1.117l-.37-.462c7.549-12.467 6.073-24.802 4.759-35.738-.54-4.488-1.05-8.727-.92-12.709.134-4.22.692-7.84 1.232-11.34.663-4.313 1.338-8.776 1.152-14.037.139-.552.195-1.204.122-1.978-.475-5.045-6.235-20.144-17.975-33.81-6.422-7.475-15.787-15.84-28.574-21.482 5.5-1.14 13.021-2.203 21.442-2.016zM66.674 175.778c-5.9 7.094-9.974 5.734-11.314 5.288-8.73-2.912-18.86-21.364-27.791-50.624-7.728-25.318-12.244-50.777-12.602-57.916-1.128-22.578 4.345-38.313 16.268-46.769 19.404-13.76 51.306-5.524 64.125-1.347-.184.182-.376.352-.558.537-21.036 21.244-20.537 57.54-20.485 59.759-.002.856.07 2.068.168 3.735.362 6.105 1.036 17.467-.764 30.334-1.672 11.957 2.014 23.66 10.111 32.109a36.275 36.275 0 0 0 2.617 2.468c-3.604 3.86-11.437 12.396-19.775 22.426zm22.479-29.993c-6.526-6.81-9.49-16.282-8.133-25.99 1.9-13.592 1.199-25.43.822-31.79-.053-.89-.1-1.67-.127-2.285 3.073-2.725 17.314-10.355 27.47-8.028 4.634 1.061 7.458 4.217 8.632 9.645 6.076 28.103.804 39.816-3.432 49.229-.873 1.939-1.698 3.772-2.402 5.668l-.546 1.466c-1.382 3.706-2.668 7.152-3.465 10.424-6.938-.02-13.687-2.984-18.819-8.34zm1.065 37.9c-2.026-.506-3.848-1.385-4.917-2.114.893-.42 2.482-.992 5.238-1.56 13.337-2.745 15.397-4.683 19.895-10.394 1.031-1.31 2.2-2.794 3.819-4.602l.002-.002c2.411-2.7 3.514-2.242 5.514-1.412 1.621.67 3.2 2.702 3.84 4.938.303 1.056.643 3.06-.47 4.62-9.396 13.156-23.088 12.987-32.921 10.526zm69.799 64.952c-16.316 3.496-22.093-4.829-25.9-14.346-2.457-6.144-3.665-33.85-2.808-64.447.011-.407-.047-.8-.159-1.17a15.444 15.444 0 0 0-.456-2.162c-1.274-4.452-4.379-8.176-8.104-9.72-1.48-.613-4.196-1.738-7.46-.903.696-2.868 1.903-6.107 3.212-9.614l.549-1.475c.618-1.663 1.394-3.386 2.214-5.21 4.433-9.848 10.504-23.337 3.915-53.81-2.468-11.414-10.71-16.988-23.204-15.693-7.49.775-14.343 3.797-17.761 5.53-.735.372-1.407.732-2.035 1.082.954-11.5 4.558-32.992 18.04-46.59 8.489-8.56 19.794-12.788 33.568-12.56 27.14.444 44.544 14.372 54.366 25.979 8.464 10.001 13.047 
20.076 14.876 25.51-13.755-1.399-23.11 1.316-27.852 8.096-10.317 14.748 5.644 43.372 13.315 57.129 1.407 2.521 2.621 4.7 3.003 5.626 2.498 6.054 5.732 10.096 8.093 13.046.724.904 1.426 1.781 1.96 2.547-4.166 1.201-11.649 3.976-10.967 17.847-.55 6.96-4.461 39.546-6.448 51.059-2.623 15.21-8.22 20.875-23.957 24.25zm68.104-77.936c-4.26 1.977-11.389 3.46-18.161 3.779-7.48.35-11.288-.838-12.184-1.569-.42-8.644 2.797-9.547 6.202-10.503.535-.15 1.057-.297 1.561-.473.313.255.656.508 1.032.756 6.012 3.968 16.735 4.396 31.874 1.271l.166-.033c-2.042 1.909-5.536 4.471-10.49 6.772z'
      fill='#FFF'
    />
  </svg>`}
/>

{/* MANUAL-CONTENT-START:intro */}
The [PostgreSQL](https://www.postgresql.org/) tool enables you to connect to any PostgreSQL database and perform a wide range of database operations directly within your agentic workflows. With secure connection handling and flexible configuration, you can easily manage and interact with your data.

With the PostgreSQL tool, you can:

- **Query data**: Execute SELECT queries to retrieve data from your PostgreSQL tables using the `postgresql_query` operation.
- **Insert records**: Add new rows to your tables with the `postgresql_insert` operation by specifying the table and data to insert.
- **Update records**: Modify existing data in your tables using the `postgresql_update` operation, providing the table, new data, and WHERE conditions.
- **Delete records**: Remove rows from your tables with the `postgresql_delete` operation, specifying the table and WHERE conditions.
- **Execute raw SQL**: Run any custom SQL command using the `postgresql_execute` operation for advanced use cases.

The PostgreSQL tool is ideal for scenarios where your agents need to interact with structured data—such as automating reporting, syncing data between systems, or powering data-driven workflows. It streamlines database access, making it easy to read, write, and manage your PostgreSQL data programmatically.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Connect to any PostgreSQL database to execute queries, manage data, and perform database operations. Supports SELECT, INSERT, UPDATE, DELETE operations with secure connection handling.

## Tools

### `postgresql_query`

Execute a SELECT query on PostgreSQL database

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | PostgreSQL server hostname or IP address |
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `query` | string | Yes | SQL SELECT query to execute |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Array of rows returned from the query |
| `rowCount` | number | Number of rows returned |

### `postgresql_insert`

Insert data into PostgreSQL database

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | PostgreSQL server hostname or IP address |
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `table` | string | Yes | Table name to insert data into |
| `data` | object | Yes | Data object to insert \(key-value pairs\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Inserted data \(if RETURNING clause used\) |
| `rowCount` | number | Number of rows inserted |
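
The `rows` field of the write operations is only populated when the statement asks PostgreSQL to send data back, which for an insert means a `RETURNING` clause. A hedged sketch of the difference, written as raw statements for the `postgresql_execute` tool documented below (the table, columns, and result values are made up):

```typescript
// Hypothetical raw statements for postgresql_execute.
const plainInsert = "INSERT INTO invoices (customer_id, total) VALUES (42, 99.50)"
const insertWithReturning =
  "INSERT INTO invoices (customer_id, total) VALUES (42, 99.50) RETURNING id, created_at"

// Illustrative result shapes: without RETURNING the response typically carries
// rowCount only; with RETURNING the inserted row comes back in `rows`.
const withoutReturning = { message: 'Insert successful', rows: [], rowCount: 1 }
const withReturning = {
  message: 'Insert successful',
  rows: [{ id: 1001, created_at: '2025-01-10T12:00:00Z' }],
  rowCount: 1,
}
```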
### `postgresql_update`

Update data in PostgreSQL database

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | PostgreSQL server hostname or IP address |
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `table` | string | Yes | Table name to update data in |
| `data` | object | Yes | Data object with fields to update \(key-value pairs\) |
| `where` | string | Yes | WHERE clause condition \(without WHERE keyword\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Updated data \(if RETURNING clause used\) |
| `rowCount` | number | Number of rows updated |

### `postgresql_delete`

Delete data from PostgreSQL database

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | PostgreSQL server hostname or IP address |
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `table` | string | Yes | Table name to delete data from |
| `where` | string | Yes | WHERE clause condition \(without WHERE keyword\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Deleted data \(if RETURNING clause used\) |
| `rowCount` | number | Number of rows deleted |

### `postgresql_execute`

Execute raw SQL query on PostgreSQL database

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | PostgreSQL server hostname or IP address |
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `query` | string | Yes | Raw SQL query to execute |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Array of rows returned from the query |
| `rowCount` | number | Number of rows affected |

## Notes

- Category: `tools`
- Type: `postgresql`
@@ -3,7 +3,6 @@
  import { useEffect, useState } from 'react'
  import { GithubIcon, GoogleIcon } from '@/components/icons'
  import { Button } from '@/components/ui/button'
- import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip'
  import { client } from '@/lib/auth-client'

  interface SocialLoginButtonsProps {

@@ -114,58 +113,16 @@ export function SocialLoginButtons({
      </Button>
    )

-   const renderGithubButton = () => {
-     if (githubAvailable) return githubButton
+   const hasAnyOAuthProvider = githubAvailable || googleAvailable
-     return (
-       <TooltipProvider>
-         <Tooltip>
-           <TooltipTrigger asChild>
-             <div>{githubButton}</div>
-           </TooltipTrigger>
-           <TooltipContent className='border-neutral-700 bg-neutral-800 text-white'>
-             <p>
-               GitHub login requires OAuth credentials to be configured. Add the following
-               environment variables:
-             </p>
-             <ul className='mt-2 space-y-1 text-neutral-300 text-xs'>
-               <li>• GITHUB_CLIENT_ID</li>
-               <li>• GITHUB_CLIENT_SECRET</li>
-             </ul>
-           </TooltipContent>
-         </Tooltip>
-       </TooltipProvider>
-     )
-   }
-
-   const renderGoogleButton = () => {
-     if (googleAvailable) return googleButton
-
-     return (
-       <TooltipProvider>
-         <Tooltip>
-           <TooltipTrigger asChild>
-             <div>{googleButton}</div>
-           </TooltipTrigger>
-           <TooltipContent className='border-neutral-700 bg-neutral-800 text-white'>
-             <p>
-               Google login requires OAuth credentials to be configured. Add the following
-               environment variables:
-             </p>
-             <ul className='mt-2 space-y-1 text-neutral-300 text-xs'>
-               <li>• GOOGLE_CLIENT_ID</li>
-               <li>• GOOGLE_CLIENT_SECRET</li>
-             </ul>
-           </TooltipContent>
-         </Tooltip>
-       </TooltipProvider>
-     )
+   if (!hasAnyOAuthProvider) {
+     return null
+   }

    return (
      <div className='grid gap-3'>
-       {renderGithubButton()}
-       {renderGoogleButton()}
+       {githubAvailable && githubButton}
+       {googleAvailable && googleButton}
      </div>
    )
  }
@@ -28,12 +28,12 @@ export default function AuthLayout({ children }: { children: React.ReactNode })
            <img
              src={brand.logoUrl}
              alt={`${brand.name} Logo`}
-             width={42}
-             height={42}
-             className='h-[42px] w-[42px] object-contain'
+             width={56}
+             height={56}
+             className='h-[56px] w-[56px] object-contain'
            />
          ) : (
-           <Image src='/sim.svg' alt={`${brand.name} Logo`} width={42} height={42} />
+           <Image src='/sim.svg' alt={`${brand.name} Logo`} width={56} height={56} />
          )}
        </Link>
      </div>
@@ -366,11 +366,13 @@ export default function LoginPage({
        callbackURL={callbackUrl}
      />

-     <div className='relative mt-2 py-4'>
-       <div className='absolute inset-0 flex items-center'>
-         <div className='w-full border-neutral-700/50 border-t' />
-       </div>
-     </div>
+     {(githubAvailable || googleAvailable) && (
+       <div className='relative mt-2 py-4'>
+         <div className='absolute inset-0 flex items-center'>
+           <div className='w-full border-neutral-700/50 border-t' />
+         </div>
+       </div>
+     )}

      <form onSubmit={onSubmit} className='space-y-5'>
        <div className='space-y-4'>
@@ -381,11 +381,13 @@ function SignupFormContent({
        isProduction={isProduction}
      />

-     <div className='relative mt-2 py-4'>
-       <div className='absolute inset-0 flex items-center'>
-         <div className='w-full border-neutral-700/50 border-t' />
-       </div>
-     </div>
+     {(githubAvailable || googleAvailable) && (
+       <div className='relative mt-2 py-4'>
+         <div className='absolute inset-0 flex items-center'>
+           <div className='w-full border-neutral-700/50 border-t' />
+         </div>
+       </div>
+     )}

      <form onSubmit={onSubmit} className='space-y-5'>
        <div className='space-y-4'>
@@ -354,6 +354,18 @@ export function mockExecutionDependencies() {
    }))
  }

+ /**
+  * Mock Trigger.dev SDK (tasks.trigger and task factory) for tests that import background modules
+  */
+ export function mockTriggerDevSdk() {
+   vi.mock('@trigger.dev/sdk', () => ({
+     tasks: {
+       trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }),
+     },
+     task: vi.fn().mockReturnValue({}),
+   }))
+ }

  export function mockWorkflowAccessValidation(shouldSucceed = true) {
    if (shouldSucceed) {
      vi.mock('@/app/api/workflows/middleware', () => ({
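
A brief sketch of how a test might consume the new helper; the import path and the test body are illustrative assumptions rather than code from this change.

```typescript
import { describe, expect, it } from 'vitest'
// Assumed location of the shared test utilities that export mockTriggerDevSdk.
import { mockTriggerDevSdk } from '@/app/api/__test-utils__/utils'

// Register the Trigger.dev mock before the module under test is loaded,
// so '@trigger.dev/sdk' resolves to the stubbed tasks.trigger / task factory.
mockTriggerDevSdk()

describe('background job enqueue', () => {
  it('resolves with the stubbed task handle', async () => {
    const { tasks } = await import('@trigger.dev/sdk')
    await expect(tasks.trigger('example-task', { payload: 1 })).resolves.toEqual({
      id: 'mock-task-id',
    })
  })
})
```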
@@ -84,14 +84,12 @@ export async function GET(request: NextRequest) {
      return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
    }

    // Check if the access token is valid
    if (!credential.accessToken) {
      logger.warn(`[${requestId}] No access token available for credential`)
      return NextResponse.json({ error: 'No access token available' }, { status: 400 })
    }

    try {
      // Refresh the token if needed
      const { accessToken } = await refreshTokenIfNeeded(requestId, credential, credentialId)
      return NextResponse.json({ accessToken }, { status: 200 })
    } catch (_error) {
@@ -1,4 +1,4 @@
- import { and, eq } from 'drizzle-orm'
+ import { and, desc, eq } from 'drizzle-orm'
  import { getSession } from '@/lib/auth'
  import { createLogger } from '@/lib/logs/console/logger'
  import { refreshOAuthToken } from '@/lib/oauth/oauth'

@@ -70,7 +70,8 @@ export async function getOAuthToken(userId: string, providerId: string): Promise
      })
      .from(account)
      .where(and(eq(account.userId, userId), eq(account.providerId, providerId)))
-     .orderBy(account.createdAt)
+     // Always use the most recently updated credential for this provider
+     .orderBy(desc(account.updatedAt))
      .limit(1)

    if (connections.length === 0) {

@@ -80,19 +81,13 @@ export async function getOAuthToken(userId: string, providerId: string): Promise

    const credential = connections[0]

-   // Check if we have a valid access token
-   if (!credential.accessToken) {
-     logger.warn(`Access token is null for user ${userId}, provider ${providerId}`)
-     return null
-   }
-
-   // Check if the token is expired and needs refreshing
+   // Determine whether we should refresh: missing token OR expired token
    const now = new Date()
    const tokenExpiry = credential.accessTokenExpiresAt
-   // Only refresh if we have an expiration time AND it's expired AND we have a refresh token
-   const needsRefresh = tokenExpiry && tokenExpiry < now && !!credential.refreshToken
+   const shouldAttemptRefresh =
+     !!credential.refreshToken && (!credential.accessToken || (tokenExpiry && tokenExpiry < now))

-   if (needsRefresh) {
+   if (shouldAttemptRefresh) {
      logger.info(
        `Access token expired for user ${userId}, provider ${providerId}. Attempting to refresh.`
      )

@@ -141,6 +136,13 @@ export async function getOAuthToken(userId: string, providerId: string): Promise
      }
    }

+   if (!credential.accessToken) {
+     logger.warn(
+       `Access token is null and no refresh attempted or available for user ${userId}, provider ${providerId}`
+     )
+     return null
+   }

    logger.info(`Found valid OAuth token for user ${userId}, provider ${providerId}`)
    return credential.accessToken
  }

@@ -164,19 +166,21 @@ export async function refreshAccessTokenIfNeeded(
      return null
    }

-   // Check if we need to refresh the token
+   // Decide if we should refresh: token missing OR expired
    const expiresAt = credential.accessTokenExpiresAt
    const now = new Date()
-   // Only refresh if we have an expiration time AND it's expired
-   // If no expiration time is set (newly created credentials), assume token is valid
-   const needsRefresh = expiresAt && expiresAt <= now
+   const shouldRefresh =
+     !!credential.refreshToken && (!credential.accessToken || (expiresAt && expiresAt <= now))

    const accessToken = credential.accessToken

-   if (needsRefresh && credential.refreshToken) {
+   if (shouldRefresh) {
      logger.info(`[${requestId}] Token expired, attempting to refresh for credential`)
      try {
-       const refreshedToken = await refreshOAuthToken(credential.providerId, credential.refreshToken)
+       const refreshedToken = await refreshOAuthToken(
+         credential.providerId,
+         credential.refreshToken!
+       )

        if (!refreshedToken) {
          logger.error(`[${requestId}] Failed to refresh token for credential: ${credentialId}`, {

@@ -217,6 +221,7 @@ export async function refreshAccessTokenIfNeeded(
        return null
      }
    } else if (!accessToken) {
+     // We have no access token and either no refresh token or not eligible to refresh
      logger.error(`[${requestId}] Missing access token for credential`)
      return null
    }

@@ -233,21 +238,20 @@ export async function refreshTokenIfNeeded(
    credential: any,
    credentialId: string
  ): Promise<{ accessToken: string; refreshed: boolean }> {
-   // Check if we need to refresh the token
+   // Decide if we should refresh: token missing OR expired
    const expiresAt = credential.accessTokenExpiresAt
    const now = new Date()
-   // Only refresh if we have an expiration time AND it's expired
-   // If no expiration time is set (newly created credentials), assume token is valid
-   const needsRefresh = expiresAt && expiresAt <= now
+   const shouldRefresh =
+     !!credential.refreshToken && (!credential.accessToken || (expiresAt && expiresAt <= now))

-   // If token is still valid, return it directly
-   if (!needsRefresh || !credential.refreshToken) {
+   // If token appears valid and present, return it directly
+   if (!shouldRefresh) {
      logger.info(`[${requestId}] Access token is valid`)
      return { accessToken: credential.accessToken, refreshed: false }
    }

    try {
-     const refreshResult = await refreshOAuthToken(credential.providerId, credential.refreshToken)
+     const refreshResult = await refreshOAuthToken(credential.providerId, credential.refreshToken!)

      if (!refreshResult) {
        logger.error(`[${requestId}] Failed to refresh token for credential`)
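
Across `getOAuthToken`, `refreshAccessTokenIfNeeded`, and `refreshTokenIfNeeded`, the change introduces the same refresh predicate: attempt a refresh when a refresh token exists and the access token is either missing or past its expiry. A standalone restatement of that predicate, with a minimal assumed credential shape:

```typescript
// Minimal credential shape assumed for illustration; the real rows come from the
// `account` table selected in getOAuthToken above.
interface OAuthCredential {
  accessToken: string | null
  refreshToken: string | null
  accessTokenExpiresAt: Date | null
}

/** Mirrors the shouldRefresh / shouldAttemptRefresh conditions introduced in this change. */
function shouldRefreshToken(credential: OAuthCredential, now: Date = new Date()): boolean {
  const expiresAt = credential.accessTokenExpiresAt
  const expired = !!expiresAt && expiresAt <= now
  return !!credential.refreshToken && (!credential.accessToken || expired)
}
```

Note that `getOAuthToken` compares the expiry with a strict `<`, while the other two helpers use `<=`; the sketch above uses `<=`.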
@@ -4,8 +4,9 @@ import { auth } from '@/lib/auth'

  export async function POST() {
    try {
+     const hdrs = await headers()
      const response = await auth.api.generateOneTimeToken({
-       headers: await headers(),
+       headers: hdrs,
      })

      if (!response) {

@@ -14,7 +15,6 @@

      return NextResponse.json({ token: response.token })
    } catch (error) {
      console.error('Error generating one-time token:', error)
      return NextResponse.json({ error: 'Failed to generate token' }, { status: 500 })
    }
  }
@@ -1,109 +0,0 @@
import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { processDailyBillingCheck } from '@/lib/billing/core/billing'
import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('DailyBillingCron')

/**
 * Daily billing CRON job endpoint that checks individual billing periods
 */
export async function POST(request: NextRequest) {
  try {
    const authError = verifyCronAuth(request, 'daily billing check')
    if (authError) {
      return authError
    }

    logger.info('Starting daily billing check cron job')

    const startTime = Date.now()

    // Process overage billing for users and organizations with periods ending today
    const result = await processDailyBillingCheck()

    const duration = Date.now() - startTime

    if (result.success) {
      logger.info('Daily billing check completed successfully', {
        processedUsers: result.processedUsers,
        processedOrganizations: result.processedOrganizations,
        totalChargedAmount: result.totalChargedAmount,
        duration: `${duration}ms`,
      })

      return NextResponse.json({
        success: true,
        summary: {
          processedUsers: result.processedUsers,
          processedOrganizations: result.processedOrganizations,
          totalChargedAmount: result.totalChargedAmount,
          duration: `${duration}ms`,
        },
      })
    }

    logger.error('Daily billing check completed with errors', {
      processedUsers: result.processedUsers,
      processedOrganizations: result.processedOrganizations,
      totalChargedAmount: result.totalChargedAmount,
      errorCount: result.errors.length,
      errors: result.errors,
      duration: `${duration}ms`,
    })

    return NextResponse.json(
      {
        success: false,
        summary: {
          processedUsers: result.processedUsers,
          processedOrganizations: result.processedOrganizations,
          totalChargedAmount: result.totalChargedAmount,
          errorCount: result.errors.length,
          duration: `${duration}ms`,
        },
        errors: result.errors,
      },
      { status: 500 }
    )
  } catch (error) {
    logger.error('Fatal error in monthly billing cron job', { error })

    return NextResponse.json(
      {
        success: false,
        error: 'Internal server error during daily billing check',
        details: error instanceof Error ? error.message : 'Unknown error',
      },
      { status: 500 }
    )
  }
}

/**
 * GET endpoint for manual testing and health checks
 */
export async function GET(request: NextRequest) {
  try {
    const authError = verifyCronAuth(request, 'daily billing check health check')
    if (authError) {
      return authError
    }

    return NextResponse.json({
      status: 'ready',
      message:
        'Daily billing check cron job is ready to process users and organizations with periods ending today',
      currentDate: new Date().toISOString().split('T')[0],
    })
  } catch (error) {
    logger.error('Error in billing health check', { error })
    return NextResponse.json(
      {
        status: 'error',
        error: error instanceof Error ? error.message : 'Unknown error',
      },
      { status: 500 }
    )
  }
}
@@ -143,7 +143,7 @@ async function generateChatTitleAsync(
    streamController?: ReadableStreamDefaultController<Uint8Array>
  ): Promise<void> {
    try {
-     logger.info(`[${requestId}] Starting async title generation for chat ${chatId}`)
+     // logger.info(`[${requestId}] Starting async title generation for chat ${chatId}`)

      const title = await generateChatTitle(userMessage)

@@ -167,7 +167,7 @@ async function generateChatTitleAsync(
        logger.debug(`[${requestId}] Sent title_updated event to client: "${title}"`)
      }

-     logger.info(`[${requestId}] Generated title for chat ${chatId}: "${title}"`)
+     // logger.info(`[${requestId}] Generated title for chat ${chatId}: "${title}"`)
    } catch (error) {
      logger.error(`[${requestId}] Failed to generate title for chat ${chatId}:`, error)
      // Don't throw - this is a background operation

@@ -229,21 +229,21 @@ export async function POST(req: NextRequest) {
      }
    }

-   logger.info(`[${tracker.requestId}] Processing copilot chat request`, {
-     userId: authenticatedUserId,
-     workflowId,
-     chatId,
-     mode,
-     stream,
-     createNewChat,
-     messageLength: message.length,
-     hasImplicitFeedback: !!implicitFeedback,
-     provider: provider || 'openai',
-     hasConversationId: !!conversationId,
-     depth,
-     prefetch,
-     origin: requestOrigin,
-   })
+   // logger.info(`[${tracker.requestId}] Processing copilot chat request`, {
+   //   userId: authenticatedUserId,
+   //   workflowId,
+   //   chatId,
+   //   mode,
+   //   stream,
+   //   createNewChat,
+   //   messageLength: message.length,
+   //   hasImplicitFeedback: !!implicitFeedback,
+   //   provider: provider || 'openai',
+   //   hasConversationId: !!conversationId,
+   //   depth,
+   //   prefetch,
+   //   origin: requestOrigin,
+   // })

    // Handle chat context
    let currentChat: any = null

@@ -285,7 +285,7 @@ export async function POST(req: NextRequest) {
    // Process file attachments if present
    const processedFileContents: any[] = []
    if (fileAttachments && fileAttachments.length > 0) {
-     logger.info(`[${tracker.requestId}] Processing ${fileAttachments.length} file attachments`)
+     // logger.info(`[${tracker.requestId}] Processing ${fileAttachments.length} file attachments`)

      for (const attachment of fileAttachments) {
        try {

@@ -296,7 +296,7 @@ export async function POST(req: NextRequest) {
        }

        // Download file from S3
-       logger.info(`[${tracker.requestId}] Downloading file: ${attachment.s3_key}`)
+       // logger.info(`[${tracker.requestId}] Downloading file: ${attachment.s3_key}`)
        let fileBuffer: Buffer
        if (USE_S3_STORAGE) {
          fileBuffer = await downloadFromS3WithConfig(attachment.s3_key, S3_COPILOT_CONFIG)

@@ -309,9 +309,9 @@ export async function POST(req: NextRequest) {
        const fileContent = createAnthropicFileContent(fileBuffer, attachment.media_type)
        if (fileContent) {
          processedFileContents.push(fileContent)
-         logger.info(
-           `[${tracker.requestId}] Processed file: ${attachment.filename} (${attachment.media_type})`
-         )
+         // logger.info(
+         //   `[${tracker.requestId}] Processed file: ${attachment.filename} (${attachment.media_type})`
+         // )
        }
      } catch (error) {
        logger.error(

@@ -424,27 +424,7 @@ export async function POST(req: NextRequest) {
      ...(requestOrigin ? { origin: requestOrigin } : {}),
    }

-   // Log the payload being sent to the streaming endpoint
-   try {
-     logger.info(`[${tracker.requestId}] Sending payload to sim agent streaming endpoint`, {
-       url: `${SIM_AGENT_API_URL}/api/chat-completion-streaming`,
-       provider: providerToUse,
-       mode,
-       stream,
-       workflowId,
-       hasConversationId: !!effectiveConversationId,
-       depth: typeof effectiveDepth === 'number' ? effectiveDepth : undefined,
-       prefetch: typeof effectivePrefetch === 'boolean' ? effectivePrefetch : undefined,
-       messagesCount: requestPayload.messages.length,
-       ...(requestOrigin ? { origin: requestOrigin } : {}),
-     })
-     // Full payload as JSON string
-     logger.info(
-       `[${tracker.requestId}] Full streaming payload: ${JSON.stringify(requestPayload)}`
-     )
-   } catch (e) {
-     logger.warn(`[${tracker.requestId}] Failed to log payload preview for streaming endpoint`, e)
-   }
+   // Log the payload being sent to the streaming endpoint (logs currently disabled)

    const simAgentResponse = await fetch(`${SIM_AGENT_API_URL}/api/chat-completion-streaming`, {
      method: 'POST',

@@ -475,7 +455,7 @@ export async function POST(req: NextRequest) {
    // If streaming is requested, forward the stream and update chat later
    if (stream && simAgentResponse.body) {
-     logger.info(`[${tracker.requestId}] Streaming response from sim agent`)
+     // logger.info(`[${tracker.requestId}] Streaming response from sim agent`)

      // Create user message to save
      const userMessage = {

@@ -493,7 +473,7 @@ export async function POST(req: NextRequest) {
      let assistantContent = ''
      const toolCalls: any[] = []
      let buffer = ''
-     let isFirstDone = true
+     const isFirstDone = true
      let responseIdFromStart: string | undefined
      let responseIdFromDone: string | undefined
      // Track tool call progress to identify a safe done event

@@ -515,30 +495,30 @@ export async function POST(req: NextRequest) {
          // Start title generation in parallel if needed
          if (actualChatId && !currentChat?.title && conversationHistory.length === 0) {
-           logger.info(`[${tracker.requestId}] Starting title generation with stream updates`, {
-             chatId: actualChatId,
-             hasTitle: !!currentChat?.title,
-             conversationLength: conversationHistory.length,
-             message: message.substring(0, 100) + (message.length > 100 ? '...' : ''),
-           })
+           // logger.info(`[${tracker.requestId}] Starting title generation with stream updates`, {
+           //   chatId: actualChatId,
+           //   hasTitle: !!currentChat?.title,
+           //   conversationLength: conversationHistory.length,
+           //   message: message.substring(0, 100) + (message.length > 100 ? '...' : ''),
+           // })
            generateChatTitleAsync(actualChatId, message, tracker.requestId, controller).catch(
              (error) => {
                logger.error(`[${tracker.requestId}] Title generation failed:`, error)
              }
            )
          } else {
-           logger.debug(`[${tracker.requestId}] Skipping title generation`, {
-             chatId: actualChatId,
-             hasTitle: !!currentChat?.title,
-             conversationLength: conversationHistory.length,
-             reason: !actualChatId
-               ? 'no chatId'
-               : currentChat?.title
-                 ? 'already has title'
-                 : conversationHistory.length > 0
-                   ? 'not first message'
-                   : 'unknown',
-           })
+           // logger.debug(`[${tracker.requestId}] Skipping title generation`, {
+           //   chatId: actualChatId,
+           //   hasTitle: !!currentChat?.title,
+           //   conversationLength: conversationHistory.length,
+           //   reason: !actualChatId
+           //     ? 'no chatId'
+           //     : currentChat?.title
+           //       ? 'already has title'
+           //       : conversationHistory.length > 0
+           //         ? 'not first message'
+           //         : 'unknown',
+           // })
          }

          // Forward the sim agent stream and capture assistant response

@@ -549,7 +529,7 @@ export async function POST(req: NextRequest) {
          while (true) {
            const { done, value } = await reader.read()
            if (done) {
-             logger.info(`[${tracker.requestId}] Stream reading completed`)
+             // logger.info(`[${tracker.requestId}] Stream reading completed`)
              break
            }

@@ -559,9 +539,9 @@ export async function POST(req: NextRequest) {
              controller.enqueue(value)
            } catch (error) {
              // Client disconnected - stop reading from sim agent
-             logger.info(
-               `[${tracker.requestId}] Client disconnected, stopping stream processing`
-             )
+             // logger.info(
+             //   `[${tracker.requestId}] Client disconnected, stopping stream processing`
+             // )
              reader.cancel() // Stop reading from sim agent
              break
            }
@@ -608,15 +588,15 @@ export async function POST(req: NextRequest) {
|
||||
break
|
||||
|
||||
case 'tool_call':
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Tool call ${event.data?.partial ? '(partial)' : '(complete)'}:`,
|
||||
{
|
||||
id: event.data?.id,
|
||||
name: event.data?.name,
|
||||
arguments: event.data?.arguments,
|
||||
blockIndex: event.data?._blockIndex,
|
||||
}
|
||||
)
|
||||
// logger.info(
|
||||
// `[${tracker.requestId}] Tool call ${event.data?.partial ? '(partial)' : '(complete)'}:`,
|
||||
// {
|
||||
// id: event.data?.id,
|
||||
// name: event.data?.name,
|
||||
// arguments: event.data?.arguments,
|
||||
// blockIndex: event.data?._blockIndex,
|
||||
// }
|
||||
// )
|
||||
if (!event.data?.partial) {
|
||||
toolCalls.push(event.data)
|
||||
if (event.data?.id) {
|
||||
@@ -625,30 +605,24 @@ export async function POST(req: NextRequest) {
|
||||
}
|
||||
break
|
||||
|
||||
case 'tool_execution':
|
||||
logger.info(`[${tracker.requestId}] Tool execution started:`, {
|
||||
toolCallId: event.toolCallId,
|
||||
toolName: event.toolName,
|
||||
status: event.status,
|
||||
})
|
||||
case 'tool_generating':
|
||||
// logger.info(`[${tracker.requestId}] Tool generating:`, {
|
||||
// toolCallId: event.toolCallId,
|
||||
// toolName: event.toolName,
|
||||
// })
|
||||
if (event.toolCallId) {
|
||||
if (event.status === 'completed') {
|
||||
startedToolExecutionIds.add(event.toolCallId)
|
||||
completedToolExecutionIds.add(event.toolCallId)
|
||||
} else {
|
||||
startedToolExecutionIds.add(event.toolCallId)
|
||||
}
|
||||
startedToolExecutionIds.add(event.toolCallId)
|
||||
}
|
||||
break
|
||||
|
||||
case 'tool_result':
|
||||
logger.info(`[${tracker.requestId}] Tool result received:`, {
|
||||
toolCallId: event.toolCallId,
|
||||
toolName: event.toolName,
|
||||
success: event.success,
|
||||
result: `${JSON.stringify(event.result).substring(0, 200)}...`,
|
||||
resultSize: JSON.stringify(event.result).length,
|
||||
})
|
||||
// logger.info(`[${tracker.requestId}] Tool result received:`, {
|
||||
// toolCallId: event.toolCallId,
|
||||
// toolName: event.toolName,
|
||||
// success: event.success,
|
||||
// result: `${JSON.stringify(event.result).substring(0, 200)}...`,
|
||||
// resultSize: JSON.stringify(event.result).length,
|
||||
// })
|
||||
if (event.toolCallId) {
|
||||
completedToolExecutionIds.add(event.toolCallId)
|
||||
}
|
||||
@@ -669,9 +643,6 @@ export async function POST(req: NextRequest) {
|
||||
case 'start':
|
||||
if (event.data?.responseId) {
|
||||
responseIdFromStart = event.data.responseId
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Received start event with responseId: ${responseIdFromStart}`
|
||||
)
|
||||
}
|
||||
break
|
||||
|
||||
@@ -679,9 +650,7 @@ export async function POST(req: NextRequest) {
|
||||
if (event.data?.responseId) {
|
||||
responseIdFromDone = event.data.responseId
|
||||
lastDoneResponseId = responseIdFromDone
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Received done event with responseId: ${responseIdFromDone}`
|
||||
)
|
||||
|
||||
// Mark this done as safe only if no tool call is currently in progress or pending
|
||||
const announced = announcedToolCallIds.size
|
||||
const completed = completedToolExecutionIds.size
|
||||
@@ -689,34 +658,14 @@ export async function POST(req: NextRequest) {
|
||||
const hasToolInProgress = announced > completed || started > completed
|
||||
if (!hasToolInProgress) {
|
||||
lastSafeDoneResponseId = responseIdFromDone
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Marked done as SAFE (no tools in progress)`
|
||||
)
|
||||
} else {
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Done received but tools are in progress (announced=${announced}, started=${started}, completed=${completed})`
|
||||
)
|
||||
}
|
||||
}
|
||||
if (isFirstDone) {
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Initial AI response complete, tool count: ${toolCalls.length}`
|
||||
)
|
||||
isFirstDone = false
|
||||
} else {
|
||||
logger.info(`[${tracker.requestId}] Conversation round complete`)
|
||||
}
|
||||
break
|
||||
|
||||
case 'error':
|
||||
logger.error(`[${tracker.requestId}] Stream error event:`, event.error)
|
||||
break
|
||||
|
||||
default:
|
||||
logger.debug(
|
||||
`[${tracker.requestId}] Unknown event type: ${event.type}`,
|
||||
event
|
||||
)
|
||||
}
|
||||
} catch (e) {
|
||||
// Enhanced error handling for large payloads and parsing issues
|
||||
|
||||
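The streaming branch above forwards the sim agent's bytes to the client while also decoding them to track tool calls, done events, and errors. As a rough illustration of that decode step only, here is a minimal sketch, assuming the agent emits standard SSE `data: {json}` lines with a `type` field; the exact wire format is not shown in this diff.

// Minimal sketch of SSE line parsing (assumed format: one `data: {"type": ...}` JSON object per line).
type AgentEvent = { type: string; data?: any; [key: string]: any }

function parseSseChunk(buffer: string, chunk: string): { events: AgentEvent[]; rest: string } {
  const combined = buffer + chunk
  const lines = combined.split('\n')
  const rest = lines.pop() ?? '' // keep a possibly incomplete trailing line for the next read
  const events: AgentEvent[] = []
  for (const line of lines) {
    const trimmed = line.trim()
    if (!trimmed.startsWith('data:')) continue
    try {
      events.push(JSON.parse(trimmed.slice('data:'.length).trim()))
    } catch {
      // Ignore partial or malformed payloads; the real handler logs these cases instead.
    }
  }
  return { events, rest }
}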
@@ -0,0 +1,53 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import {
  authenticateCopilotRequestSessionOnly,
  createBadRequestResponse,
  createInternalServerErrorResponse,
  createRequestTracker,
  createUnauthorizedResponse,
} from '@/lib/copilot/auth'
import { routeExecution } from '@/lib/copilot/tools/server/router'
import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('ExecuteCopilotServerToolAPI')

const ExecuteSchema = z.object({
  toolName: z.string(),
  payload: z.unknown().optional(),
})

export async function POST(req: NextRequest) {
  const tracker = createRequestTracker()
  try {
    const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
    if (!isAuthenticated || !userId) {
      return createUnauthorizedResponse()
    }

    const body = await req.json()
    try {
      const preview = JSON.stringify(body).slice(0, 300)
      logger.debug(`[${tracker.requestId}] Incoming request body preview`, { preview })
    } catch {}

    const { toolName, payload } = ExecuteSchema.parse(body)

    logger.info(`[${tracker.requestId}] Executing server tool`, { toolName })
    const result = await routeExecution(toolName, payload)

    try {
      const resultPreview = JSON.stringify(result).slice(0, 300)
      logger.debug(`[${tracker.requestId}] Server tool result preview`, { toolName, resultPreview })
    } catch {}

    return NextResponse.json({ success: true, result })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.debug(`[${tracker.requestId}] Zod validation error`, { issues: error.issues })
      return createBadRequestResponse('Invalid request body for execute-copilot-server-tool')
    }
    logger.error(`[${tracker.requestId}] Failed to execute server tool:`, error)
    return createInternalServerErrorResponse('Failed to execute server tool')
  }
}
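For orientation, a client-side call to this new endpoint might look like the following sketch. The URL is inferred from the logger name and error messages (the file path is not shown in this hunk), and the tool name and payload shape are hypothetical, depending on what routeExecution supports.

// Hypothetical client call to the execute-copilot-server-tool route; names and URL are assumptions.
async function executeServerTool(toolName: string, payload?: unknown) {
  const res = await fetch('/api/copilot/tools/execute-copilot-server-tool', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ toolName, payload }),
  })
  if (!res.ok) throw new Error(`Server tool failed with status ${res.status}`)
  const { result } = await res.json()
  return result
}

// Example (assumed tool id): executeServerTool('get_workflow_metadata', { workflowId: 'wf_123' })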
@@ -1,761 +1,7 @@
|
||||
/**
|
||||
* Tests for copilot methods API route
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import {
|
||||
createMockRequest,
|
||||
mockCryptoUuid,
|
||||
setupCommonApiMocks,
|
||||
} from '@/app/api/__test-utils__/utils'
|
||||
import { describe, expect, it } from 'vitest'
|
||||
|
||||
describe('Copilot Methods API Route', () => {
|
||||
const mockRedisGet = vi.fn()
|
||||
const mockRedisSet = vi.fn()
|
||||
const mockGetRedisClient = vi.fn()
|
||||
const mockToolRegistryHas = vi.fn()
|
||||
const mockToolRegistryGet = vi.fn()
|
||||
const mockToolRegistryExecute = vi.fn()
|
||||
const mockToolRegistryGetAvailableIds = vi.fn()
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetModules()
|
||||
setupCommonApiMocks()
|
||||
mockCryptoUuid()
|
||||
|
||||
// Mock Redis client
|
||||
const mockRedisClient = {
|
||||
get: mockRedisGet,
|
||||
set: mockRedisSet,
|
||||
}
|
||||
|
||||
mockGetRedisClient.mockReturnValue(mockRedisClient)
|
||||
mockRedisGet.mockResolvedValue(null)
|
||||
mockRedisSet.mockResolvedValue('OK')
|
||||
|
||||
vi.doMock('@/lib/redis', () => ({
|
||||
getRedisClient: mockGetRedisClient,
|
||||
}))
|
||||
|
||||
// Mock tool registry
|
||||
const mockToolRegistry = {
|
||||
has: mockToolRegistryHas,
|
||||
get: mockToolRegistryGet,
|
||||
execute: mockToolRegistryExecute,
|
||||
getAvailableIds: mockToolRegistryGetAvailableIds,
|
||||
}
|
||||
|
||||
mockToolRegistryHas.mockReturnValue(true)
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: false })
|
||||
mockToolRegistryExecute.mockResolvedValue({ success: true, data: 'Tool executed successfully' })
|
||||
mockToolRegistryGetAvailableIds.mockReturnValue(['test-tool', 'another-tool'])
|
||||
|
||||
vi.doMock('@/lib/copilot/tools/server-tools/registry', () => ({
|
||||
copilotToolRegistry: mockToolRegistry,
|
||||
}))
|
||||
|
||||
// Mock environment variables
|
||||
vi.doMock('@/lib/env', () => ({
|
||||
env: {
|
||||
INTERNAL_API_SECRET: 'test-secret-key',
|
||||
COPILOT_API_KEY: 'test-copilot-key',
|
||||
},
|
||||
}))
|
||||
|
||||
// Mock setTimeout for polling
|
||||
vi.spyOn(global, 'setTimeout').mockImplementation((callback, _delay) => {
|
||||
if (typeof callback === 'function') {
|
||||
setImmediate(callback)
|
||||
}
|
||||
return setTimeout(() => {}, 0) as any
|
||||
})
|
||||
|
||||
// Mock Date.now for timeout control
|
||||
let mockTime = 1640995200000
|
||||
vi.spyOn(Date, 'now').mockImplementation(() => {
|
||||
mockTime += 1000 // Add 1 second each call
|
||||
return mockTime
|
||||
})
|
||||
|
||||
// Mock crypto.randomUUID for request IDs
|
||||
vi.spyOn(crypto, 'randomUUID').mockReturnValue('test-request-id')
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks()
|
||||
vi.restoreAllMocks()
|
||||
})
|
||||
|
||||
describe('POST', () => {
|
||||
it('should return 401 when API key is missing', async () => {
|
||||
const req = createMockRequest('POST', {
|
||||
methodId: 'test-tool',
|
||||
params: {},
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(401)
|
||||
const responseData = await response.json()
|
||||
expect(responseData).toEqual({
|
||||
success: false,
|
||||
error: 'API key required',
|
||||
})
|
||||
})
|
||||
|
||||
it('should return 401 when API key is invalid', async () => {
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'invalid-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'test-tool',
|
||||
params: {},
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(401)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(typeof responseData.error).toBe('string')
|
||||
})
|
||||
|
||||
it('should return 401 when internal API key is not configured', async () => {
|
||||
// Mock environment with no API key
|
||||
vi.doMock('@/lib/env', () => ({
|
||||
env: {
|
||||
INTERNAL_API_SECRET: undefined,
|
||||
COPILOT_API_KEY: 'test-copilot-key',
|
||||
},
|
||||
}))
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'any-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'test-tool',
|
||||
params: {},
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(401)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.status).toBeUndefined()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(typeof responseData.error).toBe('string')
|
||||
})
|
||||
|
||||
it('should return 400 for invalid request body - missing methodId', async () => {
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
params: {},
|
||||
// Missing methodId
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toContain('Required')
|
||||
})
|
||||
|
||||
it('should return 400 for empty methodId', async () => {
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: '',
|
||||
params: {},
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toContain('Method ID is required')
|
||||
})
|
||||
|
||||
it('should return 400 when tool is not found in registry', async () => {
|
||||
mockToolRegistryHas.mockReturnValue(false)
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'unknown-tool',
|
||||
params: {},
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toContain('Unknown method: unknown-tool')
|
||||
expect(responseData.error).toContain('Available methods: test-tool, another-tool')
|
||||
})
|
||||
|
||||
it('should successfully execute a tool without interruption', async () => {
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'test-tool',
|
||||
params: { key: 'value' },
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
const responseData = await response.json()
|
||||
expect(responseData).toEqual({
|
||||
success: true,
|
||||
data: 'Tool executed successfully',
|
||||
})
|
||||
|
||||
expect(mockToolRegistryExecute).toHaveBeenCalledWith('test-tool', { key: 'value' })
|
||||
})
|
||||
|
||||
it('should handle tool execution with default empty params', async () => {
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'test-tool',
|
||||
// No params provided
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
const responseData = await response.json()
|
||||
expect(responseData).toEqual({
|
||||
success: true,
|
||||
data: 'Tool executed successfully',
|
||||
})
|
||||
|
||||
expect(mockToolRegistryExecute).toHaveBeenCalledWith('test-tool', {})
|
||||
})
|
||||
|
||||
it('should return 400 when tool requires interrupt but no toolCallId provided', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
// No toolCallId provided
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toBe(
|
||||
'This tool requires approval but no tool call ID was provided'
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle tool execution with interrupt - user approval', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to return accepted status immediately (simulate quick approval)
|
||||
mockRedisGet.mockResolvedValue(
|
||||
JSON.stringify({ status: 'accepted', message: 'User approved' })
|
||||
)
|
||||
|
||||
// Reset Date.now mock to not trigger timeout
|
||||
let mockTime = 1640995200000
|
||||
vi.spyOn(Date, 'now').mockImplementation(() => {
|
||||
mockTime += 100 // Small increment to avoid timeout
|
||||
return mockTime
|
||||
})
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: { key: 'value' },
|
||||
toolCallId: 'tool-call-123',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
const responseData = await response.json()
|
||||
expect(responseData).toEqual({
|
||||
success: true,
|
||||
data: 'Tool executed successfully',
|
||||
})
|
||||
|
||||
// Verify Redis operations
|
||||
expect(mockRedisSet).toHaveBeenCalledWith(
|
||||
'tool_call:tool-call-123',
|
||||
expect.stringContaining('"status":"pending"'),
|
||||
'EX',
|
||||
86400
|
||||
)
|
||||
expect(mockRedisGet).toHaveBeenCalledWith('tool_call:tool-call-123')
|
||||
expect(mockToolRegistryExecute).toHaveBeenCalledWith('interrupt-tool', {
|
||||
key: 'value',
|
||||
confirmationMessage: 'User approved',
|
||||
fullData: {
|
||||
message: 'User approved',
|
||||
status: 'accepted',
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle tool execution with interrupt - user rejection', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to return rejected status
|
||||
mockRedisGet.mockResolvedValue(
|
||||
JSON.stringify({ status: 'rejected', message: 'User rejected' })
|
||||
)
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-456',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200) // User rejection returns 200
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toBe(
|
||||
'The user decided to skip running this tool. This was a user decision.'
|
||||
)
|
||||
|
||||
// Tool should not be executed when rejected
|
||||
expect(mockToolRegistryExecute).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should handle tool execution with interrupt - error status', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to return error status
|
||||
mockRedisGet.mockResolvedValue(
|
||||
JSON.stringify({ status: 'error', message: 'Tool execution failed' })
|
||||
)
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-error',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toBe('Tool execution failed')
|
||||
})
|
||||
|
||||
it('should handle tool execution with interrupt - background status', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to return background status
|
||||
mockRedisGet.mockResolvedValue(
|
||||
JSON.stringify({ status: 'background', message: 'Running in background' })
|
||||
)
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-bg',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
const responseData = await response.json()
|
||||
expect(responseData).toEqual({
|
||||
success: true,
|
||||
data: 'Tool executed successfully',
|
||||
})
|
||||
|
||||
expect(mockToolRegistryExecute).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should handle tool execution with interrupt - success status', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to return success status
|
||||
mockRedisGet.mockResolvedValue(
|
||||
JSON.stringify({ status: 'success', message: 'Completed successfully' })
|
||||
)
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-success',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
const responseData = await response.json()
|
||||
expect(responseData).toEqual({
|
||||
success: true,
|
||||
data: 'Tool executed successfully',
|
||||
})
|
||||
|
||||
expect(mockToolRegistryExecute).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should handle tool execution with interrupt - timeout', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to never return a status (timeout scenario)
|
||||
mockRedisGet.mockResolvedValue(null)
|
||||
|
||||
// Mock Date.now to trigger timeout quickly
|
||||
let mockTime = 1640995200000
|
||||
vi.spyOn(Date, 'now').mockImplementation(() => {
|
||||
mockTime += 100000 // Add 100 seconds each call to trigger timeout
|
||||
return mockTime
|
||||
})
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-timeout',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(408) // Request Timeout
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toBe('Tool execution request timed out')
|
||||
|
||||
expect(mockToolRegistryExecute).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should handle unexpected status in interrupt flow', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to return unexpected status
|
||||
mockRedisGet.mockResolvedValue(
|
||||
JSON.stringify({ status: 'unknown-status', message: 'Unknown' })
|
||||
)
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-unknown',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toBe('Unexpected tool call status: unknown-status')
|
||||
})
|
||||
|
||||
it('should handle Redis client unavailable for interrupt flow', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
mockGetRedisClient.mockReturnValue(null)
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-no-redis',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(408) // Timeout due to Redis unavailable
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toBe('Tool execution request timed out')
|
||||
})
|
||||
|
||||
it('should handle no_op tool with confirmation message', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to return accepted status with message
|
||||
mockRedisGet.mockResolvedValue(
|
||||
JSON.stringify({ status: 'accepted', message: 'Confirmation message' })
|
||||
)
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'no_op',
|
||||
params: { existing: 'param' },
|
||||
toolCallId: 'tool-call-noop',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
|
||||
// Verify confirmation message was added to params
|
||||
expect(mockToolRegistryExecute).toHaveBeenCalledWith('no_op', {
|
||||
existing: 'param',
|
||||
confirmationMessage: 'Confirmation message',
|
||||
fullData: {
|
||||
message: 'Confirmation message',
|
||||
status: 'accepted',
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle Redis errors in interrupt flow', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to throw an error
|
||||
mockRedisGet.mockRejectedValue(new Error('Redis connection failed'))
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-redis-error',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(408) // Timeout due to Redis error
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toBe('Tool execution request timed out')
|
||||
})
|
||||
|
||||
it('should handle tool execution failure', async () => {
|
||||
mockToolRegistryExecute.mockResolvedValue({
|
||||
success: false,
|
||||
error: 'Tool execution failed',
|
||||
})
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'failing-tool',
|
||||
params: {},
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200) // Still returns 200, but with success: false
|
||||
const responseData = await response.json()
|
||||
expect(responseData).toEqual({
|
||||
success: false,
|
||||
error: 'Tool execution failed',
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle JSON parsing errors in request body', async () => {
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: '{invalid-json',
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toContain('JSON')
|
||||
})
|
||||
|
||||
it('should handle tool registry execution throwing an error', async () => {
|
||||
mockToolRegistryExecute.mockRejectedValue(new Error('Registry execution failed'))
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'error-tool',
|
||||
params: {},
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
const responseData = await response.json()
|
||||
expect(responseData.success).toBe(false)
|
||||
expect(responseData.error).toBe('Registry execution failed')
|
||||
})
|
||||
|
||||
it('should handle old format Redis status (string instead of JSON)', async () => {
|
||||
mockToolRegistryGet.mockReturnValue({ requiresInterrupt: true })
|
||||
|
||||
// Mock Redis to return old format (direct status string)
|
||||
mockRedisGet.mockResolvedValue('accepted')
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/copilot/methods', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': 'test-secret-key',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
methodId: 'interrupt-tool',
|
||||
params: {},
|
||||
toolCallId: 'tool-call-old-format',
|
||||
}),
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/copilot/methods/route')
|
||||
const response = await POST(req)
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
const responseData = await response.json()
|
||||
expect(responseData).toEqual({
|
||||
success: true,
|
||||
data: 'Tool executed successfully',
|
||||
})
|
||||
|
||||
expect(mockToolRegistryExecute).toHaveBeenCalled()
|
||||
})
|
||||
describe('copilot methods route placeholder', () => {
|
||||
it('loads test suite', () => {
|
||||
expect(true).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,395 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { copilotToolRegistry } from '@/lib/copilot/tools/server-tools/registry'
|
||||
import type { NotificationStatus } from '@/lib/copilot/types'
|
||||
import { checkCopilotApiKey, checkInternalApiKey } from '@/lib/copilot/utils'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getRedisClient } from '@/lib/redis'
|
||||
import { createErrorResponse } from '@/app/api/copilot/methods/utils'
|
||||
|
||||
const logger = createLogger('CopilotMethodsAPI')
|
||||
|
||||
/**
|
||||
* Add a tool call to Redis with 'pending' status
|
||||
*/
|
||||
async function addToolToRedis(toolCallId: string): Promise<void> {
|
||||
if (!toolCallId) {
|
||||
logger.warn('addToolToRedis: No tool call ID provided')
|
||||
return
|
||||
}
|
||||
|
||||
const redis = getRedisClient()
|
||||
if (!redis) {
|
||||
logger.warn('addToolToRedis: Redis client not available')
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
const key = `tool_call:${toolCallId}`
|
||||
const status: NotificationStatus = 'pending'
|
||||
|
||||
// Store as JSON object for consistency with confirm API
|
||||
const toolCallData = {
|
||||
status,
|
||||
message: null,
|
||||
timestamp: new Date().toISOString(),
|
||||
}
|
||||
|
||||
// Set with 24 hour expiry (86400 seconds)
|
||||
await redis.set(key, JSON.stringify(toolCallData), 'EX', 86400)
|
||||
|
||||
logger.info('Tool call added to Redis', {
|
||||
toolCallId,
|
||||
key,
|
||||
status,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to add tool call to Redis', {
|
||||
toolCallId,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Poll Redis for tool call status updates
|
||||
 * Returns when the status changes away from 'pending' (e.g. 'accepted' or 'rejected'), or times out after 10 minutes
|
||||
*/
|
||||
async function pollRedisForTool(
|
||||
toolCallId: string
|
||||
): Promise<{ status: NotificationStatus; message?: string; fullData?: any } | null> {
|
||||
const redis = getRedisClient()
|
||||
if (!redis) {
|
||||
logger.warn('pollRedisForTool: Redis client not available')
|
||||
return null
|
||||
}
|
||||
|
||||
const key = `tool_call:${toolCallId}`
|
||||
const timeout = 600000 // 10 minutes for long-running operations
|
||||
const pollInterval = 1000 // 1 second
|
||||
const startTime = Date.now()
|
||||
|
||||
while (Date.now() - startTime < timeout) {
|
||||
try {
|
||||
const redisValue = await redis.get(key)
|
||||
if (!redisValue) {
|
||||
// Wait before next poll
|
||||
await new Promise((resolve) => setTimeout(resolve, pollInterval))
|
||||
continue
|
||||
}
|
||||
|
||||
let status: NotificationStatus | null = null
|
||||
let message: string | undefined
|
||||
let fullData: any = null
|
||||
|
||||
// Try to parse as JSON (new format), fallback to string (old format)
|
||||
try {
|
||||
const parsedData = JSON.parse(redisValue)
|
||||
status = parsedData.status as NotificationStatus
|
||||
message = parsedData.message || undefined
|
||||
fullData = parsedData // Store the full parsed data
|
||||
} catch {
|
||||
// Fallback to old format (direct status string)
|
||||
status = redisValue as NotificationStatus
|
||||
}
|
||||
|
||||
if (status !== 'pending') {
|
||||
// Log the message found in redis prominently - always log, even if message is null/undefined
|
||||
logger.info('Redis poller found non-pending status', {
|
||||
toolCallId,
|
||||
foundMessage: message,
|
||||
messageType: typeof message,
|
||||
messageIsNull: message === null,
|
||||
messageIsUndefined: message === undefined,
|
||||
status,
|
||||
duration: Date.now() - startTime,
|
||||
rawRedisValue: redisValue,
|
||||
})
|
||||
|
||||
// Special logging for set environment variables tool when Redis status is found
|
||||
if (toolCallId && (status === 'accepted' || status === 'rejected')) {
|
||||
logger.info('SET_ENV_VARS: Redis polling found status update', {
|
||||
toolCallId,
|
||||
foundStatus: status,
|
||||
redisMessage: message,
|
||||
pollDuration: Date.now() - startTime,
|
||||
redisKey: `tool_call:${toolCallId}`,
|
||||
})
|
||||
}
|
||||
|
||||
return { status, message, fullData }
|
||||
}
|
||||
|
||||
// Wait before next poll
|
||||
await new Promise((resolve) => setTimeout(resolve, pollInterval))
|
||||
} catch (error) {
|
||||
logger.error('Error polling Redis for tool call status', {
|
||||
toolCallId,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
})
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
logger.warn('Tool call polling timed out', {
|
||||
toolCallId,
|
||||
timeout,
|
||||
})
|
||||
return null
|
||||
}
|
||||
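The poller above only reads `tool_call:<id>` keys; the write that flips a tool call from 'pending' to a terminal status lives in the separate confirm API, which is not part of this diff. A minimal sketch of that write, assuming it mirrors the JSON shape stored by addToolToRedis, would be:

// Hedged sketch of the confirm-side write (the real confirm route is not shown in this diff).
import { getRedisClient } from '@/lib/redis'

async function confirmToolCall(
  toolCallId: string,
  status: 'accepted' | 'rejected' | 'error' | 'background' | 'success',
  message?: string
): Promise<void> {
  const redis = getRedisClient()
  if (!redis) return
  const value = JSON.stringify({
    status,
    message: message ?? null,
    timestamp: new Date().toISOString(),
  })
  // Same key format and 24-hour expiry as addToolToRedis, so pollRedisForTool picks the update up.
  await redis.set(`tool_call:${toolCallId}`, value, 'EX', 86400)
}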
|
||||
/**
|
||||
* Handle tool calls that require user interruption/approval
|
||||
* Returns { approved: boolean, rejected: boolean, error?: boolean, message?: string } to distinguish between rejection, timeout, and error
|
||||
*/
|
||||
async function interruptHandler(toolCallId: string): Promise<{
|
||||
approved: boolean
|
||||
rejected: boolean
|
||||
error?: boolean
|
||||
message?: string
|
||||
fullData?: any
|
||||
}> {
|
||||
if (!toolCallId) {
|
||||
logger.error('interruptHandler: No tool call ID provided')
|
||||
return { approved: false, rejected: false, error: true, message: 'No tool call ID provided' }
|
||||
}
|
||||
|
||||
logger.info('Starting interrupt handler for tool call', { toolCallId })
|
||||
|
||||
try {
|
||||
// Step 1: Add tool to Redis with 'pending' status
|
||||
await addToolToRedis(toolCallId)
|
||||
|
||||
// Step 2: Poll Redis for status update
|
||||
const result = await pollRedisForTool(toolCallId)
|
||||
|
||||
if (!result) {
|
||||
logger.error('Failed to get tool call status or timed out', { toolCallId })
|
||||
return { approved: false, rejected: false }
|
||||
}
|
||||
|
||||
const { status, message, fullData } = result
|
||||
|
||||
if (status === 'rejected') {
|
||||
logger.info('Tool execution rejected by user', { toolCallId, message })
|
||||
return { approved: false, rejected: true, message, fullData }
|
||||
}
|
||||
|
||||
if (status === 'accepted') {
|
||||
logger.info('Tool execution approved by user', { toolCallId, message })
|
||||
return { approved: true, rejected: false, message, fullData }
|
||||
}
|
||||
|
||||
if (status === 'error') {
|
||||
logger.error('Tool execution failed with error', { toolCallId, message })
|
||||
return { approved: false, rejected: false, error: true, message, fullData }
|
||||
}
|
||||
|
||||
if (status === 'background') {
|
||||
logger.info('Tool execution moved to background', { toolCallId, message })
|
||||
return { approved: true, rejected: false, message, fullData }
|
||||
}
|
||||
|
||||
if (status === 'success') {
|
||||
logger.info('Tool execution completed successfully', { toolCallId, message })
|
||||
return { approved: true, rejected: false, message, fullData }
|
||||
}
|
||||
|
||||
logger.warn('Unexpected tool call status', { toolCallId, status, message })
|
||||
return {
|
||||
approved: false,
|
||||
rejected: false,
|
||||
error: true,
|
||||
message: `Unexpected tool call status: ${status}`,
|
||||
}
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error('Error in interrupt handler', {
|
||||
toolCallId,
|
||||
error: errorMessage,
|
||||
})
|
||||
return {
|
||||
approved: false,
|
||||
rejected: false,
|
||||
error: true,
|
||||
message: `Interrupt handler error: ${errorMessage}`,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const MethodExecutionSchema = z.object({
|
||||
methodId: z.string().min(1, 'Method ID is required'),
|
||||
params: z.record(z.any()).optional().default({}),
|
||||
toolCallId: z.string().nullable().optional().default(null),
|
||||
})
|
||||
|
||||
/**
|
||||
* POST /api/copilot/methods
|
||||
* Execute a method based on methodId with internal API key auth
|
||||
*/
|
||||
export async function POST(req: NextRequest) {
|
||||
const requestId = crypto.randomUUID()
|
||||
const startTime = Date.now()
|
||||
|
||||
try {
|
||||
// Evaluate both auth schemes; pass if either is valid
|
||||
const internalAuth = checkInternalApiKey(req)
|
||||
const copilotAuth = checkCopilotApiKey(req)
|
||||
const isAuthenticated = !!(internalAuth?.success || copilotAuth?.success)
|
||||
if (!isAuthenticated) {
|
||||
const errorMessage = copilotAuth.error || internalAuth.error || 'Authentication failed'
|
||||
return NextResponse.json(createErrorResponse(errorMessage), {
|
||||
status: 401,
|
||||
})
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
const { methodId, params, toolCallId } = MethodExecutionSchema.parse(body)
|
||||
|
||||
logger.info(`[${requestId}] Method execution request`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
hasParams: !!params && Object.keys(params).length > 0,
|
||||
})
|
||||
|
||||
// Check if tool exists in registry
|
||||
if (!copilotToolRegistry.has(methodId)) {
|
||||
logger.error(`[${requestId}] Tool not found in registry: ${methodId}`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
availableTools: copilotToolRegistry.getAvailableIds(),
|
||||
registrySize: copilotToolRegistry.getAvailableIds().length,
|
||||
})
|
||||
return NextResponse.json(
|
||||
createErrorResponse(
|
||||
`Unknown method: ${methodId}. Available methods: ${copilotToolRegistry.getAvailableIds().join(', ')}`
|
||||
),
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Tool found in registry: ${methodId}`, {
|
||||
toolCallId,
|
||||
})
|
||||
|
||||
// Check if the tool requires interrupt/approval
|
||||
const tool = copilotToolRegistry.get(methodId)
|
||||
if (tool?.requiresInterrupt) {
|
||||
if (!toolCallId) {
|
||||
logger.warn(`[${requestId}] Tool requires interrupt but no toolCallId provided`, {
|
||||
methodId,
|
||||
})
|
||||
return NextResponse.json(
|
||||
createErrorResponse('This tool requires approval but no tool call ID was provided'),
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Tool requires interrupt, starting approval process`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
})
|
||||
|
||||
// Handle interrupt flow
|
||||
const { approved, rejected, error, message, fullData } = await interruptHandler(toolCallId)
|
||||
|
||||
if (rejected) {
|
||||
logger.info(`[${requestId}] Tool execution rejected by user`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
message,
|
||||
})
|
||||
return NextResponse.json(
|
||||
createErrorResponse(
|
||||
'The user decided to skip running this tool. This was a user decision.'
|
||||
),
|
||||
{ status: 200 } // Changed to 200 - user rejection is a valid response
|
||||
)
|
||||
}
|
||||
|
||||
if (error) {
|
||||
logger.error(`[${requestId}] Tool execution failed with error`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
message,
|
||||
})
|
||||
return NextResponse.json(
|
||||
createErrorResponse(message || 'Tool execution failed with unknown error'),
|
||||
{ status: 500 } // 500 Internal Server Error
|
||||
)
|
||||
}
|
||||
|
||||
if (!approved) {
|
||||
logger.warn(`[${requestId}] Tool execution timed out`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
})
|
||||
return NextResponse.json(
|
||||
createErrorResponse('Tool execution request timed out'),
|
||||
{ status: 408 } // 408 Request Timeout
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Tool execution approved by user`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
message,
|
||||
})
|
||||
|
||||
// For tools that need confirmation data, pass the message and/or fullData as parameters
|
||||
if (message) {
|
||||
params.confirmationMessage = message
|
||||
}
|
||||
if (fullData) {
|
||||
params.fullData = fullData
|
||||
}
|
||||
}
|
||||
|
||||
// Execute the tool directly via registry
|
||||
const result = await copilotToolRegistry.execute(methodId, params)
|
||||
|
||||
logger.info(`[${requestId}] Tool execution result:`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
success: result.success,
|
||||
hasData: !!result.data,
|
||||
hasError: !!result.error,
|
||||
})
|
||||
|
||||
const duration = Date.now() - startTime
|
||||
logger.info(`[${requestId}] Method execution completed: ${methodId}`, {
|
||||
methodId,
|
||||
toolCallId,
|
||||
duration,
|
||||
success: result.success,
|
||||
})
|
||||
|
||||
return NextResponse.json(result)
|
||||
} catch (error) {
|
||||
const duration = Date.now() - startTime
|
||||
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.error(`[${requestId}] Request validation error:`, {
|
||||
duration,
|
||||
errors: error.errors,
|
||||
})
|
||||
return NextResponse.json(
|
||||
createErrorResponse(
|
||||
`Invalid request data: ${error.errors.map((e) => e.message).join(', ')}`
|
||||
),
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Unexpected error:`, {
|
||||
duration,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
createErrorResponse(error instanceof Error ? error.message : 'Internal server error'),
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
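Before its removal, this route was exercised with an internal API key header, as the tests above show. A representative call would have looked roughly like the sketch below; the key value and method id are placeholders.

// Illustrative request to the (now removed) methods route; values are placeholders.
async function callCopilotMethod(
  methodId: string,
  params: Record<string, unknown>,
  toolCallId?: string
) {
  const res = await fetch('http://localhost:3000/api/copilot/methods', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'x-api-key': process.env.INTERNAL_API_SECRET ?? '',
    },
    body: JSON.stringify({ methodId, params, toolCallId: toolCallId ?? null }),
  })
  return res.json() // { success: boolean, data?: any, error?: string }
}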
@@ -1,14 +0,0 @@
import type { CopilotToolResponse } from '@/lib/copilot/tools/server-tools/base'
import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('CopilotMethodsUtils')

/**
 * Create a standardized error response
 */
export function createErrorResponse(error: string): CopilotToolResponse {
  return {
    success: false,
    error,
  }
}
125 apps/sim/app/api/copilot/tools/mark-complete/route.ts (new file)
@@ -0,0 +1,125 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import {
|
||||
authenticateCopilotRequestSessionOnly,
|
||||
createBadRequestResponse,
|
||||
createInternalServerErrorResponse,
|
||||
createRequestTracker,
|
||||
createUnauthorizedResponse,
|
||||
} from '@/lib/copilot/auth'
|
||||
import { env } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
|
||||
|
||||
const logger = createLogger('CopilotMarkToolCompleteAPI')
|
||||
|
||||
// Sim Agent API configuration
|
||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
||||
|
||||
// Schema for mark-complete request
|
||||
const MarkCompleteSchema = z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
status: z.number().int(),
|
||||
message: z.any().optional(),
|
||||
data: z.any().optional(),
|
||||
})
|
||||
|
||||
/**
|
||||
* POST /api/copilot/tools/mark-complete
|
||||
* Proxy to Sim Agent: POST /api/tools/mark-complete
|
||||
*/
|
||||
export async function POST(req: NextRequest) {
|
||||
const tracker = createRequestTracker()
|
||||
|
||||
try {
|
||||
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
||||
if (!isAuthenticated || !userId) {
|
||||
return createUnauthorizedResponse()
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
|
||||
// Log raw body shape for diagnostics (avoid dumping huge payloads)
|
||||
try {
|
||||
const bodyPreview = JSON.stringify(body).slice(0, 300)
|
||||
logger.debug(`[${tracker.requestId}] Incoming mark-complete raw body preview`, {
|
||||
preview: `${bodyPreview}${bodyPreview.length === 300 ? '...' : ''}`,
|
||||
})
|
||||
} catch {}
|
||||
|
||||
const parsed = MarkCompleteSchema.parse(body)
|
||||
|
||||
const messagePreview = (() => {
|
||||
try {
|
||||
const s =
|
||||
typeof parsed.message === 'string' ? parsed.message : JSON.stringify(parsed.message)
|
||||
return s ? `${s.slice(0, 200)}${s.length > 200 ? '...' : ''}` : undefined
|
||||
} catch {
|
||||
return undefined
|
||||
}
|
||||
})()
|
||||
|
||||
logger.info(`[${tracker.requestId}] Forwarding tool mark-complete`, {
|
||||
userId,
|
||||
toolCallId: parsed.id,
|
||||
toolName: parsed.name,
|
||||
status: parsed.status,
|
||||
hasMessage: parsed.message !== undefined,
|
||||
hasData: parsed.data !== undefined,
|
||||
messagePreview,
|
||||
agentUrl: `${SIM_AGENT_API_URL}/api/tools/mark-complete`,
|
||||
})
|
||||
|
||||
const agentRes = await fetch(`${SIM_AGENT_API_URL}/api/tools/mark-complete`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
|
||||
},
|
||||
body: JSON.stringify(parsed),
|
||||
})
|
||||
|
||||
// Attempt to parse agent response JSON
|
||||
let agentJson: any = null
|
||||
let agentText: string | null = null
|
||||
try {
|
||||
agentJson = await agentRes.json()
|
||||
} catch (_) {
|
||||
try {
|
||||
agentText = await agentRes.text()
|
||||
} catch {}
|
||||
}
|
||||
|
||||
logger.info(`[${tracker.requestId}] Agent responded to mark-complete`, {
|
||||
status: agentRes.status,
|
||||
ok: agentRes.ok,
|
||||
responseJsonPreview: agentJson ? JSON.stringify(agentJson).slice(0, 300) : undefined,
|
||||
responseTextPreview: agentText ? agentText.slice(0, 300) : undefined,
|
||||
})
|
||||
|
||||
if (agentRes.ok) {
|
||||
return NextResponse.json({ success: true })
|
||||
}
|
||||
|
||||
const errorMessage =
|
||||
agentJson?.error || agentText || `Agent responded with status ${agentRes.status}`
|
||||
const status = agentRes.status >= 500 ? 500 : 400
|
||||
|
||||
logger.warn(`[${tracker.requestId}] Mark-complete failed`, {
|
||||
status,
|
||||
error: errorMessage,
|
||||
})
|
||||
|
||||
return NextResponse.json({ success: false, error: errorMessage }, { status })
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${tracker.requestId}] Invalid mark-complete request body`, {
|
||||
issues: error.issues,
|
||||
})
|
||||
return createBadRequestResponse('Invalid request body for mark-complete')
|
||||
}
|
||||
logger.error(`[${tracker.requestId}] Failed to proxy mark-complete:`, error)
|
||||
return createInternalServerErrorResponse('Failed to mark tool as complete')
|
||||
}
|
||||
}
|
||||
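A request accepted by MarkCompleteSchema above is small. For reference, a hedged example of what the copilot client might send follows; all field values are illustrative.

// Illustrative mark-complete call; ids, names, and status codes are placeholder values.
await fetch('/api/copilot/tools/mark-complete', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    id: 'toolcall_abc123',        // tool call id
    name: 'run_workflow',         // tool name (hypothetical)
    status: 200,                  // integer status reported back to the agent
    message: 'Workflow executed', // optional, forwarded as-is
    data: { output: {} },         // optional, forwarded as-is
  }),
})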

@@ -213,24 +213,81 @@ function createUserFriendlyErrorMessage(
}

/**
 * Resolves environment variables and tags in code
 * @param code - Code with variables
 * @param params - Parameters that may contain variable values
 * @param envVars - Environment variables from the workflow
 * @returns Resolved code
 * Resolves workflow variables with <variable.name> syntax
 */
function resolveWorkflowVariables(
  code: string,
  workflowVariables: Record<string, any>,
  contextVariables: Record<string, any>
): string {
  let resolvedCode = code

function resolveCodeVariables(
  const variableMatches = resolvedCode.match(/<variable\.([^>]+)>/g) || []
  for (const match of variableMatches) {
    const variableName = match.slice('<variable.'.length, -1).trim()

    // Find the variable by name (workflowVariables is indexed by ID, values are variable objects)
    const foundVariable = Object.entries(workflowVariables).find(
      ([_, variable]) => (variable.name || '').replace(/\s+/g, '') === variableName
    )

    if (foundVariable) {
      const variable = foundVariable[1]
      // Get the typed value - handle different variable types
      let variableValue = variable.value

      if (variable.value !== undefined && variable.value !== null) {
        try {
          // Handle 'string' type the same as 'plain' for backward compatibility
          const type = variable.type === 'string' ? 'plain' : variable.type

          // For plain text, use exactly what's entered without modifications
          if (type === 'plain' && typeof variableValue === 'string') {
            // Use as-is for plain text
          } else if (type === 'number') {
            variableValue = Number(variableValue)
          } else if (type === 'boolean') {
            variableValue = variableValue === 'true' || variableValue === true
          } else if (type === 'json') {
            try {
              variableValue =
                typeof variableValue === 'string' ? JSON.parse(variableValue) : variableValue
            } catch {
              // Keep original value if JSON parsing fails
            }
          }
        } catch (error) {
          // Fallback to original value on error
          variableValue = variable.value
        }
      }

      // Create a safe variable reference
      const safeVarName = `__variable_${variableName.replace(/[^a-zA-Z0-9_]/g, '_')}`
      contextVariables[safeVarName] = variableValue

      // Replace the variable reference with the safe variable name
      resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), safeVarName)
    } else {
      // Variable not found - replace with empty string to avoid syntax errors
      resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), '')
    }
  }

  return resolvedCode
}

/**
 * Resolves environment variables with {{var_name}} syntax
 */
function resolveEnvironmentVariables(
  code: string,
  params: Record<string, any>,
  envVars: Record<string, string> = {},
  blockData: Record<string, any> = {},
  blockNameMapping: Record<string, string> = {}
): { resolvedCode: string; contextVariables: Record<string, any> } {
  envVars: Record<string, string>,
  contextVariables: Record<string, any>
): string {
  let resolvedCode = code
  const contextVariables: Record<string, any> = {}

  // Resolve environment variables with {{var_name}} syntax
  const envVarMatches = resolvedCode.match(/\{\{([^}]+)\}\}/g) || []
  for (const match of envVarMatches) {
    const varName = match.slice(2, -2).trim()
@@ -245,7 +302,21 @@ function resolveCodeVariables(
    resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), safeVarName)
  }

  // Resolve tags with <tag_name> syntax (including nested paths like <block.response.data>)
  return resolvedCode
}

/**
 * Resolves tags with <tag_name> syntax (including nested paths like <block.response.data>)
 */
function resolveTagVariables(
  code: string,
  params: Record<string, any>,
  blockData: Record<string, any>,
  blockNameMapping: Record<string, string>,
  contextVariables: Record<string, any>
): string {
  let resolvedCode = code

  const tagMatches = resolvedCode.match(/<([a-zA-Z_][a-zA-Z0-9_.]*[a-zA-Z0-9_])>/g) || []

  for (const match of tagMatches) {
@@ -300,6 +371,42 @@ function resolveCodeVariables(
    resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), safeVarName)
  }

  return resolvedCode
}

/**
 * Resolves environment variables and tags in code
 * @param code - Code with variables
 * @param params - Parameters that may contain variable values
 * @param envVars - Environment variables from the workflow
 * @returns Resolved code
 */
function resolveCodeVariables(
  code: string,
  params: Record<string, any>,
  envVars: Record<string, string> = {},
  blockData: Record<string, any> = {},
  blockNameMapping: Record<string, string> = {},
  workflowVariables: Record<string, any> = {}
): { resolvedCode: string; contextVariables: Record<string, any> } {
  let resolvedCode = code
  const contextVariables: Record<string, any> = {}

  // Resolve workflow variables with <variable.name> syntax first
  resolvedCode = resolveWorkflowVariables(resolvedCode, workflowVariables, contextVariables)

  // Resolve environment variables with {{var_name}} syntax
  resolvedCode = resolveEnvironmentVariables(resolvedCode, params, envVars, contextVariables)

  // Resolve tags with <tag_name> syntax (including nested paths like <block.response.data>)
  resolvedCode = resolveTagVariables(
    resolvedCode,
    params,
    blockData,
    blockNameMapping,
    contextVariables
  )

  return { resolvedCode, contextVariables }
}

@@ -338,6 +445,7 @@ export async function POST(req: NextRequest) {
      envVars = {},
      blockData = {},
      blockNameMapping = {},
      workflowVariables = {},
      workflowId,
      isCustomTool = false,
    } = body
@@ -360,7 +468,8 @@ export async function POST(req: NextRequest) {
      executionParams,
      envVars,
      blockData,
      blockNameMapping
      blockNameMapping,
      workflowVariables
    )
    resolvedCode = codeResolution.resolvedCode
    const contextVariables = codeResolution.contextVariables
@@ -368,8 +477,8 @@ export async function POST(req: NextRequest) {
    const executionMethod = 'vm' // Default execution method

    logger.info(`[${requestId}] Using VM for code execution`, {
      resolvedCode,
      hasEnvVars: Object.keys(envVars).length > 0,
      hasWorkflowVariables: Object.keys(workflowVariables).length > 0,
    })

    // Create a secure context with console logging
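
The refactor above splits resolution into three passes — workflow variables (<variable.name>), environment variables ({{var_name}}), and block tags (<block.path>) — that all write into one shared contextVariables map before the code reaches the VM. A minimal usage sketch; the variable names, env values, and block outputs below are illustrative only, while the call signature follows the diff:

    // Hypothetical inputs for illustration; shapes follow resolveCodeVariables above.
    const { resolvedCode, contextVariables } = resolveCodeVariables(
      'return <variable.UserName> + {{API_KEY}} + <agent1.content>',
      {}, // params
      { API_KEY: 'sk-test' }, // envVars (placeholder)
      { agent1: { content: 'hello' } }, // blockData (placeholder)
      { agent1: 'agent1' }, // blockNameMapping (placeholder)
      { 'var-id-1': { name: 'User Name', type: 'plain', value: 'Ada' } } // workflowVariables
    )
    // resolvedCode now references safe identifiers such as __variable_UserName, and
    // contextVariables maps each safe name to its resolved value, so raw template
    // syntax never reaches the sandboxed execution context.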
@@ -1,12 +1,13 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { Resend } from 'resend'
|
||||
import { z } from 'zod'
|
||||
import { renderHelpConfirmationEmail } from '@/components/emails'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { sendEmail } from '@/lib/email/mailer'
|
||||
import { getFromEmailAddress } from '@/lib/email/utils'
|
||||
import { env } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getEmailDomain } from '@/lib/urls/utils'
|
||||
|
||||
const resend = env.RESEND_API_KEY ? new Resend(env.RESEND_API_KEY) : null
|
||||
const logger = createLogger('HelpAPI')
|
||||
|
||||
const helpFormSchema = z.object({
|
||||
@@ -28,18 +29,6 @@ export async function POST(req: NextRequest) {
|
||||
|
||||
const email = session.user.email
|
||||
|
||||
// Check if Resend API key is configured
|
||||
if (!resend) {
|
||||
logger.error(`[${requestId}] RESEND_API_KEY not configured`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error:
|
||||
'Email service not configured. Please set RESEND_API_KEY in environment variables.',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
|
||||
// Handle multipart form data
|
||||
const formData = await req.formData()
|
||||
|
||||
@@ -54,18 +43,18 @@ export async function POST(req: NextRequest) {
|
||||
})
|
||||
|
||||
// Validate the form data
|
||||
const result = helpFormSchema.safeParse({
|
||||
const validationResult = helpFormSchema.safeParse({
|
||||
subject,
|
||||
message,
|
||||
type,
|
||||
})
|
||||
|
||||
if (!result.success) {
|
||||
if (!validationResult.success) {
|
||||
logger.warn(`[${requestId}] Invalid help request data`, {
|
||||
errors: result.error.format(),
|
||||
errors: validationResult.error.format(),
|
||||
})
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: result.error.format() },
|
||||
{ error: 'Invalid request data', details: validationResult.error.format() },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
@@ -103,63 +92,60 @@ ${message}
|
||||
emailText += `\n\n${images.length} image(s) attached.`
|
||||
}
|
||||
|
||||
// Send email using Resend
|
||||
const { error } = await resend.emails.send({
|
||||
from: `Sim <noreply@${env.EMAIL_DOMAIN || getEmailDomain()}>`,
|
||||
const emailResult = await sendEmail({
|
||||
to: [`help@${env.EMAIL_DOMAIN || getEmailDomain()}`],
|
||||
subject: `[${type.toUpperCase()}] ${subject}`,
|
||||
replyTo: email,
|
||||
text: emailText,
|
||||
from: getFromEmailAddress(),
|
||||
replyTo: email,
|
||||
emailType: 'transactional',
|
||||
attachments: images.map((image) => ({
|
||||
filename: image.filename,
|
||||
content: image.content.toString('base64'),
|
||||
contentType: image.contentType,
|
||||
disposition: 'attachment', // Explicitly set as attachment
|
||||
disposition: 'attachment',
|
||||
})),
|
||||
})
|
||||
|
||||
if (error) {
|
||||
logger.error(`[${requestId}] Error sending help request email`, error)
|
||||
if (!emailResult.success) {
|
||||
logger.error(`[${requestId}] Error sending help request email`, emailResult.message)
|
||||
return NextResponse.json({ error: 'Failed to send email' }, { status: 500 })
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Help request email sent successfully`)
|
||||
|
||||
// Send confirmation email to the user
|
||||
await resend.emails
|
||||
.send({
|
||||
from: `Sim <noreply@${env.EMAIL_DOMAIN || getEmailDomain()}>`,
|
||||
try {
|
||||
const confirmationHtml = await renderHelpConfirmationEmail(
|
||||
email,
|
||||
type as 'bug' | 'feedback' | 'feature_request' | 'other',
|
||||
images.length
|
||||
)
|
||||
|
||||
await sendEmail({
|
||||
to: [email],
|
||||
subject: `Your ${type} request has been received: ${subject}`,
|
||||
text: `
|
||||
Hello,
|
||||
|
||||
Thank you for your ${type} submission. We've received your request and will get back to you as soon as possible.
|
||||
|
||||
Your message:
|
||||
${message}
|
||||
|
||||
${images.length > 0 ? `You attached ${images.length} image(s).` : ''}
|
||||
|
||||
Best regards,
|
||||
The Sim Team
|
||||
`,
|
||||
html: confirmationHtml,
|
||||
from: getFromEmailAddress(),
|
||||
replyTo: `help@${env.EMAIL_DOMAIN || getEmailDomain()}`,
|
||||
emailType: 'transactional',
|
||||
})
|
||||
.catch((err) => {
|
||||
logger.warn(`[${requestId}] Failed to send confirmation email`, err)
|
||||
})
|
||||
} catch (err) {
|
||||
logger.warn(`[${requestId}] Failed to send confirmation email`, err)
|
||||
}
|
||||
|
||||
return NextResponse.json(
|
||||
{ success: true, message: 'Help request submitted successfully' },
|
||||
{ status: 200 }
|
||||
)
|
||||
} catch (error) {
|
||||
// Check if error is related to missing API key
|
||||
if (error instanceof Error && error.message.includes('API key')) {
|
||||
logger.error(`[${requestId}] API key configuration error`, error)
|
||||
if (error instanceof Error && error.message.includes('not configured')) {
|
||||
logger.error(`[${requestId}] Email service configuration error`, error)
|
||||
return NextResponse.json(
|
||||
{ error: 'Email service configuration error. Please check your RESEND_API_KEY.' },
|
||||
{
|
||||
error:
|
||||
'Email service configuration error. Please check your email service configuration.',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
|
||||

@@ -1,4 +1,4 @@
import { runs } from '@trigger.dev/sdk/v3'
import { runs } from '@trigger.dev/sdk'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
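
The hunk above moves the runs import off the '/v3' subpath to the Trigger.dev v4 SDK entry point; call sites stay the same. A minimal, hedged sketch (the helper name, run ID, and status access are illustrative, not taken from this route):

    import { runs } from '@trigger.dev/sdk'

    // Only the import specifier changes; retrieving a run works as before.
    export async function getRunStatus(runId: string) {
      const run = await runs.retrieve(runId) // illustrative call
      return run.status
    }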
@@ -4,15 +4,50 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { describe, expect, it, vi } from 'vitest'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
vi.mock('drizzle-orm')
|
||||
vi.mock('@/lib/logs/console/logger')
|
||||
vi.mock('@/lib/logs/console/logger', () => ({
|
||||
createLogger: vi.fn(() => ({
|
||||
info: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
})),
|
||||
}))
|
||||
vi.mock('@/db')
|
||||
vi.mock('@/lib/documents/utils', () => ({
|
||||
retryWithExponentialBackoff: (fn: any) => fn(),
|
||||
}))
|
||||
|
||||
import { handleTagAndVectorSearch, handleTagOnlySearch, handleVectorOnlySearch } from './utils'
|
||||
vi.stubGlobal(
|
||||
'fetch',
|
||||
vi.fn().mockResolvedValue({
|
||||
ok: true,
|
||||
json: async () => ({
|
||||
data: [{ embedding: [0.1, 0.2, 0.3] }],
|
||||
}),
|
||||
})
|
||||
)
|
||||
|
||||
vi.mock('@/lib/env', () => ({
|
||||
env: {},
|
||||
isTruthy: (value: string | boolean | number | undefined) =>
|
||||
typeof value === 'string' ? value === 'true' || value === '1' : Boolean(value),
|
||||
}))
|
||||
|
||||
import {
|
||||
generateSearchEmbedding,
|
||||
handleTagAndVectorSearch,
|
||||
handleTagOnlySearch,
|
||||
handleVectorOnlySearch,
|
||||
} from './utils'
|
||||
|
||||
describe('Knowledge Search Utils', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('handleTagOnlySearch', () => {
|
||||
it('should throw error when no filters provided', async () => {
|
||||
const params = {
|
||||
@@ -140,4 +175,251 @@ describe('Knowledge Search Utils', () => {
|
||||
expect(params.distanceThreshold).toBe(0.8)
|
||||
})
|
||||
})
|
||||
|
||||
describe('generateSearchEmbedding', () => {
|
||||
it('should use Azure OpenAI when KB-specific config is provided', async () => {
|
||||
const { env } = await import('@/lib/env')
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
Object.assign(env, {
|
||||
AZURE_OPENAI_API_KEY: 'test-azure-key',
|
||||
AZURE_OPENAI_ENDPOINT: 'https://test.openai.azure.com',
|
||||
AZURE_OPENAI_API_VERSION: '2024-12-01-preview',
|
||||
KB_OPENAI_MODEL_NAME: 'text-embedding-ada-002',
|
||||
OPENAI_API_KEY: 'test-openai-key',
|
||||
})
|
||||
|
||||
const fetchSpy = vi.mocked(fetch)
|
||||
fetchSpy.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: async () => ({
|
||||
data: [{ embedding: [0.1, 0.2, 0.3] }],
|
||||
}),
|
||||
} as any)
|
||||
|
||||
const result = await generateSearchEmbedding('test query')
|
||||
|
||||
expect(fetchSpy).toHaveBeenCalledWith(
|
||||
'https://test.openai.azure.com/openai/deployments/text-embedding-ada-002/embeddings?api-version=2024-12-01-preview',
|
||||
expect.objectContaining({
|
||||
headers: expect.objectContaining({
|
||||
'api-key': 'test-azure-key',
|
||||
}),
|
||||
})
|
||||
)
|
||||
expect(result).toEqual([0.1, 0.2, 0.3])
|
||||
|
||||
// Clean up
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
})
|
||||
|
||||
it('should fallback to OpenAI when no KB Azure config provided', async () => {
|
||||
const { env } = await import('@/lib/env')
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
Object.assign(env, {
|
||||
OPENAI_API_KEY: 'test-openai-key',
|
||||
})
|
||||
|
||||
const fetchSpy = vi.mocked(fetch)
|
||||
fetchSpy.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: async () => ({
|
||||
data: [{ embedding: [0.1, 0.2, 0.3] }],
|
||||
}),
|
||||
} as any)
|
||||
|
||||
const result = await generateSearchEmbedding('test query')
|
||||
|
||||
expect(fetchSpy).toHaveBeenCalledWith(
|
||||
'https://api.openai.com/v1/embeddings',
|
||||
expect.objectContaining({
|
||||
headers: expect.objectContaining({
|
||||
Authorization: 'Bearer test-openai-key',
|
||||
}),
|
||||
})
|
||||
)
|
||||
expect(result).toEqual([0.1, 0.2, 0.3])
|
||||
|
||||
// Clean up
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
})
|
||||
|
||||
it('should use default API version when not provided in Azure config', async () => {
|
||||
const { env } = await import('@/lib/env')
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
Object.assign(env, {
|
||||
AZURE_OPENAI_API_KEY: 'test-azure-key',
|
||||
AZURE_OPENAI_ENDPOINT: 'https://test.openai.azure.com',
|
||||
KB_OPENAI_MODEL_NAME: 'custom-embedding-model',
|
||||
OPENAI_API_KEY: 'test-openai-key',
|
||||
})
|
||||
|
||||
const fetchSpy = vi.mocked(fetch)
|
||||
fetchSpy.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: async () => ({
|
||||
data: [{ embedding: [0.1, 0.2, 0.3] }],
|
||||
}),
|
||||
} as any)
|
||||
|
||||
await generateSearchEmbedding('test query')
|
||||
|
||||
expect(fetchSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining('api-version='),
|
||||
expect.any(Object)
|
||||
)
|
||||
|
||||
// Clean up
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
})
|
||||
|
||||
it('should use custom model name when provided in Azure config', async () => {
|
||||
const { env } = await import('@/lib/env')
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
Object.assign(env, {
|
||||
AZURE_OPENAI_API_KEY: 'test-azure-key',
|
||||
AZURE_OPENAI_ENDPOINT: 'https://test.openai.azure.com',
|
||||
AZURE_OPENAI_API_VERSION: '2024-12-01-preview',
|
||||
KB_OPENAI_MODEL_NAME: 'custom-embedding-model',
|
||||
OPENAI_API_KEY: 'test-openai-key',
|
||||
})
|
||||
|
||||
const fetchSpy = vi.mocked(fetch)
|
||||
fetchSpy.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: async () => ({
|
||||
data: [{ embedding: [0.1, 0.2, 0.3] }],
|
||||
}),
|
||||
} as any)
|
||||
|
||||
await generateSearchEmbedding('test query', 'text-embedding-3-small')
|
||||
|
||||
expect(fetchSpy).toHaveBeenCalledWith(
|
||||
'https://test.openai.azure.com/openai/deployments/custom-embedding-model/embeddings?api-version=2024-12-01-preview',
|
||||
expect.any(Object)
|
||||
)
|
||||
|
||||
// Clean up
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
})
|
||||
|
||||
it('should throw error when no API configuration provided', async () => {
|
||||
const { env } = await import('@/lib/env')
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
|
||||
await expect(generateSearchEmbedding('test query')).rejects.toThrow(
|
||||
'Either OPENAI_API_KEY or Azure OpenAI configuration (AZURE_OPENAI_API_KEY + AZURE_OPENAI_ENDPOINT) must be configured'
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle Azure OpenAI API errors properly', async () => {
|
||||
const { env } = await import('@/lib/env')
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
Object.assign(env, {
|
||||
AZURE_OPENAI_API_KEY: 'test-azure-key',
|
||||
AZURE_OPENAI_ENDPOINT: 'https://test.openai.azure.com',
|
||||
AZURE_OPENAI_API_VERSION: '2024-12-01-preview',
|
||||
KB_OPENAI_MODEL_NAME: 'text-embedding-ada-002',
|
||||
})
|
||||
|
||||
const fetchSpy = vi.mocked(fetch)
|
||||
fetchSpy.mockResolvedValueOnce({
|
||||
ok: false,
|
||||
status: 404,
|
||||
statusText: 'Not Found',
|
||||
text: async () => 'Deployment not found',
|
||||
} as any)
|
||||
|
||||
await expect(generateSearchEmbedding('test query')).rejects.toThrow('Embedding API failed')
|
||||
|
||||
// Clean up
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
})
|
||||
|
||||
it('should handle OpenAI API errors properly', async () => {
|
||||
const { env } = await import('@/lib/env')
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
Object.assign(env, {
|
||||
OPENAI_API_KEY: 'test-openai-key',
|
||||
})
|
||||
|
||||
const fetchSpy = vi.mocked(fetch)
|
||||
fetchSpy.mockResolvedValueOnce({
|
||||
ok: false,
|
||||
status: 429,
|
||||
statusText: 'Too Many Requests',
|
||||
text: async () => 'Rate limit exceeded',
|
||||
} as any)
|
||||
|
||||
await expect(generateSearchEmbedding('test query')).rejects.toThrow('Embedding API failed')
|
||||
|
||||
// Clean up
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
})
|
||||
|
||||
it('should include correct request body for Azure OpenAI', async () => {
|
||||
const { env } = await import('@/lib/env')
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
Object.assign(env, {
|
||||
AZURE_OPENAI_API_KEY: 'test-azure-key',
|
||||
AZURE_OPENAI_ENDPOINT: 'https://test.openai.azure.com',
|
||||
AZURE_OPENAI_API_VERSION: '2024-12-01-preview',
|
||||
KB_OPENAI_MODEL_NAME: 'text-embedding-ada-002',
|
||||
})
|
||||
|
||||
const fetchSpy = vi.mocked(fetch)
|
||||
fetchSpy.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: async () => ({
|
||||
data: [{ embedding: [0.1, 0.2, 0.3] }],
|
||||
}),
|
||||
} as any)
|
||||
|
||||
await generateSearchEmbedding('test query')
|
||||
|
||||
expect(fetchSpy).toHaveBeenCalledWith(
|
||||
expect.any(String),
|
||||
expect.objectContaining({
|
||||
body: JSON.stringify({
|
||||
input: ['test query'],
|
||||
encoding_format: 'float',
|
||||
}),
|
||||
})
|
||||
)
|
||||
|
||||
// Clean up
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
})
|
||||
|
||||
it('should include correct request body for OpenAI', async () => {
|
||||
const { env } = await import('@/lib/env')
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
Object.assign(env, {
|
||||
OPENAI_API_KEY: 'test-openai-key',
|
||||
})
|
||||
|
||||
const fetchSpy = vi.mocked(fetch)
|
||||
fetchSpy.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: async () => ({
|
||||
data: [{ embedding: [0.1, 0.2, 0.3] }],
|
||||
}),
|
||||
} as any)
|
||||
|
||||
await generateSearchEmbedding('test query', 'text-embedding-3-small')
|
||||
|
||||
expect(fetchSpy).toHaveBeenCalledWith(
|
||||
expect.any(String),
|
||||
expect.objectContaining({
|
||||
body: JSON.stringify({
|
||||
input: ['test query'],
|
||||
model: 'text-embedding-3-small',
|
||||
encoding_format: 'float',
|
||||
}),
|
||||
})
|
||||
)
|
||||
|
||||
// Clean up
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,22 +1,10 @@
|
||||
import { and, eq, inArray, sql } from 'drizzle-orm'
|
||||
import { retryWithExponentialBackoff } from '@/lib/documents/utils'
|
||||
import { env } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { db } from '@/db'
|
||||
import { embedding } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('KnowledgeSearchUtils')
|
||||
|
||||
export class APIError extends Error {
|
||||
public status: number
|
||||
|
||||
constructor(message: string, status: number) {
|
||||
super(message)
|
||||
this.name = 'APIError'
|
||||
this.status = status
|
||||
}
|
||||
}
|
||||
|
||||
export interface SearchResult {
|
||||
id: string
|
||||
content: string
|
||||
@@ -41,61 +29,8 @@ export interface SearchParams {
|
||||
distanceThreshold?: number
|
||||
}
|
||||
|
||||
export async function generateSearchEmbedding(query: string): Promise<number[]> {
|
||||
const openaiApiKey = env.OPENAI_API_KEY
|
||||
if (!openaiApiKey) {
|
||||
throw new Error('OPENAI_API_KEY not configured')
|
||||
}
|
||||
|
||||
try {
|
||||
const embedding = await retryWithExponentialBackoff(
|
||||
async () => {
|
||||
const response = await fetch('https://api.openai.com/v1/embeddings', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${openaiApiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
input: query,
|
||||
model: 'text-embedding-3-small',
|
||||
encoding_format: 'float',
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
const error = new APIError(
|
||||
`OpenAI API error: ${response.status} ${response.statusText} - ${errorText}`,
|
||||
response.status
|
||||
)
|
||||
throw error
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!data.data || !Array.isArray(data.data) || data.data.length === 0) {
|
||||
throw new Error('Invalid response format from OpenAI embeddings API')
|
||||
}
|
||||
|
||||
return data.data[0].embedding
|
||||
},
|
||||
{
|
||||
maxRetries: 5,
|
||||
initialDelayMs: 1000,
|
||||
maxDelayMs: 30000,
|
||||
backoffMultiplier: 2,
|
||||
}
|
||||
)
|
||||
|
||||
return embedding
|
||||
} catch (error) {
|
||||
logger.error('Failed to generate search embedding:', error)
|
||||
throw new Error(
|
||||
`Embedding generation failed: ${error instanceof Error ? error.message : 'Unknown error'}`
|
||||
)
|
||||
}
|
||||
}
|
||||
// Use shared embedding utility
|
||||
export { generateSearchEmbedding } from '@/lib/embeddings/utils'
|
||||
|
||||
function getTagFilters(filters: Record<string, string>, embedding: any) {
|
||||
return Object.entries(filters).map(([key, value]) => {
|
||||
|
||||
@@ -252,5 +252,76 @@ describe('Knowledge Utils', () => {
|
||||
|
||||
expect(result.length).toBe(2)
|
||||
})
|
||||
|
||||
it('should use Azure OpenAI when Azure config is provided', async () => {
|
||||
const { env } = await import('@/lib/env')
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
Object.assign(env, {
|
||||
AZURE_OPENAI_API_KEY: 'test-azure-key',
|
||||
AZURE_OPENAI_ENDPOINT: 'https://test.openai.azure.com',
|
||||
AZURE_OPENAI_API_VERSION: '2024-12-01-preview',
|
||||
KB_OPENAI_MODEL_NAME: 'text-embedding-ada-002',
|
||||
OPENAI_API_KEY: 'test-openai-key',
|
||||
})
|
||||
|
||||
const fetchSpy = vi.mocked(fetch)
|
||||
fetchSpy.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: async () => ({
|
||||
data: [{ embedding: [0.1, 0.2], index: 0 }],
|
||||
}),
|
||||
} as any)
|
||||
|
||||
await generateEmbeddings(['test text'])
|
||||
|
||||
expect(fetchSpy).toHaveBeenCalledWith(
|
||||
'https://test.openai.azure.com/openai/deployments/text-embedding-ada-002/embeddings?api-version=2024-12-01-preview',
|
||||
expect.objectContaining({
|
||||
headers: expect.objectContaining({
|
||||
'api-key': 'test-azure-key',
|
||||
}),
|
||||
})
|
||||
)
|
||||
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
})
|
||||
|
||||
it('should fallback to OpenAI when no Azure config provided', async () => {
|
||||
const { env } = await import('@/lib/env')
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
Object.assign(env, {
|
||||
OPENAI_API_KEY: 'test-openai-key',
|
||||
})
|
||||
|
||||
const fetchSpy = vi.mocked(fetch)
|
||||
fetchSpy.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: async () => ({
|
||||
data: [{ embedding: [0.1, 0.2], index: 0 }],
|
||||
}),
|
||||
} as any)
|
||||
|
||||
await generateEmbeddings(['test text'])
|
||||
|
||||
expect(fetchSpy).toHaveBeenCalledWith(
|
||||
'https://api.openai.com/v1/embeddings',
|
||||
expect.objectContaining({
|
||||
headers: expect.objectContaining({
|
||||
Authorization: 'Bearer test-openai-key',
|
||||
}),
|
||||
})
|
||||
)
|
||||
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
})
|
||||
|
||||
it('should throw error when no API configuration provided', async () => {
|
||||
const { env } = await import('@/lib/env')
|
||||
Object.keys(env).forEach((key) => delete (env as any)[key])
|
||||
|
||||
await expect(generateEmbeddings(['test text'])).rejects.toThrow(
|
||||
'Either OPENAI_API_KEY or Azure OpenAI configuration (AZURE_OPENAI_API_KEY + AZURE_OPENAI_ENDPOINT) must be configured'
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import crypto from 'crypto'
|
||||
import { and, eq, isNull } from 'drizzle-orm'
|
||||
import { processDocument } from '@/lib/documents/document-processor'
|
||||
import { retryWithExponentialBackoff } from '@/lib/documents/utils'
|
||||
import { env } from '@/lib/env'
|
||||
import { generateEmbeddings } from '@/lib/embeddings/utils'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getUserEntityPermissions } from '@/lib/permissions/utils'
|
||||
import { db } from '@/db'
|
||||
@@ -10,22 +9,11 @@ import { document, embedding, knowledgeBase } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('KnowledgeUtils')
|
||||
|
||||
// Timeout constants (in milliseconds)
|
||||
const TIMEOUTS = {
|
||||
OVERALL_PROCESSING: 150000, // 150 seconds (2.5 minutes)
|
||||
EMBEDDINGS_API: 60000, // 60 seconds per batch
|
||||
} as const
|
||||
|
||||
class APIError extends Error {
|
||||
public status: number
|
||||
|
||||
constructor(message: string, status: number) {
|
||||
super(message)
|
||||
this.name = 'APIError'
|
||||
this.status = status
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a timeout wrapper for async operations
|
||||
*/
|
||||
@@ -110,18 +98,6 @@ export interface EmbeddingData {
|
||||
updatedAt: Date
|
||||
}
|
||||
|
||||
interface OpenAIEmbeddingResponse {
|
||||
data: Array<{
|
||||
embedding: number[]
|
||||
index: number
|
||||
}>
|
||||
model: string
|
||||
usage: {
|
||||
prompt_tokens: number
|
||||
total_tokens: number
|
||||
}
|
||||
}
|
||||
|
||||
export interface KnowledgeBaseAccessResult {
|
||||
hasAccess: true
|
||||
knowledgeBase: Pick<KnowledgeBaseData, 'id' | 'userId'>
|
||||
@@ -405,87 +381,8 @@ export async function checkChunkAccess(
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate embeddings using OpenAI API with retry logic for rate limiting
|
||||
*/
|
||||
export async function generateEmbeddings(
|
||||
texts: string[],
|
||||
embeddingModel = 'text-embedding-3-small'
|
||||
): Promise<number[][]> {
|
||||
const openaiApiKey = env.OPENAI_API_KEY
|
||||
if (!openaiApiKey) {
|
||||
throw new Error('OPENAI_API_KEY not configured')
|
||||
}
|
||||
|
||||
try {
|
||||
const batchSize = 100
|
||||
const allEmbeddings: number[][] = []
|
||||
|
||||
for (let i = 0; i < texts.length; i += batchSize) {
|
||||
const batch = texts.slice(i, i + batchSize)
|
||||
|
||||
logger.info(
|
||||
`Generating embeddings for batch ${Math.floor(i / batchSize) + 1} (${batch.length} texts)`
|
||||
)
|
||||
|
||||
const batchEmbeddings = await retryWithExponentialBackoff(
|
||||
async () => {
|
||||
const controller = new AbortController()
|
||||
const timeoutId = setTimeout(() => controller.abort(), TIMEOUTS.EMBEDDINGS_API)
|
||||
|
||||
try {
|
||||
const response = await fetch('https://api.openai.com/v1/embeddings', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${openaiApiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
input: batch,
|
||||
model: embeddingModel,
|
||||
encoding_format: 'float',
|
||||
}),
|
||||
signal: controller.signal,
|
||||
})
|
||||
|
||||
clearTimeout(timeoutId)
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
const error = new APIError(
|
||||
`OpenAI API error: ${response.status} ${response.statusText} - ${errorText}`,
|
||||
response.status
|
||||
)
|
||||
throw error
|
||||
}
|
||||
|
||||
const data: OpenAIEmbeddingResponse = await response.json()
|
||||
return data.data.map((item) => item.embedding)
|
||||
} catch (error) {
|
||||
clearTimeout(timeoutId)
|
||||
if (error instanceof Error && error.name === 'AbortError') {
|
||||
throw new Error('OpenAI API request timed out')
|
||||
}
|
||||
throw error
|
||||
}
|
||||
},
|
||||
{
|
||||
maxRetries: 5,
|
||||
initialDelayMs: 1000,
|
||||
maxDelayMs: 60000, // Max 1 minute delay for embeddings
|
||||
backoffMultiplier: 2,
|
||||
}
|
||||
)
|
||||
|
||||
allEmbeddings.push(...batchEmbeddings)
|
||||
}
|
||||
|
||||
return allEmbeddings
|
||||
} catch (error) {
|
||||
logger.error('Failed to generate embeddings:', error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
// Export for external use
|
||||
export { generateEmbeddings }
|
||||
|
||||
/**
|
||||
* Process a document asynchronously with full error handling
|
||||
|
||||
@@ -73,30 +73,59 @@ export async function GET(request: NextRequest) {
|
||||
const { searchParams } = new URL(request.url)
|
||||
const params = QueryParamsSchema.parse(Object.fromEntries(searchParams.entries()))
|
||||
|
||||
// Conditionally select columns based on detail level to optimize performance
|
||||
const selectColumns =
|
||||
params.details === 'full'
|
||||
? {
|
||||
id: workflowExecutionLogs.id,
|
||||
workflowId: workflowExecutionLogs.workflowId,
|
||||
executionId: workflowExecutionLogs.executionId,
|
||||
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
|
||||
level: workflowExecutionLogs.level,
|
||||
trigger: workflowExecutionLogs.trigger,
|
||||
startedAt: workflowExecutionLogs.startedAt,
|
||||
endedAt: workflowExecutionLogs.endedAt,
|
||||
totalDurationMs: workflowExecutionLogs.totalDurationMs,
|
||||
executionData: workflowExecutionLogs.executionData, // Large field - only in full mode
|
||||
cost: workflowExecutionLogs.cost,
|
||||
files: workflowExecutionLogs.files, // Large field - only in full mode
|
||||
createdAt: workflowExecutionLogs.createdAt,
|
||||
workflowName: workflow.name,
|
||||
workflowDescription: workflow.description,
|
||||
workflowColor: workflow.color,
|
||||
workflowFolderId: workflow.folderId,
|
||||
workflowUserId: workflow.userId,
|
||||
workflowWorkspaceId: workflow.workspaceId,
|
||||
workflowCreatedAt: workflow.createdAt,
|
||||
workflowUpdatedAt: workflow.updatedAt,
|
||||
}
|
||||
: {
|
||||
// Basic mode - exclude large fields for better performance
|
||||
id: workflowExecutionLogs.id,
|
||||
workflowId: workflowExecutionLogs.workflowId,
|
||||
executionId: workflowExecutionLogs.executionId,
|
||||
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
|
||||
level: workflowExecutionLogs.level,
|
||||
trigger: workflowExecutionLogs.trigger,
|
||||
startedAt: workflowExecutionLogs.startedAt,
|
||||
endedAt: workflowExecutionLogs.endedAt,
|
||||
totalDurationMs: workflowExecutionLogs.totalDurationMs,
|
||||
executionData: sql<null>`NULL`, // Exclude large execution data in basic mode
|
||||
cost: workflowExecutionLogs.cost,
|
||||
files: sql<null>`NULL`, // Exclude files in basic mode
|
||||
createdAt: workflowExecutionLogs.createdAt,
|
||||
workflowName: workflow.name,
|
||||
workflowDescription: workflow.description,
|
||||
workflowColor: workflow.color,
|
||||
workflowFolderId: workflow.folderId,
|
||||
workflowUserId: workflow.userId,
|
||||
workflowWorkspaceId: workflow.workspaceId,
|
||||
workflowCreatedAt: workflow.createdAt,
|
||||
workflowUpdatedAt: workflow.updatedAt,
|
||||
}
|
||||
|
||||
const baseQuery = db
|
||||
.select({
|
||||
id: workflowExecutionLogs.id,
|
||||
workflowId: workflowExecutionLogs.workflowId,
|
||||
executionId: workflowExecutionLogs.executionId,
|
||||
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
|
||||
level: workflowExecutionLogs.level,
|
||||
trigger: workflowExecutionLogs.trigger,
|
||||
startedAt: workflowExecutionLogs.startedAt,
|
||||
endedAt: workflowExecutionLogs.endedAt,
|
||||
totalDurationMs: workflowExecutionLogs.totalDurationMs,
|
||||
executionData: workflowExecutionLogs.executionData,
|
||||
cost: workflowExecutionLogs.cost,
|
||||
files: workflowExecutionLogs.files,
|
||||
createdAt: workflowExecutionLogs.createdAt,
|
||||
workflowName: workflow.name,
|
||||
workflowDescription: workflow.description,
|
||||
workflowColor: workflow.color,
|
||||
workflowFolderId: workflow.folderId,
|
||||
workflowUserId: workflow.userId,
|
||||
workflowWorkspaceId: workflow.workspaceId,
|
||||
workflowCreatedAt: workflow.createdAt,
|
||||
workflowUpdatedAt: workflow.updatedAt,
|
||||
})
|
||||
.select(selectColumns)
|
||||
.from(workflowExecutionLogs)
|
||||
.innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
|
||||
.innerJoin(
|
||||
@@ -276,18 +305,24 @@ export async function GET(request: NextRequest) {
|
||||
const enhancedLogs = logs.map((log) => {
|
||||
const blockExecutions = blockExecutionsByExecution[log.executionId] || []
|
||||
|
||||
// Use stored trace spans if available, otherwise create from block executions
|
||||
const storedTraceSpans = (log.executionData as any)?.traceSpans
|
||||
const traceSpans =
|
||||
storedTraceSpans && Array.isArray(storedTraceSpans) && storedTraceSpans.length > 0
|
||||
? storedTraceSpans
|
||||
: createTraceSpans(blockExecutions)
|
||||
// Only process trace spans and detailed cost in full mode
|
||||
let traceSpans = []
|
||||
let costSummary = (log.cost as any) || { total: 0 }
|
||||
|
||||
// Prefer stored cost JSON; otherwise synthesize from blocks
|
||||
const costSummary =
|
||||
log.cost && Object.keys(log.cost as any).length > 0
|
||||
? (log.cost as any)
|
||||
: extractCostSummary(blockExecutions)
|
||||
if (params.details === 'full' && log.executionData) {
|
||||
// Use stored trace spans if available, otherwise create from block executions
|
||||
const storedTraceSpans = (log.executionData as any)?.traceSpans
|
||||
traceSpans =
|
||||
storedTraceSpans && Array.isArray(storedTraceSpans) && storedTraceSpans.length > 0
|
||||
? storedTraceSpans
|
||||
: createTraceSpans(blockExecutions)
|
||||
|
||||
// Prefer stored cost JSON; otherwise synthesize from blocks
|
||||
costSummary =
|
||||
log.cost && Object.keys(log.cost as any).length > 0
|
||||
? (log.cost as any)
|
||||
: extractCostSummary(blockExecutions)
|
||||
}
|
||||
|
||||
const workflowSummary = {
|
||||
id: log.workflowId,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getUserUsageData } from '@/lib/billing/core/usage'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { db } from '@/db'
|
||||
import { member, user, userStats } from '@/db/schema'
|
||||
@@ -80,8 +81,6 @@ export async function GET(
|
||||
.select({
|
||||
currentPeriodCost: userStats.currentPeriodCost,
|
||||
currentUsageLimit: userStats.currentUsageLimit,
|
||||
billingPeriodStart: userStats.billingPeriodStart,
|
||||
billingPeriodEnd: userStats.billingPeriodEnd,
|
||||
usageLimitSetBy: userStats.usageLimitSetBy,
|
||||
usageLimitUpdatedAt: userStats.usageLimitUpdatedAt,
|
||||
lastPeriodCost: userStats.lastPeriodCost,
|
||||
@@ -90,11 +89,22 @@ export async function GET(
|
||||
.where(eq(userStats.userId, memberId))
|
||||
.limit(1)
|
||||
|
||||
const computed = await getUserUsageData(memberId)
|
||||
|
||||
if (usageData.length > 0) {
|
||||
memberData = {
|
||||
...memberData,
|
||||
usage: usageData[0],
|
||||
} as typeof memberData & { usage: (typeof usageData)[0] }
|
||||
usage: {
|
||||
...usageData[0],
|
||||
billingPeriodStart: computed.billingPeriodStart,
|
||||
billingPeriodEnd: computed.billingPeriodEnd,
|
||||
},
|
||||
} as typeof memberData & {
|
||||
usage: (typeof usageData)[0] & {
|
||||
billingPeriodStart: Date | null
|
||||
billingPeriodEnd: Date | null
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@ import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getEmailSubject, renderInvitationEmail } from '@/components/emails/render-email'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getUserUsageData } from '@/lib/billing/core/usage'
|
||||
import { validateSeatAvailability } from '@/lib/billing/validation/seat-management'
|
||||
import { sendEmail } from '@/lib/email/mailer'
|
||||
import { quickValidateEmail } from '@/lib/email/validation'
|
||||
@@ -63,7 +64,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
|
||||
// Include usage data if requested and user has admin access
|
||||
if (includeUsage && hasAdminAccess) {
|
||||
const membersWithUsage = await db
|
||||
const base = await db
|
||||
.select({
|
||||
id: member.id,
|
||||
userId: member.userId,
|
||||
@@ -74,8 +75,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
userEmail: user.email,
|
||||
currentPeriodCost: userStats.currentPeriodCost,
|
||||
currentUsageLimit: userStats.currentUsageLimit,
|
||||
billingPeriodStart: userStats.billingPeriodStart,
|
||||
billingPeriodEnd: userStats.billingPeriodEnd,
|
||||
usageLimitSetBy: userStats.usageLimitSetBy,
|
||||
usageLimitUpdatedAt: userStats.usageLimitUpdatedAt,
|
||||
})
|
||||
@@ -84,6 +83,17 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
.leftJoin(userStats, eq(user.id, userStats.userId))
|
||||
.where(eq(member.organizationId, organizationId))
|
||||
|
||||
const membersWithUsage = await Promise.all(
|
||||
base.map(async (row) => {
|
||||
const usage = await getUserUsageData(row.userId)
|
||||
return {
|
||||
...row,
|
||||
billingPeriodStart: usage.billingPeriodStart,
|
||||
billingPeriodEnd: usage.billingPeriodEnd,
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: membersWithUsage,
|
||||
|
||||

@@ -39,6 +39,11 @@ export async function POST(request: NextRequest) {
      stream,
      messages,
      environmentVariables,
      workflowVariables,
      blockData,
      blockNameMapping,
      reasoningEffort,
      verbosity,
    } = body

    logger.info(`[${requestId}] Provider request details`, {
@@ -58,6 +63,9 @@ export async function POST(request: NextRequest) {
      messageCount: messages?.length || 0,
      hasEnvironmentVariables:
        !!environmentVariables && Object.keys(environmentVariables).length > 0,
      hasWorkflowVariables: !!workflowVariables && Object.keys(workflowVariables).length > 0,
      reasoningEffort,
      verbosity,
    })

    let finalApiKey: string
@@ -99,6 +107,11 @@ export async function POST(request: NextRequest) {
      stream,
      messages,
      environmentVariables,
      workflowVariables,
      blockData,
      blockNameMapping,
      reasoningEffort,
      verbosity,
    })

    const executionTime = Date.now() - startTime
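
The hunks above thread workflowVariables, blockData, and blockNameMapping from the request body through logging and into the provider execution call. A hedged sketch of a client payload under these changes; the endpoint path is inferred from the route's location, only the fields visible in the hunks are shown, and all values are placeholders:

    await fetch('/api/providers', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        // ...provider/model fields destructured above this hunk are omitted here
        stream: false,
        messages: [{ role: 'user', content: 'Hi' }],
        environmentVariables: { API_KEY: 'sk-test' }, // placeholder
        workflowVariables: { 'var-id-1': { name: 'User Name', type: 'plain', value: 'Ada' } },
        blockData: {},
        blockNameMapping: {},
        reasoningEffort: 'medium',
        verbosity: 'low',
      }),
    })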

@@ -474,8 +474,10 @@ export async function GET() {
        })

        await loggingSession.safeCompleteWithError({
          message: `Schedule execution failed before workflow started: ${earlyError.message}`,
          stackTrace: earlyError.stack,
          error: {
            message: `Schedule execution failed before workflow started: ${earlyError.message}`,
            stackTrace: earlyError.stack,
          },
        })
      } catch (loggingError) {
        logger.error(
@@ -591,8 +593,10 @@ export async function GET() {
        })

        await failureLoggingSession.safeCompleteWithError({
          message: `Schedule execution failed: ${error.message}`,
          stackTrace: error.stack,
          error: {
            message: `Schedule execution failed: ${error.message}`,
            stackTrace: error.stack,
          },
        })
      } catch (loggingError) {
        logger.error(
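
Because the compare view above interleaves the old flat arguments with the new nested object, the new call shape is easy to misread; for clarity, the updated calls pass a single error object:

    await loggingSession.safeCompleteWithError({
      error: {
        message: `Schedule execution failed: ${error.message}`,
        stackTrace: error.stack,
      },
    })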
@@ -1,9 +1,11 @@
|
||||
import { eq, sql } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { hasAdminPermission } from '@/lib/permissions/utils'
|
||||
import { db } from '@/db'
|
||||
import { templates } from '@/db/schema'
|
||||
import { templates, workflow } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('TemplateByIdAPI')
|
||||
|
||||
@@ -62,3 +64,153 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
const updateTemplateSchema = z.object({
|
||||
name: z.string().min(1).max(100),
|
||||
description: z.string().min(1).max(500),
|
||||
author: z.string().min(1).max(100),
|
||||
category: z.string().min(1),
|
||||
icon: z.string().min(1),
|
||||
color: z.string().regex(/^#[0-9A-F]{6}$/i),
|
||||
state: z.any().optional(), // Workflow state
|
||||
})
|
||||
|
||||
// PUT /api/templates/[id] - Update a template
|
||||
export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
const { id } = await params
|
||||
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
logger.warn(`[${requestId}] Unauthorized template update attempt for ID: ${id}`)
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validationResult = updateTemplateSchema.safeParse(body)
|
||||
|
||||
if (!validationResult.success) {
|
||||
logger.warn(`[${requestId}] Invalid template data for update: ${id}`, validationResult.error)
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid template data', details: validationResult.error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const { name, description, author, category, icon, color, state } = validationResult.data
|
||||
|
||||
// Check if template exists
|
||||
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
|
||||
|
||||
if (existingTemplate.length === 0) {
|
||||
logger.warn(`[${requestId}] Template not found for update: ${id}`)
|
||||
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Permission: template owner OR admin of the workflow's workspace (if any)
|
||||
let canUpdate = existingTemplate[0].userId === session.user.id
|
||||
|
||||
if (!canUpdate && existingTemplate[0].workflowId) {
|
||||
const wfRows = await db
|
||||
.select({ workspaceId: workflow.workspaceId })
|
||||
.from(workflow)
|
||||
.where(eq(workflow.id, existingTemplate[0].workflowId))
|
||||
.limit(1)
|
||||
|
||||
const workspaceId = wfRows[0]?.workspaceId as string | null | undefined
|
||||
if (workspaceId) {
|
||||
const hasAdmin = await hasAdminPermission(session.user.id, workspaceId)
|
||||
if (hasAdmin) canUpdate = true
|
||||
}
|
||||
}
|
||||
|
||||
if (!canUpdate) {
|
||||
logger.warn(`[${requestId}] User denied permission to update template ${id}`)
|
||||
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Update the template
|
||||
const updatedTemplate = await db
|
||||
.update(templates)
|
||||
.set({
|
||||
name,
|
||||
description,
|
||||
author,
|
||||
category,
|
||||
icon,
|
||||
color,
|
||||
...(state && { state }),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(templates.id, id))
|
||||
.returning()
|
||||
|
||||
logger.info(`[${requestId}] Successfully updated template: ${id}`)
|
||||
|
||||
return NextResponse.json({
|
||||
data: updatedTemplate[0],
|
||||
message: 'Template updated successfully',
|
||||
})
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Error updating template: ${id}`, error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
// DELETE /api/templates/[id] - Delete a template
|
||||
export async function DELETE(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> }
|
||||
) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
const { id } = await params
|
||||
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
logger.warn(`[${requestId}] Unauthorized template delete attempt for ID: ${id}`)
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
// Fetch template
|
||||
const existing = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
|
||||
if (existing.length === 0) {
|
||||
logger.warn(`[${requestId}] Template not found for delete: ${id}`)
|
||||
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
const template = existing[0]
|
||||
|
||||
// Permission: owner or admin of the workflow's workspace (if any)
|
||||
let canDelete = template.userId === session.user.id
|
||||
|
||||
if (!canDelete && template.workflowId) {
|
||||
// Look up workflow to get workspaceId
|
||||
const wfRows = await db
|
||||
.select({ workspaceId: workflow.workspaceId })
|
||||
.from(workflow)
|
||||
.where(eq(workflow.id, template.workflowId))
|
||||
.limit(1)
|
||||
|
||||
const workspaceId = wfRows[0]?.workspaceId as string | null | undefined
|
||||
if (workspaceId) {
|
||||
const hasAdmin = await hasAdminPermission(session.user.id, workspaceId)
|
||||
if (hasAdmin) canDelete = true
|
||||
}
|
||||
}
|
||||
|
||||
if (!canDelete) {
|
||||
logger.warn(`[${requestId}] User denied permission to delete template ${id}`)
|
||||
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
|
||||
}
|
||||
|
||||
await db.delete(templates).where(eq(templates.id, id))
|
||||
|
||||
logger.info(`[${requestId}] Deleted template: ${id}`)
|
||||
return NextResponse.json({ success: true })
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Error deleting template: ${id}`, error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
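
For reference, a hedged example of calling the update endpoint added in the templates [id] route above; the template ID and field values are placeholders, and the path follows the PUT /api/templates/[id] comment in the diff:

    await fetch('/api/templates/tmpl_123', { // placeholder ID
      method: 'PUT',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        name: 'Weekly digest',
        description: 'Summarizes workspace activity once a week',
        author: 'Sim Team',
        category: 'marketing', // placeholder category
        icon: 'mail',
        color: '#6F3DFA',
        // state is optional; include it to overwrite the stored workflow state
      }),
    })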

@@ -77,6 +77,7 @@ const QueryParamsSchema = z.object({
  limit: z.coerce.number().optional().default(50),
  offset: z.coerce.number().optional().default(0),
  search: z.string().optional(),
  workflowId: z.string().optional(),
})

// GET /api/templates - Retrieve templates
@@ -111,6 +112,11 @@ export async function GET(request: NextRequest) {
      )
    }

    // Apply workflow filter if provided (for getting template by workflow)
    if (params.workflowId) {
      conditions.push(eq(templates.workflowId, params.workflowId))
    }

    // Combine conditions
    const whereCondition = conditions.length > 0 ? and(...conditions) : undefined
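
With the workflowId filter above, a client can look up the template that was published from a particular workflow; a small sketch (the workflow ID and response envelope are assumptions, since this hunk does not show the response shape):

    const res = await fetch(`/api/templates?workflowId=${workflowId}&limit=1`)
    const body = await res.json() // response shape not shown in this hunk
    const template = body.data?.[0]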

@@ -1,10 +1,10 @@
import { NextResponse } from 'next/server'
import { Logger } from '@/lib/logs/console/logger'
import { createLogger } from '@/lib/logs/console/logger'
import { getJiraCloudId } from '@/tools/jira/utils'

export const dynamic = 'force-dynamic'

const logger = new Logger('JiraIssueAPI')
const logger = createLogger('JiraIssueAPI')

export async function POST(request: Request) {
  try {

@@ -1,10 +1,10 @@
import { NextResponse } from 'next/server'
import { Logger } from '@/lib/logs/console/logger'
import { createLogger } from '@/lib/logs/console/logger'
import { getJiraCloudId } from '@/tools/jira/utils'

export const dynamic = 'force-dynamic'

const logger = new Logger('JiraIssuesAPI')
const logger = createLogger('JiraIssuesAPI')

export async function POST(request: Request) {
  try {

@@ -1,10 +1,10 @@
import { NextResponse } from 'next/server'
import { Logger } from '@/lib/logs/console/logger'
import { createLogger } from '@/lib/logs/console/logger'
import { getJiraCloudId } from '@/tools/jira/utils'

export const dynamic = 'force-dynamic'

const logger = new Logger('JiraProjectsAPI')
const logger = createLogger('JiraProjectsAPI')

export async function GET(request: Request) {
  try {

@@ -1,10 +1,10 @@
import { NextResponse } from 'next/server'
import { Logger } from '@/lib/logs/console/logger'
import { createLogger } from '@/lib/logs/console/logger'
import { getJiraCloudId } from '@/tools/jira/utils'

export const dynamic = 'force-dynamic'

const logger = new Logger('JiraUpdateAPI')
const logger = createLogger('JiraUpdateAPI')

export async function PUT(request: Request) {
  try {

@@ -1,10 +1,10 @@
import { NextResponse } from 'next/server'
import { Logger } from '@/lib/logs/console/logger'
import { createLogger } from '@/lib/logs/console/logger'
import { getJiraCloudId } from '@/tools/jira/utils'

export const dynamic = 'force-dynamic'

const logger = new Logger('JiraWriteAPI')
const logger = createLogger('JiraWriteAPI')

export async function POST(request: Request) {
  try {

apps/sim/app/api/tools/mysql/delete/route.ts (new file, 67 lines)
@@ -0,0 +1,67 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createLogger } from '@/lib/logs/console/logger'
import { buildDeleteQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'

const logger = createLogger('MySQLDeleteAPI')

const DeleteSchema = z.object({
  host: z.string().min(1, 'Host is required'),
  port: z.coerce.number().int().positive('Port must be a positive integer'),
  database: z.string().min(1, 'Database name is required'),
  username: z.string().min(1, 'Username is required'),
  password: z.string().min(1, 'Password is required'),
  ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
  table: z.string().min(1, 'Table name is required'),
  where: z.string().min(1, 'WHERE clause is required'),
})

export async function POST(request: NextRequest) {
  const requestId = crypto.randomUUID().slice(0, 8)

  try {
    const body = await request.json()
    const params = DeleteSchema.parse(body)

    logger.info(
      `[${requestId}] Deleting data from ${params.table} on ${params.host}:${params.port}/${params.database}`
    )

    const connection = await createMySQLConnection({
      host: params.host,
      port: params.port,
      database: params.database,
      username: params.username,
      password: params.password,
      ssl: params.ssl,
    })

    try {
      const { query, values } = buildDeleteQuery(params.table, params.where)
      const result = await executeQuery(connection, query, values)

      logger.info(`[${requestId}] Delete executed successfully, ${result.rowCount} row(s) deleted`)

      return NextResponse.json({
        message: `Data deleted successfully. ${result.rowCount} row(s) affected.`,
        rows: result.rows,
        rowCount: result.rowCount,
      })
    } finally {
      await connection.end()
    }
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
    logger.error(`[${requestId}] MySQL delete failed:`, error)

    return NextResponse.json({ error: `MySQL delete failed: ${errorMessage}` }, { status: 500 })
  }
}
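
A hedged example request for the new MySQL delete route above; the connection values are placeholders, and the path mirrors the file's location under app/api:

    await fetch('/api/tools/mysql/delete', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        host: 'db.example.com', // placeholder
        port: 3306,
        database: 'app',
        username: 'sim',
        password: 'secret', // placeholder
        ssl: 'required',
        table: 'sessions',
        where: "expires_at < NOW()", // forwarded to buildDeleteQuery as the WHERE clause
      }),
    })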
75
apps/sim/app/api/tools/mysql/execute/route.ts
Normal file
75
apps/sim/app/api/tools/mysql/execute/route.ts
Normal file
@@ -0,0 +1,75 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { createMySQLConnection, executeQuery, validateQuery } from '@/app/api/tools/mysql/utils'
|
||||
|
||||
const logger = createLogger('MySQLExecuteAPI')
|
||||
|
||||
const ExecuteSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
query: z.string().min(1, 'Query is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = ExecuteSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Executing raw SQL on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
// Validate query before execution
|
||||
const validation = validateQuery(params.query)
|
||||
if (!validation.isValid) {
|
||||
logger.warn(`[${requestId}] Query validation failed: ${validation.error}`)
|
||||
return NextResponse.json(
|
||||
{ error: `Query validation failed: ${validation.error}` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const connection = await createMySQLConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await executeQuery(connection, params.query)
|
||||
|
||||
logger.info(`[${requestId}] SQL executed successfully, ${result.rowCount} row(s) affected`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `SQL executed successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await connection.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] MySQL execute failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `MySQL execute failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
91
apps/sim/app/api/tools/mysql/insert/route.ts
Normal file
91
apps/sim/app/api/tools/mysql/insert/route.ts
Normal file
@@ -0,0 +1,91 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { buildInsertQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'
|
||||
|
||||
const logger = createLogger('MySQLInsertAPI')
|
||||
|
||||
const InsertSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
table: z.string().min(1, 'Table name is required'),
|
||||
data: z.union([
|
||||
z
|
||||
.record(z.unknown())
|
||||
.refine((obj) => Object.keys(obj).length > 0, 'Data object cannot be empty'),
|
||||
z
|
||||
.string()
|
||||
.min(1)
|
||||
.transform((str) => {
|
||||
try {
|
||||
const parsed = JSON.parse(str)
|
||||
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
|
||||
throw new Error('Data must be a JSON object')
|
||||
}
|
||||
return parsed
|
||||
} catch (e) {
|
||||
const errorMsg = e instanceof Error ? e.message : 'Unknown error'
|
||||
throw new Error(
|
||||
`Invalid JSON format in data field: ${errorMsg}. Received: ${str.substring(0, 100)}...`
|
||||
)
|
||||
}
|
||||
}),
|
||||
]),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
|
||||
logger.info(`[${requestId}] Received data field type: ${typeof body.data}, value:`, body.data)
|
||||
|
||||
const params = InsertSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Inserting data into ${params.table} on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const connection = await createMySQLConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const { query, values } = buildInsertQuery(params.table, params.data)
|
||||
const result = await executeQuery(connection, query, values)
|
||||
|
||||
logger.info(`[${requestId}] Insert executed successfully, ${result.rowCount} row(s) inserted`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Data inserted successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await connection.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] MySQL insert failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `MySQL insert failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
75
apps/sim/app/api/tools/mysql/query/route.ts
Normal file
75
apps/sim/app/api/tools/mysql/query/route.ts
Normal file
@@ -0,0 +1,75 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { createMySQLConnection, executeQuery, validateQuery } from '@/app/api/tools/mysql/utils'
|
||||
|
||||
const logger = createLogger('MySQLQueryAPI')
|
||||
|
||||
const QuerySchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
query: z.string().min(1, 'Query is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = QuerySchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Executing MySQL query on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
// Validate query before execution
|
||||
const validation = validateQuery(params.query)
|
||||
if (!validation.isValid) {
|
||||
logger.warn(`[${requestId}] Query validation failed: ${validation.error}`)
|
||||
return NextResponse.json(
|
||||
{ error: `Query validation failed: ${validation.error}` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const connection = await createMySQLConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await executeQuery(connection, params.query)
|
||||
|
||||
logger.info(`[${requestId}] Query executed successfully, returned ${result.rowCount} rows`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Query executed successfully. ${result.rowCount} row(s) returned.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await connection.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] MySQL query failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `MySQL query failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
86
apps/sim/app/api/tools/mysql/update/route.ts
Normal file
86
apps/sim/app/api/tools/mysql/update/route.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { buildUpdateQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'
|
||||
|
||||
const logger = createLogger('MySQLUpdateAPI')
|
||||
|
||||
const UpdateSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
table: z.string().min(1, 'Table name is required'),
|
||||
data: z.union([
|
||||
z
|
||||
.record(z.unknown())
|
||||
.refine((obj) => Object.keys(obj).length > 0, 'Data object cannot be empty'),
|
||||
z
|
||||
.string()
|
||||
.min(1)
|
||||
.transform((str) => {
|
||||
try {
|
||||
const parsed = JSON.parse(str)
|
||||
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
|
||||
throw new Error('Data must be a JSON object')
|
||||
}
|
||||
return parsed
|
||||
} catch (e) {
|
||||
throw new Error('Invalid JSON format in data field')
|
||||
}
|
||||
}),
|
||||
]),
|
||||
where: z.string().min(1, 'WHERE clause is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = UpdateSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Updating data in ${params.table} on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const connection = await createMySQLConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const { query, values } = buildUpdateQuery(params.table, params.data, params.where)
|
||||
const result = await executeQuery(connection, query, values)
|
||||
|
||||
logger.info(`[${requestId}] Update executed successfully, ${result.rowCount} row(s) updated`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Data updated successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await connection.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] MySQL update failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `MySQL update failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
159
apps/sim/app/api/tools/mysql/utils.ts
Normal file
159
apps/sim/app/api/tools/mysql/utils.ts
Normal file
@@ -0,0 +1,159 @@
|
||||
import mysql from 'mysql2/promise'
|
||||
|
||||
export interface MySQLConnectionConfig {
|
||||
host: string
|
||||
port: number
|
||||
database: string
|
||||
username: string
|
||||
password: string
|
||||
ssl?: string
|
||||
}
|
||||
|
||||
export async function createMySQLConnection(config: MySQLConnectionConfig) {
|
||||
const connectionConfig: mysql.ConnectionOptions = {
|
||||
host: config.host,
|
||||
port: config.port,
|
||||
database: config.database,
|
||||
user: config.username,
|
||||
password: config.password,
|
||||
}
|
||||
|
||||
// Handle SSL configuration
|
||||
if (config.ssl === 'required') {
|
||||
connectionConfig.ssl = { rejectUnauthorized: true }
|
||||
} else if (config.ssl === 'preferred') {
|
||||
connectionConfig.ssl = { rejectUnauthorized: false }
|
||||
}
|
||||
// For 'disabled', we don't set the ssl property at all
|
||||
|
||||
return mysql.createConnection(connectionConfig)
|
||||
}
|
||||
|
||||
export async function executeQuery(
|
||||
connection: mysql.Connection,
|
||||
query: string,
|
||||
values?: unknown[]
|
||||
) {
|
||||
const [rows, fields] = await connection.execute(query, values)
|
||||
|
||||
if (Array.isArray(rows)) {
|
||||
return {
|
||||
rows: rows as unknown[],
|
||||
rowCount: rows.length,
|
||||
fields,
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
rows: [],
|
||||
rowCount: (rows as mysql.ResultSetHeader).affectedRows || 0,
|
||||
fields,
|
||||
}
|
||||
}
|
||||
|
||||
export function validateQuery(query: string): { isValid: boolean; error?: string } {
|
||||
const trimmedQuery = query.trim().toLowerCase()
|
||||
|
||||
// Block dangerous SQL operations
|
||||
const dangerousPatterns = [
|
||||
/drop\s+database/i,
|
||||
/drop\s+schema/i,
|
||||
/drop\s+user/i,
|
||||
/create\s+user/i,
|
||||
/grant\s+/i,
|
||||
/revoke\s+/i,
|
||||
/alter\s+user/i,
|
||||
/set\s+global/i,
|
||||
/set\s+session/i,
|
||||
/load\s+data/i,
|
||||
/into\s+outfile/i,
|
||||
/into\s+dumpfile/i,
|
||||
/load_file\s*\(/i,
|
||||
/system\s+/i,
|
||||
/exec\s+/i,
|
||||
/execute\s+immediate/i,
|
||||
/xp_cmdshell/i,
|
||||
/sp_configure/i,
|
||||
/information_schema\.tables/i,
|
||||
/mysql\.user/i,
|
||||
/mysql\.db/i,
|
||||
/mysql\.host/i,
|
||||
/performance_schema/i,
|
||||
/sys\./i,
|
||||
]
|
||||
|
||||
for (const pattern of dangerousPatterns) {
|
||||
if (pattern.test(query)) {
|
||||
return {
|
||||
isValid: false,
|
||||
error: `Query contains potentially dangerous operation: ${pattern.source}`,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Only allow specific statement types for execute endpoint
|
||||
const allowedStatements = /^(select|insert|update|delete|with|show|describe|explain)\s+/i
|
||||
if (!allowedStatements.test(trimmedQuery)) {
|
||||
return {
|
||||
isValid: false,
|
||||
error:
|
||||
'Only SELECT, INSERT, UPDATE, DELETE, WITH, SHOW, DESCRIBE, and EXPLAIN statements are allowed',
|
||||
}
|
||||
}
|
||||
|
||||
return { isValid: true }
|
||||
}
|
||||
|
||||
export function buildInsertQuery(table: string, data: Record<string, unknown>) {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const columns = Object.keys(data)
|
||||
const values = Object.values(data)
|
||||
const placeholders = columns.map(() => '?').join(', ')
|
||||
|
||||
const query = `INSERT INTO ${sanitizedTable} (${columns.map(sanitizeIdentifier).join(', ')}) VALUES (${placeholders})`
|
||||
|
||||
return { query, values }
|
||||
}
|
||||
|
||||
export function buildUpdateQuery(table: string, data: Record<string, unknown>, where: string) {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const columns = Object.keys(data)
|
||||
const values = Object.values(data)
|
||||
|
||||
const setClause = columns.map((col) => `${sanitizeIdentifier(col)} = ?`).join(', ')
|
||||
const query = `UPDATE ${sanitizedTable} SET ${setClause} WHERE ${where}`
|
||||
|
||||
return { query, values }
|
||||
}
|
||||
|
||||
export function buildDeleteQuery(table: string, where: string) {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const query = `DELETE FROM ${sanitizedTable} WHERE ${where}`
|
||||
|
||||
return { query, values: [] }
|
||||
}
|
||||
|
||||
export function sanitizeIdentifier(identifier: string): string {
|
||||
// Handle schema.table format
|
||||
if (identifier.includes('.')) {
|
||||
const parts = identifier.split('.')
|
||||
return parts.map((part) => sanitizeSingleIdentifier(part)).join('.')
|
||||
}
|
||||
|
||||
return sanitizeSingleIdentifier(identifier)
|
||||
}
|
||||
|
||||
function sanitizeSingleIdentifier(identifier: string): string {
|
||||
// Remove any existing backticks to prevent double-escaping
|
||||
const cleaned = identifier.replace(/`/g, '')
|
||||
|
||||
// Validate identifier contains only safe characters
|
||||
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(cleaned)) {
|
||||
throw new Error(
|
||||
`Invalid identifier: ${identifier}. Identifiers must start with a letter or underscore and contain only letters, numbers, and underscores.`
|
||||
)
|
||||
}
|
||||
|
||||
// Wrap in backticks for MySQL
|
||||
return `\`${cleaned}\``
|
||||
}
|
||||
74
apps/sim/app/api/tools/postgresql/delete/route.ts
Normal file
74
apps/sim/app/api/tools/postgresql/delete/route.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
buildDeleteQuery,
|
||||
createPostgresConnection,
|
||||
executeQuery,
|
||||
} from '@/app/api/tools/postgresql/utils'
|
||||
|
||||
const logger = createLogger('PostgreSQLDeleteAPI')
|
||||
|
||||
const DeleteSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
table: z.string().min(1, 'Table name is required'),
|
||||
where: z.string().min(1, 'WHERE clause is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = DeleteSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Deleting data from ${params.table} on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const client = await createPostgresConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const { query, values } = buildDeleteQuery(params.table, params.where)
|
||||
const result = await executeQuery(client, query, values)
|
||||
|
||||
logger.info(`[${requestId}] Delete executed successfully, ${result.rowCount} row(s) deleted`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Data deleted successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] PostgreSQL delete failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `PostgreSQL delete failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
82
apps/sim/app/api/tools/postgresql/execute/route.ts
Normal file
82
apps/sim/app/api/tools/postgresql/execute/route.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
createPostgresConnection,
|
||||
executeQuery,
|
||||
validateQuery,
|
||||
} from '@/app/api/tools/postgresql/utils'
|
||||
|
||||
const logger = createLogger('PostgreSQLExecuteAPI')
|
||||
|
||||
const ExecuteSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
query: z.string().min(1, 'Query is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = ExecuteSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Executing raw SQL on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
// Validate query before execution
|
||||
const validation = validateQuery(params.query)
|
||||
if (!validation.isValid) {
|
||||
logger.warn(`[${requestId}] Query validation failed: ${validation.error}`)
|
||||
return NextResponse.json(
|
||||
{ error: `Query validation failed: ${validation.error}` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const client = await createPostgresConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await executeQuery(client, params.query)
|
||||
|
||||
logger.info(`[${requestId}] SQL executed successfully, ${result.rowCount} row(s) affected`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `SQL executed successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] PostgreSQL execute failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `PostgreSQL execute failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
99
apps/sim/app/api/tools/postgresql/insert/route.ts
Normal file
99
apps/sim/app/api/tools/postgresql/insert/route.ts
Normal file
@@ -0,0 +1,99 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
buildInsertQuery,
|
||||
createPostgresConnection,
|
||||
executeQuery,
|
||||
} from '@/app/api/tools/postgresql/utils'
|
||||
|
||||
const logger = createLogger('PostgreSQLInsertAPI')
|
||||
|
||||
const InsertSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
table: z.string().min(1, 'Table name is required'),
|
||||
data: z.union([
|
||||
z
|
||||
.record(z.unknown())
|
||||
.refine((obj) => Object.keys(obj).length > 0, 'Data object cannot be empty'),
|
||||
z
|
||||
.string()
|
||||
.min(1)
|
||||
.transform((str) => {
|
||||
try {
|
||||
const parsed = JSON.parse(str)
|
||||
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
|
||||
throw new Error('Data must be a JSON object')
|
||||
}
|
||||
return parsed
|
||||
} catch (e) {
|
||||
const errorMsg = e instanceof Error ? e.message : 'Unknown error'
|
||||
throw new Error(
|
||||
`Invalid JSON format in data field: ${errorMsg}. Received: ${str.substring(0, 100)}...`
|
||||
)
|
||||
}
|
||||
}),
|
||||
]),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
|
||||
// Debug: Log the data field to see what we're getting
|
||||
logger.info(`[${requestId}] Received data field type: ${typeof body.data}, value:`, body.data)
|
||||
|
||||
const params = InsertSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Inserting data into ${params.table} on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const client = await createPostgresConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const { query, values } = buildInsertQuery(params.table, params.data)
|
||||
const result = await executeQuery(client, query, values)
|
||||
|
||||
logger.info(`[${requestId}] Insert executed successfully, ${result.rowCount} row(s) inserted`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Data inserted successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] PostgreSQL insert failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `PostgreSQL insert failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
65
apps/sim/app/api/tools/postgresql/query/route.ts
Normal file
65
apps/sim/app/api/tools/postgresql/query/route.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { createPostgresConnection, executeQuery } from '@/app/api/tools/postgresql/utils'
|
||||
|
||||
const logger = createLogger('PostgreSQLQueryAPI')
|
||||
|
||||
const QuerySchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
query: z.string().min(1, 'Query is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = QuerySchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Executing PostgreSQL query on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const client = await createPostgresConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await executeQuery(client, params.query)
|
||||
|
||||
logger.info(`[${requestId}] Query executed successfully, returned ${result.rowCount} rows`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Query executed successfully. ${result.rowCount} row(s) returned.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] PostgreSQL query failed:`, error)
|
||||
|
||||
return NextResponse.json({ error: `PostgreSQL query failed: ${errorMessage}` }, { status: 500 })
|
||||
}
|
||||
}
|
||||
93
apps/sim/app/api/tools/postgresql/update/route.ts
Normal file
93
apps/sim/app/api/tools/postgresql/update/route.ts
Normal file
@@ -0,0 +1,93 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
buildUpdateQuery,
|
||||
createPostgresConnection,
|
||||
executeQuery,
|
||||
} from '@/app/api/tools/postgresql/utils'
|
||||
|
||||
const logger = createLogger('PostgreSQLUpdateAPI')
|
||||
|
||||
const UpdateSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
|
||||
table: z.string().min(1, 'Table name is required'),
|
||||
data: z.union([
|
||||
z
|
||||
.record(z.unknown())
|
||||
.refine((obj) => Object.keys(obj).length > 0, 'Data object cannot be empty'),
|
||||
z
|
||||
.string()
|
||||
.min(1)
|
||||
.transform((str) => {
|
||||
try {
|
||||
const parsed = JSON.parse(str)
|
||||
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
|
||||
throw new Error('Data must be a JSON object')
|
||||
}
|
||||
return parsed
|
||||
} catch (e) {
|
||||
throw new Error('Invalid JSON format in data field')
|
||||
}
|
||||
}),
|
||||
]),
|
||||
where: z.string().min(1, 'WHERE clause is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = UpdateSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Updating data in ${params.table} on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const client = await createPostgresConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const { query, values } = buildUpdateQuery(params.table, params.data, params.where)
|
||||
const result = await executeQuery(client, query, values)
|
||||
|
||||
logger.info(`[${requestId}] Update executed successfully, ${result.rowCount} row(s) updated`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Data updated successfully. ${result.rowCount} row(s) affected.`,
|
||||
rows: result.rows,
|
||||
rowCount: result.rowCount,
|
||||
})
|
||||
} finally {
|
||||
await client.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] PostgreSQL update failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `PostgreSQL update failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
173
apps/sim/app/api/tools/postgresql/utils.ts
Normal file
173
apps/sim/app/api/tools/postgresql/utils.ts
Normal file
@@ -0,0 +1,173 @@
|
||||
import { Client } from 'pg'
|
||||
import type { PostgresConnectionConfig } from '@/tools/postgresql/types'
|
||||
|
||||
export async function createPostgresConnection(config: PostgresConnectionConfig): Promise<Client> {
|
||||
const client = new Client({
|
||||
host: config.host,
|
||||
port: config.port,
|
||||
database: config.database,
|
||||
user: config.username,
|
||||
password: config.password,
|
||||
ssl:
|
||||
config.ssl === 'disabled'
|
||||
? false
|
||||
: config.ssl === 'required'
|
||||
? true
|
||||
: config.ssl === 'preferred'
|
||||
? { rejectUnauthorized: false }
|
||||
: false,
|
||||
connectionTimeoutMillis: 10000, // 10 seconds
|
||||
query_timeout: 30000, // 30 seconds
|
||||
})
|
||||
|
||||
try {
|
||||
await client.connect()
|
||||
return client
|
||||
} catch (error) {
|
||||
await client.end()
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeQuery(
|
||||
client: Client,
|
||||
query: string,
|
||||
params: unknown[] = []
|
||||
): Promise<{ rows: unknown[]; rowCount: number }> {
|
||||
const result = await client.query(query, params)
|
||||
return {
|
||||
rows: result.rows || [],
|
||||
rowCount: result.rowCount || 0,
|
||||
}
|
||||
}
|
||||
|
||||
export function validateQuery(query: string): { isValid: boolean; error?: string } {
|
||||
const trimmedQuery = query.trim().toLowerCase()
|
||||
|
||||
// Block dangerous SQL operations
|
||||
const dangerousPatterns = [
|
||||
/drop\s+database/i,
|
||||
/drop\s+schema/i,
|
||||
/drop\s+user/i,
|
||||
/create\s+user/i,
|
||||
/create\s+role/i,
|
||||
/grant\s+/i,
|
||||
/revoke\s+/i,
|
||||
/alter\s+user/i,
|
||||
/alter\s+role/i,
|
||||
/set\s+role/i,
|
||||
/reset\s+role/i,
|
||||
/copy\s+.*from/i,
|
||||
/copy\s+.*to/i,
|
||||
/lo_import/i,
|
||||
/lo_export/i,
|
||||
/pg_read_file/i,
|
||||
/pg_write_file/i,
|
||||
/pg_ls_dir/i,
|
||||
/information_schema\.tables/i,
|
||||
/pg_catalog/i,
|
||||
/pg_user/i,
|
||||
/pg_shadow/i,
|
||||
/pg_roles/i,
|
||||
/pg_authid/i,
|
||||
/pg_stat_activity/i,
|
||||
/dblink/i,
|
||||
/\\\\copy/i,
|
||||
]
|
||||
|
||||
for (const pattern of dangerousPatterns) {
|
||||
if (pattern.test(query)) {
|
||||
return {
|
||||
isValid: false,
|
||||
error: `Query contains potentially dangerous operation: ${pattern.source}`,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Only allow specific statement types for execute endpoint
|
||||
const allowedStatements = /^(select|insert|update|delete|with|explain|analyze|show)\s+/i
|
||||
if (!allowedStatements.test(trimmedQuery)) {
|
||||
return {
|
||||
isValid: false,
|
||||
error:
|
||||
'Only SELECT, INSERT, UPDATE, DELETE, WITH, EXPLAIN, ANALYZE, and SHOW statements are allowed',
|
||||
}
|
||||
}
|
||||
|
||||
return { isValid: true }
|
||||
}
|
||||
|
||||
export function sanitizeIdentifier(identifier: string): string {
|
||||
// Handle schema.table format
|
||||
if (identifier.includes('.')) {
|
||||
const parts = identifier.split('.')
|
||||
return parts.map((part) => sanitizeSingleIdentifier(part)).join('.')
|
||||
}
|
||||
|
||||
return sanitizeSingleIdentifier(identifier)
|
||||
}
|
||||
|
||||
function sanitizeSingleIdentifier(identifier: string): string {
|
||||
// Remove any existing double quotes to prevent double-escaping
|
||||
const cleaned = identifier.replace(/"/g, '')
|
||||
|
||||
// Validate identifier contains only safe characters
|
||||
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(cleaned)) {
|
||||
throw new Error(
|
||||
`Invalid identifier: ${identifier}. Identifiers must start with a letter or underscore and contain only letters, numbers, and underscores.`
|
||||
)
|
||||
}
|
||||
|
||||
// Wrap in double quotes for PostgreSQL
|
||||
return `"${cleaned}"`
|
||||
}
|
||||
|
||||
export function buildInsertQuery(
|
||||
table: string,
|
||||
data: Record<string, unknown>
|
||||
): {
|
||||
query: string
|
||||
values: unknown[]
|
||||
} {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const columns = Object.keys(data)
|
||||
const sanitizedColumns = columns.map((col) => sanitizeIdentifier(col))
|
||||
const placeholders = columns.map((_, index) => `$${index + 1}`)
|
||||
const values = columns.map((col) => data[col])
|
||||
|
||||
const query = `INSERT INTO ${sanitizedTable} (${sanitizedColumns.join(', ')}) VALUES (${placeholders.join(', ')}) RETURNING *`
|
||||
|
||||
return { query, values }
|
||||
}
|
||||
|
||||
export function buildUpdateQuery(
|
||||
table: string,
|
||||
data: Record<string, unknown>,
|
||||
where: string
|
||||
): {
|
||||
query: string
|
||||
values: unknown[]
|
||||
} {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const columns = Object.keys(data)
|
||||
const sanitizedColumns = columns.map((col) => sanitizeIdentifier(col))
|
||||
const setClause = sanitizedColumns.map((col, index) => `${col} = $${index + 1}`).join(', ')
|
||||
const values = columns.map((col) => data[col])
|
||||
|
||||
const query = `UPDATE ${sanitizedTable} SET ${setClause} WHERE ${where} RETURNING *`
|
||||
|
||||
return { query, values }
|
||||
}
|
||||
|
||||
export function buildDeleteQuery(
|
||||
table: string,
|
||||
where: string
|
||||
): {
|
||||
query: string
|
||||
values: unknown[]
|
||||
} {
|
||||
const sanitizedTable = sanitizeIdentifier(table)
|
||||
const query = `DELETE FROM ${sanitizedTable} WHERE ${where} RETURNING *`
|
||||
|
||||
return { query, values: [] }
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
import { unstable_noStore as noStore } from 'next/cache'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import OpenAI from 'openai'
|
||||
import OpenAI, { AzureOpenAI } from 'openai'
|
||||
import { env } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
@@ -10,14 +10,32 @@ export const maxDuration = 60
|
||||
|
||||
const logger = createLogger('WandGenerateAPI')
|
||||
|
||||
const openai = env.OPENAI_API_KEY
|
||||
? new OpenAI({
|
||||
apiKey: env.OPENAI_API_KEY,
|
||||
})
|
||||
: null
|
||||
const azureApiKey = env.AZURE_OPENAI_API_KEY
|
||||
const azureEndpoint = env.AZURE_OPENAI_ENDPOINT
|
||||
const azureApiVersion = env.AZURE_OPENAI_API_VERSION
|
||||
const wandModelName = env.WAND_OPENAI_MODEL_NAME || 'gpt-4o'
|
||||
const openaiApiKey = env.OPENAI_API_KEY
|
||||
|
||||
if (!env.OPENAI_API_KEY) {
|
||||
logger.warn('OPENAI_API_KEY not found. Wand generation API will not function.')
|
||||
const useWandAzure = azureApiKey && azureEndpoint && azureApiVersion
|
||||
|
||||
const client = useWandAzure
|
||||
? new AzureOpenAI({
|
||||
apiKey: azureApiKey,
|
||||
apiVersion: azureApiVersion,
|
||||
endpoint: azureEndpoint,
|
||||
})
|
||||
: openaiApiKey
|
||||
? new OpenAI({
|
||||
apiKey: openaiApiKey,
|
||||
})
|
||||
: null
|
||||
|
||||
if (!useWandAzure && !openaiApiKey) {
|
||||
logger.warn(
|
||||
'Neither Azure OpenAI nor OpenAI API key found. Wand generation API will not function.'
|
||||
)
|
||||
} else {
|
||||
logger.info(`Using ${useWandAzure ? 'Azure OpenAI' : 'OpenAI'} for wand generation`)
|
||||
}
|
||||
|
||||
interface ChatMessage {
|
||||
@@ -32,14 +50,12 @@ interface RequestBody {
|
||||
history?: ChatMessage[]
|
||||
}
|
||||
|
||||
// The endpoint is now generic - system prompts come from wand configs
|
||||
|
||||
export async function POST(req: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
logger.info(`[${requestId}] Received wand generation request`)
|
||||
|
||||
if (!openai) {
|
||||
logger.error(`[${requestId}] OpenAI client not initialized. Missing API key.`)
|
||||
if (!client) {
|
||||
logger.error(`[${requestId}] AI client not initialized. Missing API key.`)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Wand generation service is not configured.' },
|
||||
{ status: 503 }
|
||||
@@ -74,22 +90,34 @@ export async function POST(req: NextRequest) {
|
||||
// Add the current user prompt
|
||||
messages.push({ role: 'user', content: prompt })
|
||||
|
||||
logger.debug(`[${requestId}] Calling OpenAI API for wand generation`, {
|
||||
stream,
|
||||
historyLength: history.length,
|
||||
})
|
||||
logger.debug(
|
||||
`[${requestId}] Calling ${useWandAzure ? 'Azure OpenAI' : 'OpenAI'} API for wand generation`,
|
||||
{
|
||||
stream,
|
||||
historyLength: history.length,
|
||||
endpoint: useWandAzure ? azureEndpoint : 'api.openai.com',
|
||||
model: useWandAzure ? wandModelName : 'gpt-4o',
|
||||
apiVersion: useWandAzure ? azureApiVersion : 'N/A',
|
||||
}
|
||||
)
|
||||
|
||||
// For streaming responses
|
||||
if (stream) {
|
||||
try {
|
||||
const streamCompletion = await openai?.chat.completions.create({
|
||||
model: 'gpt-4o',
|
||||
logger.debug(
|
||||
`[${requestId}] Starting streaming request to ${useWandAzure ? 'Azure OpenAI' : 'OpenAI'}`
|
||||
)
|
||||
|
||||
const streamCompletion = await client.chat.completions.create({
|
||||
model: useWandAzure ? wandModelName : 'gpt-4o',
|
||||
messages: messages,
|
||||
temperature: 0.3,
|
||||
max_tokens: 10000,
|
||||
stream: true,
|
||||
})
|
||||
|
||||
logger.debug(`[${requestId}] Stream connection established successfully`)
|
||||
|
||||
return new Response(
|
||||
new ReadableStream({
|
||||
async start(controller) {
|
||||
@@ -99,21 +127,23 @@ export async function POST(req: NextRequest) {
|
||||
for await (const chunk of streamCompletion) {
|
||||
const content = chunk.choices[0]?.delta?.content || ''
|
||||
if (content) {
|
||||
// Use the same format as codegen API for consistency
|
||||
// Use SSE format identical to chat streaming
|
||||
controller.enqueue(
|
||||
encoder.encode(`${JSON.stringify({ chunk: content, done: false })}\n`)
|
||||
encoder.encode(`data: ${JSON.stringify({ chunk: content })}\n\n`)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Send completion signal
|
||||
controller.enqueue(encoder.encode(`${JSON.stringify({ chunk: '', done: true })}\n`))
|
||||
// Send completion signal in SSE format
|
||||
controller.enqueue(encoder.encode(`data: ${JSON.stringify({ done: true })}\n\n`))
|
||||
controller.close()
|
||||
logger.info(`[${requestId}] Wand generation streaming completed`)
|
||||
} catch (streamError: any) {
|
||||
logger.error(`[${requestId}] Streaming error`, { error: streamError.message })
|
||||
controller.enqueue(
|
||||
encoder.encode(`${JSON.stringify({ error: 'Streaming failed', done: true })}\n`)
|
||||
encoder.encode(
|
||||
`data: ${JSON.stringify({ error: 'Streaming failed', done: true })}\n\n`
|
||||
)
|
||||
)
|
||||
controller.close()
|
||||
}
|
||||
@@ -121,9 +151,10 @@ export async function POST(req: NextRequest) {
|
||||
}),
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'text/plain',
|
||||
'Cache-Control': 'no-cache, no-transform',
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache',
|
||||
Connection: 'keep-alive',
|
||||
'X-Accel-Buffering': 'no',
|
||||
},
|
||||
}
|
||||
)
|
||||
@@ -141,8 +172,8 @@ export async function POST(req: NextRequest) {
|
||||
}
|
||||
|
||||
// For non-streaming responses
|
||||
const completion = await openai?.chat.completions.create({
|
||||
model: 'gpt-4o',
|
||||
const completion = await client.chat.completions.create({
|
||||
model: useWandAzure ? wandModelName : 'gpt-4o',
|
||||
messages: messages,
|
||||
temperature: 0.3,
|
||||
max_tokens: 10000,
|
||||
@@ -151,9 +182,11 @@ export async function POST(req: NextRequest) {
|
||||
const generatedContent = completion.choices[0]?.message?.content?.trim()
|
||||
|
||||
if (!generatedContent) {
|
||||
logger.error(`[${requestId}] OpenAI response was empty or invalid.`)
|
||||
logger.error(
|
||||
`[${requestId}] ${useWandAzure ? 'Azure OpenAI' : 'OpenAI'} response was empty or invalid.`
|
||||
)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Failed to generate content. OpenAI response was empty.' },
|
||||
{ success: false, error: 'Failed to generate content. AI response was empty.' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
@@ -171,7 +204,9 @@ export async function POST(req: NextRequest) {
|
||||
|
||||
if (error instanceof OpenAI.APIError) {
|
||||
status = error.status || 500
|
||||
logger.error(`[${requestId}] OpenAI API Error: ${status} - ${error.message}`)
|
||||
logger.error(
|
||||
`[${requestId}] ${useWandAzure ? 'Azure OpenAI' : 'OpenAI'} API Error: ${status} - ${error.message}`
|
||||
)
|
||||
|
||||
if (status === 401) {
|
||||
clientErrorMessage = 'Authentication failed. Please check your API key configuration.'
|
||||
@@ -181,6 +216,10 @@ export async function POST(req: NextRequest) {
|
||||
clientErrorMessage =
|
||||
'The wand generation service is currently unavailable. Please try again later.'
|
||||
}
|
||||
} else if (useWandAzure && error.message?.includes('DeploymentNotFound')) {
|
||||
clientErrorMessage =
|
||||
'Azure OpenAI deployment not found. Please check your model deployment configuration.'
|
||||
status = 404
|
||||
}
|
||||
|
||||
return NextResponse.json(
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { env } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getUserEntityPermissions } from '@/lib/permissions/utils'
|
||||
import { getOAuthToken } from '@/app/api/auth/oauth/utils'
|
||||
import { db } from '@/db'
|
||||
import { webhook, workflow } from '@/db/schema'
|
||||
|
||||
@@ -242,6 +244,167 @@ export async function DELETE(
|
||||
|
||||
const foundWebhook = webhookData.webhook
|
||||
|
||||
// If it's an Airtable webhook, delete it from Airtable first
|
||||
if (foundWebhook.provider === 'airtable') {
|
||||
try {
|
||||
const { baseId, externalId } = (foundWebhook.providerConfig || {}) as {
|
||||
baseId?: string
|
||||
externalId?: string
|
||||
}
|
||||
|
||||
if (!baseId) {
|
||||
logger.warn(`[${requestId}] Missing baseId for Airtable webhook deletion.`, {
|
||||
webhookId: id,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{ error: 'Missing baseId for Airtable webhook deletion' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Get access token for the workflow owner
|
||||
const userIdForToken = webhookData.workflow.userId
|
||||
const accessToken = await getOAuthToken(userIdForToken, 'airtable')
|
||||
if (!accessToken) {
|
||||
logger.warn(
|
||||
`[${requestId}] Could not retrieve Airtable access token for user ${userIdForToken}. Cannot delete webhook in Airtable.`,
|
||||
{ webhookId: id }
|
||||
)
|
||||
return NextResponse.json(
|
||||
{ error: 'Airtable access token not found for webhook deletion' },
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
// Resolve externalId if missing by listing webhooks and matching our notificationUrl
|
||||
let resolvedExternalId: string | undefined = externalId
|
||||
|
||||
if (!resolvedExternalId) {
|
||||
try {
|
||||
const requestOrigin = new URL(request.url).origin
|
||||
const effectiveOrigin = requestOrigin.includes('localhost')
|
||||
? env.NEXT_PUBLIC_APP_URL || requestOrigin
|
||||
: requestOrigin
|
||||
const expectedNotificationUrl = `${effectiveOrigin}/api/webhooks/trigger/${foundWebhook.path}`
|
||||
|
||||
const listUrl = `https://api.airtable.com/v0/bases/${baseId}/webhooks`
|
||||
const listResp = await fetch(listUrl, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
const listBody = await listResp.json().catch(() => null)
|
||||
|
||||
if (listResp.ok && listBody && Array.isArray(listBody.webhooks)) {
|
||||
const match = listBody.webhooks.find((w: any) => {
|
||||
const url: string | undefined = w?.notificationUrl
|
||||
if (!url) return false
|
||||
// Prefer exact match; fallback to suffix match to handle origin/host remaps
|
||||
return (
|
||||
url === expectedNotificationUrl ||
|
||||
url.endsWith(`/api/webhooks/trigger/${foundWebhook.path}`)
|
||||
)
|
||||
})
|
||||
if (match?.id) {
|
||||
resolvedExternalId = match.id as string
|
||||
// Persist resolved externalId for future operations
|
||||
try {
|
||||
await db
|
||||
.update(webhook)
|
||||
.set({
|
||||
providerConfig: {
|
||||
...(foundWebhook.providerConfig || {}),
|
||||
externalId: resolvedExternalId,
|
||||
},
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(webhook.id, id))
|
||||
} catch {
|
||||
// non-fatal persistence error
|
||||
}
|
||||
logger.info(`[${requestId}] Resolved Airtable externalId by listing webhooks`, {
|
||||
baseId,
|
||||
externalId: resolvedExternalId,
|
||||
})
|
||||
} else {
|
||||
logger.warn(`[${requestId}] Could not resolve Airtable externalId from list`, {
|
||||
baseId,
|
||||
expectedNotificationUrl,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
logger.warn(`[${requestId}] Failed to list Airtable webhooks to resolve externalId`, {
|
||||
baseId,
|
||||
status: listResp.status,
|
||||
body: listBody,
|
||||
})
|
||||
}
|
||||
} catch (e: any) {
|
||||
logger.warn(`[${requestId}] Error attempting to resolve Airtable externalId`, {
|
||||
error: e?.message,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// If still not resolvable, skip remote deletion but proceed with local delete
|
||||
if (!resolvedExternalId) {
|
||||
logger.info(
|
||||
`[${requestId}] Airtable externalId not found; skipping remote deletion and proceeding to remove local record`,
|
||||
{ baseId }
|
||||
)
|
||||
}
|
||||
|
||||
if (resolvedExternalId) {
|
||||
const airtableDeleteUrl = `https://api.airtable.com/v0/bases/${baseId}/webhooks/${resolvedExternalId}`
|
||||
const airtableResponse = await fetch(airtableDeleteUrl, {
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
// Attempt to parse error body for better diagnostics
|
||||
if (!airtableResponse.ok) {
|
||||
let responseBody: any = null
|
||||
try {
|
||||
responseBody = await airtableResponse.json()
|
||||
} catch {
|
||||
// ignore parse errors
|
||||
}
|
||||
|
||||
logger.error(
|
||||
`[${requestId}] Failed to delete Airtable webhook in Airtable. Status: ${airtableResponse.status}`,
|
||||
{ baseId, externalId: resolvedExternalId, response: responseBody }
|
||||
)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to delete webhook from Airtable',
|
||||
details:
|
||||
(responseBody && (responseBody.error?.message || responseBody.error)) ||
|
||||
`Status ${airtableResponse.status}`,
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Successfully deleted Airtable webhook in Airtable`, {
|
||||
baseId,
|
||||
externalId: resolvedExternalId,
|
||||
})
|
||||
}
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Error deleting Airtable webhook`, {
|
||||
webhookId: id,
|
||||
error: error.message,
|
||||
stack: error.stack,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{ error: 'Failed to delete webhook from Airtable', details: error.message },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// If it's a Telegram webhook, delete it from Telegram first
|
||||
if (foundWebhook.provider === 'telegram') {
|
||||
try {
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import { nanoid } from 'nanoid'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { verifyCronAuth } from '@/lib/auth/internal'
|
||||
import { Logger } from '@/lib/logs/console/logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { acquireLock, releaseLock } from '@/lib/redis'
|
||||
import { pollGmailWebhooks } from '@/lib/webhooks/gmail-polling-service'
|
||||
|
||||
const logger = new Logger('GmailPollingAPI')
|
||||
const logger = createLogger('GmailPollingAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const maxDuration = 180 // Allow up to 3 minutes for polling to complete
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import { nanoid } from 'nanoid'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { verifyCronAuth } from '@/lib/auth/internal'
|
||||
import { Logger } from '@/lib/logs/console/logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { acquireLock, releaseLock } from '@/lib/redis'
|
||||
import { pollOutlookWebhooks } from '@/lib/webhooks/outlook-polling-service'
|
||||
|
||||
const logger = new Logger('OutlookPollingAPI')
|
||||
const logger = createLogger('OutlookPollingAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const maxDuration = 180 // Allow up to 3 minutes for polling to complete
|
||||
|
||||
@@ -5,7 +5,22 @@ import { NextRequest } from 'next/server'
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { createMockRequest, mockExecutionDependencies } from '@/app/api/__test-utils__/utils'
|
||||
import {
|
||||
createMockRequest,
|
||||
mockExecutionDependencies,
|
||||
mockTriggerDevSdk,
|
||||
} from '@/app/api/__test-utils__/utils'
|
||||
|
||||
// Prefer mocking the background module to avoid loading Trigger.dev at all during tests
|
||||
vi.mock('@/background/webhook-execution', () => ({
|
||||
executeWebhookJob: vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
workflowId: 'test-workflow-id',
|
||||
executionId: 'test-exec-id',
|
||||
output: {},
|
||||
executedAt: new Date().toISOString(),
|
||||
}),
|
||||
}))
|
||||
|
||||
const hasProcessedMessageMock = vi.fn().mockResolvedValue(false)
|
||||
const markMessageAsProcessedMock = vi.fn().mockResolvedValue(true)
|
||||
@@ -111,6 +126,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
vi.resetAllMocks()
|
||||
|
||||
mockExecutionDependencies()
|
||||
mockTriggerDevSdk()
|
||||
|
||||
vi.doMock('@/services/queue', () => ({
|
||||
RateLimiter: vi.fn().mockImplementation(() => ({
|
||||
@@ -309,11 +325,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
const req = createMockRequest('POST', { event: 'test', id: 'test-123' })
|
||||
const params = Promise.resolve({ path: 'test-path' })
|
||||
|
||||
vi.doMock('@trigger.dev/sdk/v3', () => ({
|
||||
tasks: {
|
||||
trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }),
|
||||
},
|
||||
}))
|
||||
mockTriggerDevSdk()
|
||||
|
||||
const { POST } = await import('@/app/api/webhooks/trigger/[path]/route')
|
||||
const response = await POST(req, { params })
|
||||
@@ -339,11 +351,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
const req = createMockRequest('POST', { event: 'bearer.test' }, headers)
|
||||
const params = Promise.resolve({ path: 'test-path' })
|
||||
|
||||
vi.doMock('@trigger.dev/sdk/v3', () => ({
|
||||
tasks: {
|
||||
trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }),
|
||||
},
|
||||
}))
|
||||
mockTriggerDevSdk()
|
||||
|
||||
const { POST } = await import('@/app/api/webhooks/trigger/[path]/route')
|
||||
const response = await POST(req, { params })
|
||||
@@ -369,11 +377,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
const req = createMockRequest('POST', { event: 'custom.header.test' }, headers)
|
||||
const params = Promise.resolve({ path: 'test-path' })
|
||||
|
||||
vi.doMock('@trigger.dev/sdk/v3', () => ({
|
||||
tasks: {
|
||||
trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }),
|
||||
},
|
||||
}))
|
||||
mockTriggerDevSdk()
|
||||
|
||||
const { POST } = await import('@/app/api/webhooks/trigger/[path]/route')
|
||||
const response = await POST(req, { params })
|
||||
@@ -391,7 +395,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
token: 'case-test-token',
|
||||
})
|
||||
|
||||
vi.doMock('@trigger.dev/sdk/v3', () => ({
|
||||
vi.doMock('@trigger.dev/sdk', () => ({
|
||||
tasks: {
|
||||
trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }),
|
||||
},
|
||||
@@ -430,7 +434,7 @@ describe('Webhook Trigger API Route', () => {
|
||||
secretHeaderName: 'X-Secret-Key',
|
||||
})
|
||||
|
||||
vi.doMock('@trigger.dev/sdk/v3', () => ({
|
||||
vi.doMock('@trigger.dev/sdk', () => ({
|
||||
tasks: {
|
||||
trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }),
|
||||
},
|
||||
|
||||
@@ -1,13 +1,15 @@
import { tasks } from '@trigger.dev/sdk/v3'
import { tasks } from '@trigger.dev/sdk'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkServerSideUsageLimits } from '@/lib/billing'
import { env, isTruthy } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import {
handleSlackChallenge,
handleWhatsAppVerification,
validateMicrosoftTeamsSignature,
} from '@/lib/webhooks/utils'
import { executeWebhookJob } from '@/background/webhook-execution'
import { db } from '@/db'
import { subscription, webhook, workflow } from '@/db/schema'
import { RateLimiter } from '@/services/queue'
@@ -17,6 +19,7 @@ const logger = createLogger('WebhookTriggerAPI')

export const dynamic = 'force-dynamic'
export const maxDuration = 300
export const runtime = 'nodejs'

/**
* Webhook Verification Handler (GET)
@@ -330,10 +333,9 @@ export async function POST(
// Continue processing - better to risk usage limit bypass than fail webhook
}

// --- PHASE 5: Queue webhook execution via trigger.dev ---
// --- PHASE 5: Queue webhook execution (trigger.dev or direct based on env) ---
try {
// Queue the webhook execution task
const handle = await tasks.trigger('webhook-execution', {
const payload = {
webhookId: foundWebhook.id,
workflowId: foundWorkflow.id,
userId: foundWorkflow.userId,
@@ -342,11 +344,24 @@ export async function POST(
headers: Object.fromEntries(request.headers.entries()),
path,
blockId: foundWebhook.blockId,
})
}

logger.info(
`[${requestId}] Queued webhook execution task ${handle.id} for ${foundWebhook.provider} webhook`
)
const useTrigger = isTruthy(env.TRIGGER_DEV_ENABLED)

if (useTrigger) {
const handle = await tasks.trigger('webhook-execution', payload)
logger.info(
`[${requestId}] Queued webhook execution task ${handle.id} for ${foundWebhook.provider} webhook`
)
} else {
// Fire-and-forget direct execution to avoid blocking webhook response
void executeWebhookJob(payload).catch((error) => {
logger.error(`[${requestId}] Direct webhook execution failed`, error)
})
logger.info(
`[${requestId}] Queued direct webhook execution for ${foundWebhook.provider} webhook (Trigger.dev disabled)`
)
}

// Return immediate acknowledgment with provider-specific format
if (foundWebhook.provider === 'microsoftteams') {
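The rewritten PHASE 5 block gates queuing on the TRIGGER_DEV_ENABLED flag: when it is truthy the payload is queued on Trigger.dev, otherwise executeWebhookJob runs the job in-process without blocking the webhook response. A condensed sketch of that dispatch using the same identifiers as the route; the standalone helper shape is illustrative, not part of the diff:

async function queueWebhookExecution(payload: Record<string, unknown>) {
  if (isTruthy(env.TRIGGER_DEV_ENABLED)) {
    // Hosted path: hand the job to Trigger.dev and log the returned task handle
    const handle = await tasks.trigger('webhook-execution', payload)
    logger.info(`Queued webhook execution task ${handle.id}`)
  } else {
    // Self-hosted path: fire-and-forget so the provider still gets a fast acknowledgment
    void executeWebhookJob(payload).catch((error) => {
      logger.error('Direct webhook execution failed', error)
    })
  }
}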
@@ -1,4 +1,4 @@
import { tasks } from '@trigger.dev/sdk/v3'
import { tasks } from '@trigger.dev/sdk'
import { eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
@@ -540,7 +540,7 @@ export async function POST(
)
}

// Rate limit passed - trigger the task
// Rate limit passed - always use Trigger.dev for async executions
const handle = await tasks.trigger('workflow-execution', {
workflowId,
userId: authenticatedUserId,

@@ -8,7 +8,7 @@ import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions, hasAdminPermission } from '@/lib/permissions/utils'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
import { db } from '@/db'
import { apiKey as apiKeyTable, workflow } from '@/db/schema'
import { apiKey as apiKeyTable, templates, workflow } from '@/db/schema'

const logger = createLogger('WorkflowByIdAPI')

@@ -218,6 +218,48 @@ export async function DELETE(
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}

// Check if workflow has published templates before deletion
const { searchParams } = new URL(request.url)
const checkTemplates = searchParams.get('check-templates') === 'true'
const deleteTemplatesParam = searchParams.get('deleteTemplates')

if (checkTemplates) {
// Return template information for frontend to handle
const publishedTemplates = await db
.select()
.from(templates)
.where(eq(templates.workflowId, workflowId))

return NextResponse.json({
hasPublishedTemplates: publishedTemplates.length > 0,
count: publishedTemplates.length,
publishedTemplates: publishedTemplates.map((t) => ({
id: t.id,
name: t.name,
views: t.views,
stars: t.stars,
})),
})
}

// Handle template deletion based on user choice
if (deleteTemplatesParam !== null) {
const deleteTemplates = deleteTemplatesParam === 'delete'

if (deleteTemplates) {
// Delete all templates associated with this workflow
await db.delete(templates).where(eq(templates.workflowId, workflowId))
logger.info(`[${requestId}] Deleted templates for workflow ${workflowId}`)
} else {
// Orphan the templates (set workflowId to null)
await db
.update(templates)
.set({ workflowId: null })
.where(eq(templates.workflowId, workflowId))
logger.info(`[${requestId}] Orphaned templates for workflow ${workflowId}`)
}
}

await db.delete(workflow).where(eq(workflow.id, workflowId))

const elapsed = Date.now() - startTime
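The DELETE handler above now reads two query parameters: check-templates=true makes it return template information without deleting anything, and deleteTemplates chooses between removing the published templates ('delete') or orphaning them (any other value sets their workflowId to null). A hypothetical caller, matching the endpoint and response shape shown in the diff:

async function deleteWorkflowWithTemplateChoice(workflowId: string, choice: 'keep' | 'delete') {
  // Probe first: with check-templates=true the route only reports on templates
  const probe = await fetch(`/api/workflows/${workflowId}?check-templates=true`, { method: 'DELETE' })
  const { hasPublishedTemplates } = await probe.json()

  // Delete for real, forwarding the user's choice only when published templates exist
  const query = hasPublishedTemplates ? `?deleteTemplates=${choice}` : ''
  await fetch(`/api/workflows/${workflowId}${query}`, { method: 'DELETE' })
}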
@@ -1,4 +1,4 @@
import { and, eq } from 'drizzle-orm'
import { and, eq, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
@@ -8,7 +8,7 @@ const logger = createLogger('WorkspaceByIdAPI')

import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { db } from '@/db'
import { knowledgeBase, permissions, workspace } from '@/db/schema'
import { knowledgeBase, permissions, templates, workspace } from '@/db/schema'

export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const { id } = await params
@@ -19,6 +19,8 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
}

const workspaceId = id
const url = new URL(request.url)
const checkTemplates = url.searchParams.get('check-templates') === 'true'

// Check if user has any access to this workspace
const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
@@ -26,6 +28,42 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Workspace not found or access denied' }, { status: 404 })
}

// If checking for published templates before deletion
if (checkTemplates) {
try {
// Get all workflows in this workspace
const workspaceWorkflows = await db
.select({ id: workflow.id })
.from(workflow)
.where(eq(workflow.workspaceId, workspaceId))

if (workspaceWorkflows.length === 0) {
return NextResponse.json({ hasPublishedTemplates: false, publishedTemplates: [] })
}

const workflowIds = workspaceWorkflows.map((w) => w.id)

// Check for published templates that reference these workflows
const publishedTemplates = await db
.select({
id: templates.id,
name: templates.name,
workflowId: templates.workflowId,
})
.from(templates)
.where(inArray(templates.workflowId, workflowIds))

return NextResponse.json({
hasPublishedTemplates: publishedTemplates.length > 0,
publishedTemplates,
count: publishedTemplates.length,
})
} catch (error) {
logger.error(`Error checking published templates for workspace ${workspaceId}:`, error)
return NextResponse.json({ error: 'Failed to check published templates' }, { status: 500 })
}
}

// Get workspace details
const workspaceDetails = await db
.select()
@@ -108,6 +146,8 @@ export async function DELETE(
}

const workspaceId = id
const body = await request.json().catch(() => ({}))
const { deleteTemplates = false } = body // User's choice: false = keep templates (recommended), true = delete templates

// Check if user has admin permissions to delete workspace
const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
@@ -116,10 +156,39 @@ export async function DELETE(
}

try {
logger.info(`Deleting workspace ${workspaceId} for user ${session.user.id}`)
logger.info(
`Deleting workspace ${workspaceId} for user ${session.user.id}, deleteTemplates: ${deleteTemplates}`
)

// Delete workspace and all related data in a transaction
await db.transaction(async (tx) => {
// Get all workflows in this workspace before deletion
const workspaceWorkflows = await tx
.select({ id: workflow.id })
.from(workflow)
.where(eq(workflow.workspaceId, workspaceId))

if (workspaceWorkflows.length > 0) {
const workflowIds = workspaceWorkflows.map((w) => w.id)

// Handle templates based on user choice
if (deleteTemplates) {
// Delete published templates that reference these workflows
await tx.delete(templates).where(inArray(templates.workflowId, workflowIds))
logger.info(`Deleted templates for workflows in workspace ${workspaceId}`)
} else {
// Set workflowId to null for templates to create "orphaned" templates
// This allows templates to remain in marketplace but without source workflows
await tx
.update(templates)
.set({ workflowId: null })
.where(inArray(templates.workflowId, workflowIds))
logger.info(
`Updated templates to orphaned status for workflows in workspace ${workspaceId}`
)
}
}

// Delete all workflows in the workspace - database cascade will handle all workflow-related data
// The database cascade will handle deleting related workflow_blocks, workflow_edges, workflow_subflows,
// workflow_logs, workflow_execution_snapshots, workflow_execution_logs, workflow_execution_trace_spans,
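When a workspace is deleted with deleteTemplates left false, published templates survive but their workflowId is set to null, so downstream code has to treat the field as nullable (the Template interface later in this change becomes workflowId: string | null). An illustrative consumer of that orphaned state; the helper itself is hypothetical:

function templateSourceLabel(template: { workflowId: string | null }): string {
  // Orphaned templates stay in the gallery but no longer point at a source workflow
  return template.workflowId === null
    ? 'source workflow deleted'
    : `backed by workflow ${template.workflowId}`
}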
@@ -91,6 +91,7 @@ describe('Workspace Invitations API Route', () => {
env: {
RESEND_API_KEY: 'test-resend-key',
NEXT_PUBLIC_APP_URL: 'https://test.sim.ai',
FROM_EMAIL_ADDRESS: 'Sim <noreply@test.sim.ai>',
EMAIL_DOMAIN: 'test.sim.ai',
},
}))

@@ -2,12 +2,12 @@ import { randomUUID } from 'crypto'
import { render } from '@react-email/render'
import { and, eq, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { Resend } from 'resend'
import { WorkspaceInvitationEmail } from '@/components/emails/workspace-invitation'
import { getSession } from '@/lib/auth'
import { sendEmail } from '@/lib/email/mailer'
import { getFromEmailAddress } from '@/lib/email/utils'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getEmailDomain } from '@/lib/urls/utils'
import { db } from '@/db'
import {
permissions,
@@ -20,7 +20,6 @@ import {
export const dynamic = 'force-dynamic'

const logger = createLogger('WorkspaceInvitationsAPI')
const resend = env.RESEND_API_KEY ? new Resend(env.RESEND_API_KEY) : null

type PermissionType = (typeof permissionTypeEnum.enumValues)[number]

@@ -241,30 +240,23 @@ async function sendInvitationEmail({
})
)

if (!resend) {
logger.error('RESEND_API_KEY not configured')
return NextResponse.json(
{
error:
'Email service not configured. Please set RESEND_API_KEY in environment variables.',
},
{ status: 500 }
)
}

const emailDomain = env.EMAIL_DOMAIN || getEmailDomain()
const fromAddress = `noreply@${emailDomain}`
const fromAddress = getFromEmailAddress()

logger.info(`Attempting to send email from ${fromAddress} to ${to}`)

const result = await resend.emails.send({
from: fromAddress,
const result = await sendEmail({
to,
subject: `You've been invited to join "${workspaceName}" on Sim`,
html: emailHtml,
from: fromAddress,
emailType: 'transactional',
})

logger.info(`Invitation email sent successfully to ${to}`, { result })
if (result.success) {
logger.info(`Invitation email sent successfully to ${to}`, { result })
} else {
logger.error(`Failed to send invitation email to ${to}`, { error: result.message })
}
} catch (error) {
logger.error('Error sending invitation email:', error)
// Continue even if email fails - the invitation is still created

apps/sim/app/chat/[subdomain]/chat-client.css (new file, 167 lines)
@@ -0,0 +1,167 @@
|
||||
/* Force light mode for chat subdomain by overriding dark mode utilities */
|
||||
/* This file uses CSS variables from globals.css light mode theme */
|
||||
|
||||
/* When inside the chat layout, force all light mode CSS variables */
|
||||
.chat-light-wrapper {
|
||||
/* Core Colors - from globals.css light mode */
|
||||
--background: 0 0% 100%;
|
||||
--foreground: 0 0% 3.9%;
|
||||
|
||||
/* Card Colors */
|
||||
--card: 0 0% 99.2%;
|
||||
--card-foreground: 0 0% 3.9%;
|
||||
|
||||
/* Popover Colors */
|
||||
--popover: 0 0% 100%;
|
||||
--popover-foreground: 0 0% 3.9%;
|
||||
|
||||
/* Primary Colors */
|
||||
--primary: 0 0% 11.2%;
|
||||
--primary-foreground: 0 0% 98%;
|
||||
|
||||
/* Secondary Colors */
|
||||
--secondary: 0 0% 96.1%;
|
||||
--secondary-foreground: 0 0% 11.2%;
|
||||
|
||||
/* Muted Colors */
|
||||
--muted: 0 0% 96.1%;
|
||||
--muted-foreground: 0 0% 46.9%;
|
||||
|
||||
/* Accent Colors */
|
||||
--accent: 0 0% 92.5%;
|
||||
--accent-foreground: 0 0% 11.2%;
|
||||
|
||||
/* Destructive Colors */
|
||||
--destructive: 0 84.2% 60.2%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
|
||||
/* Border & Input Colors */
|
||||
--border: 0 0% 89.8%;
|
||||
--input: 0 0% 89.8%;
|
||||
--ring: 0 0% 3.9%;
|
||||
|
||||
/* Border Radius */
|
||||
--radius: 0.5rem;
|
||||
|
||||
/* Scrollbar Properties */
|
||||
--scrollbar-track: 0 0% 85%;
|
||||
--scrollbar-thumb: 0 0% 65%;
|
||||
--scrollbar-thumb-hover: 0 0% 55%;
|
||||
--scrollbar-size: 8px;
|
||||
|
||||
/* Workflow Properties */
|
||||
--workflow-background: 0 0% 100%;
|
||||
--workflow-dots: 0 0% 94.5%;
|
||||
--card-background: 0 0% 99.2%;
|
||||
--card-border: 0 0% 89.8%;
|
||||
--card-text: 0 0% 3.9%;
|
||||
--card-hover: 0 0% 96.1%;
|
||||
|
||||
/* Base Component Properties */
|
||||
--base-muted-foreground: #737373;
|
||||
|
||||
/* Gradient Colors */
|
||||
--gradient-primary: 263 85% 70%;
|
||||
--gradient-secondary: 336 95% 65%;
|
||||
|
||||
/* Brand Colors */
|
||||
--brand-primary-hex: #701ffc;
|
||||
--brand-primary-hover-hex: #802fff;
|
||||
--brand-secondary-hex: #6518e6;
|
||||
--brand-accent-hex: #9d54ff;
|
||||
--brand-accent-hover-hex: #a66fff;
|
||||
--brand-background-hex: #0c0c0c;
|
||||
|
||||
/* UI Surface Colors */
|
||||
--surface-elevated: #202020;
|
||||
}
|
||||
|
||||
/* Override dark mode utility classes using CSS variables */
|
||||
.chat-light-wrapper :is(.dark\:bg-black) {
|
||||
background-color: hsl(var(--secondary));
|
||||
}
|
||||
|
||||
.chat-light-wrapper :is(.dark\:bg-gray-900) {
|
||||
background-color: hsl(var(--background));
|
||||
}
|
||||
|
||||
.chat-light-wrapper :is(.dark\:bg-gray-800) {
|
||||
background-color: hsl(var(--secondary));
|
||||
}
|
||||
|
||||
.chat-light-wrapper :is(.dark\:bg-gray-700) {
|
||||
background-color: hsl(var(--accent));
|
||||
}
|
||||
|
||||
.chat-light-wrapper :is(.dark\:bg-gray-600) {
|
||||
background-color: hsl(var(--muted));
|
||||
}
|
||||
|
||||
.chat-light-wrapper :is(.dark\:bg-gray-300) {
|
||||
background-color: hsl(var(--primary));
|
||||
}
|
||||
|
||||
/* Text color overrides using CSS variables */
|
||||
.chat-light-wrapper :is(.dark\:text-gray-100) {
|
||||
color: hsl(var(--primary));
|
||||
}
|
||||
|
||||
.chat-light-wrapper :is(.dark\:text-gray-200) {
|
||||
color: hsl(var(--foreground));
|
||||
}
|
||||
|
||||
.chat-light-wrapper :is(.dark\:text-gray-300) {
|
||||
color: hsl(var(--muted-foreground));
|
||||
}
|
||||
|
||||
.chat-light-wrapper :is(.dark\:text-gray-400) {
|
||||
color: hsl(var(--muted-foreground));
|
||||
}
|
||||
|
||||
.chat-light-wrapper :is(.dark\:text-neutral-600) {
|
||||
color: hsl(var(--muted-foreground));
|
||||
}
|
||||
|
||||
.chat-light-wrapper :is(.dark\:text-blue-400) {
|
||||
color: var(--brand-accent-hex);
|
||||
}
|
||||
|
||||
/* Border color overrides using CSS variables */
|
||||
.chat-light-wrapper :is(.dark\:border-gray-700) {
|
||||
border-color: hsl(var(--border));
|
||||
}
|
||||
|
||||
.chat-light-wrapper :is(.dark\:border-gray-800) {
|
||||
border-color: hsl(var(--border));
|
||||
}
|
||||
|
||||
.chat-light-wrapper :is(.dark\:border-gray-600) {
|
||||
border-color: hsl(var(--border));
|
||||
}
|
||||
|
||||
.chat-light-wrapper :is(.dark\:divide-gray-700) > * + * {
|
||||
border-color: hsl(var(--border));
|
||||
}
|
||||
|
||||
/* Hover state overrides */
|
||||
.chat-light-wrapper :is(.dark\:hover\:bg-gray-800\/60:hover) {
|
||||
background-color: hsl(var(--card-hover));
|
||||
}
|
||||
|
||||
/* Code blocks specific overrides using CSS variables */
|
||||
.chat-light-wrapper pre:is(.dark\:bg-black) {
|
||||
background-color: hsl(var(--workflow-dots));
|
||||
}
|
||||
|
||||
.chat-light-wrapper code:is(.dark\:bg-gray-700) {
|
||||
background-color: hsl(var(--accent));
|
||||
}
|
||||
|
||||
.chat-light-wrapper code:is(.dark\:text-gray-200) {
|
||||
color: hsl(var(--foreground));
|
||||
}
|
||||
|
||||
/* Force color scheme */
|
||||
.chat-light-wrapper {
|
||||
color-scheme: light !important;
|
||||
}
|
||||
@@ -481,7 +481,7 @@ export default function ChatClient({ subdomain }: { subdomain: string }) {
|
||||
|
||||
// Standard text-based chat interface
|
||||
return (
|
||||
<div className='fixed inset-0 z-[100] flex flex-col bg-background'>
|
||||
<div className='fixed inset-0 z-[100] flex flex-col bg-background text-foreground'>
|
||||
{/* Header component */}
|
||||
<ChatHeader chatConfig={chatConfig} starCount={starCount} />
|
||||
|
||||
|
||||
@@ -22,53 +22,14 @@ export function ChatHeader({ chatConfig, starCount }: ChatHeaderProps) {
|
||||
return (
|
||||
<div className='flex items-center justify-between bg-background/95 px-6 py-4 pt-6 backdrop-blur supports-[backdrop-filter]:bg-background/60 md:px-8 md:pt-4'>
|
||||
<div className='flex items-center gap-4'>
|
||||
{customImage ? (
|
||||
{customImage && (
|
||||
<img
|
||||
src={customImage}
|
||||
alt={`${chatConfig?.title || 'Chat'} logo`}
|
||||
className='h-12 w-12 rounded-md object-cover'
|
||||
className='h-8 w-8 rounded-md object-cover'
|
||||
/>
|
||||
) : (
|
||||
// Default Sim Studio logo when no custom image is provided
|
||||
<div
|
||||
className='flex h-12 w-12 items-center justify-center rounded-md'
|
||||
style={{ backgroundColor: primaryColor }}
|
||||
>
|
||||
<svg
|
||||
width='20'
|
||||
height='20'
|
||||
viewBox='0 0 50 50'
|
||||
fill='none'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
>
|
||||
<path
|
||||
d='M34.1455 20.0728H16.0364C12.7026 20.0728 10 22.7753 10 26.1091V35.1637C10 38.4975 12.7026 41.2 16.0364 41.2H34.1455C37.4792 41.2 40.1818 38.4975 40.1818 35.1637V26.1091C40.1818 22.7753 37.4792 20.0728 34.1455 20.0728Z'
|
||||
fill={primaryColor}
|
||||
stroke='white'
|
||||
strokeWidth='3.5'
|
||||
strokeLinecap='round'
|
||||
strokeLinejoin='round'
|
||||
/>
|
||||
<path
|
||||
d='M25.0919 14.0364C26.7588 14.0364 28.1101 12.6851 28.1101 11.0182C28.1101 9.35129 26.7588 8 25.0919 8C23.425 8 22.0737 9.35129 22.0737 11.0182C22.0737 12.6851 23.425 14.0364 25.0919 14.0364Z'
|
||||
fill={primaryColor}
|
||||
stroke='white'
|
||||
strokeWidth='4'
|
||||
strokeLinecap='round'
|
||||
strokeLinejoin='round'
|
||||
/>
|
||||
<path
|
||||
d='M25.0915 14.856V19.0277M20.5645 32.1398V29.1216M29.619 29.1216V32.1398'
|
||||
stroke='white'
|
||||
strokeWidth='4'
|
||||
strokeLinecap='round'
|
||||
strokeLinejoin='round'
|
||||
/>
|
||||
<circle cx='25' cy='11' r='2' fill={primaryColor} />
|
||||
</svg>
|
||||
</div>
|
||||
)}
|
||||
<h2 className='font-medium text-lg'>
|
||||
<h2 className='font-medium text-foreground text-lg'>
|
||||
{chatConfig?.customizations?.headerText || chatConfig?.title || 'Chat'}
|
||||
</h2>
|
||||
</div>
|
||||
|
||||
@@ -2,10 +2,10 @@
|
||||
|
||||
export function ChatLoadingState() {
|
||||
return (
|
||||
<div className='flex min-h-screen items-center justify-center bg-gray-50'>
|
||||
<div className='flex min-h-screen items-center justify-center bg-background text-foreground'>
|
||||
<div className='animate-pulse text-center'>
|
||||
<div className='mx-auto mb-4 h-8 w-48 rounded bg-gray-200' />
|
||||
<div className='mx-auto h-4 w-64 rounded bg-gray-200' />
|
||||
<div className='mx-auto mb-4 h-8 w-48 rounded bg-muted' />
|
||||
<div className='mx-auto h-4 w-64 rounded bg-muted' />
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
|
||||
apps/sim/app/chat/[subdomain]/layout.tsx (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
'use client'
|
||||
|
||||
import { ThemeProvider } from 'next-themes'
|
||||
import './chat-client.css'
|
||||
|
||||
export default function ChatLayout({ children }: { children: React.ReactNode }) {
|
||||
return (
|
||||
<ThemeProvider
|
||||
attribute='class'
|
||||
forcedTheme='light'
|
||||
enableSystem={false}
|
||||
disableTransitionOnChange
|
||||
>
|
||||
<div className='light chat-light-wrapper' style={{ colorScheme: 'light' }}>
|
||||
{children}
|
||||
</div>
|
||||
</ThemeProvider>
|
||||
)
|
||||
}
|
||||
@@ -3,6 +3,7 @@ import { SpeedInsights } from '@vercel/speed-insights/next'
|
||||
import type { Metadata, Viewport } from 'next'
|
||||
import { PublicEnvScript } from 'next-runtime-env'
|
||||
import { BrandedLayout } from '@/components/branded-layout'
|
||||
import { generateThemeCSS } from '@/lib/branding/inject-theme'
|
||||
import { generateBrandedMetadata, generateStructuredData } from '@/lib/branding/metadata'
|
||||
import { env } from '@/lib/env'
|
||||
import { isHosted } from '@/lib/environment'
|
||||
@@ -10,6 +11,8 @@ import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getAssetUrl } from '@/lib/utils'
|
||||
import '@/app/globals.css'
|
||||
|
||||
import { SessionProvider } from '@/lib/session-context'
|
||||
import { ThemeProvider } from '@/app/theme-provider'
|
||||
import { ZoomPrevention } from '@/app/zoom-prevention'
|
||||
|
||||
const logger = createLogger('RootLayout')
|
||||
@@ -45,11 +48,14 @@ if (typeof window !== 'undefined') {
|
||||
}
|
||||
|
||||
export const viewport: Viewport = {
|
||||
themeColor: '#ffffff',
|
||||
width: 'device-width',
|
||||
initialScale: 1,
|
||||
maximumScale: 1,
|
||||
userScalable: false,
|
||||
themeColor: [
|
||||
{ media: '(prefers-color-scheme: light)', color: '#ffffff' },
|
||||
{ media: '(prefers-color-scheme: dark)', color: '#0c0c0c' },
|
||||
],
|
||||
}
|
||||
|
||||
// Generate dynamic metadata based on brand configuration
|
||||
@@ -57,6 +63,7 @@ export const metadata: Metadata = generateBrandedMetadata()
|
||||
|
||||
export default function RootLayout({ children }: { children: React.ReactNode }) {
|
||||
const structuredData = generateStructuredData()
|
||||
const themeCSS = generateThemeCSS()
|
||||
|
||||
return (
|
||||
<html lang='en' suppressHydrationWarning>
|
||||
@@ -69,9 +76,18 @@ export default function RootLayout({ children }: { children: React.ReactNode })
|
||||
}}
|
||||
/>
|
||||
|
||||
{/* Theme CSS Override */}
|
||||
{themeCSS && (
|
||||
<style
|
||||
id='theme-override'
|
||||
dangerouslySetInnerHTML={{
|
||||
__html: themeCSS,
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Meta tags for better SEO */}
|
||||
<meta name='theme-color' content='#ffffff' />
|
||||
<meta name='color-scheme' content='light' />
|
||||
<meta name='color-scheme' content='light dark' />
|
||||
<meta name='format-detection' content='telephone=no' />
|
||||
<meta httpEquiv='x-ua-compatible' content='ie=edge' />
|
||||
|
||||
@@ -107,16 +123,20 @@ export default function RootLayout({ children }: { children: React.ReactNode })
|
||||
)}
|
||||
</head>
|
||||
<body suppressHydrationWarning>
|
||||
<BrandedLayout>
|
||||
<ZoomPrevention />
|
||||
{children}
|
||||
{isHosted && (
|
||||
<>
|
||||
<SpeedInsights />
|
||||
<Analytics />
|
||||
</>
|
||||
)}
|
||||
</BrandedLayout>
|
||||
<ThemeProvider>
|
||||
<SessionProvider>
|
||||
<BrandedLayout>
|
||||
<ZoomPrevention />
|
||||
{children}
|
||||
{isHosted && (
|
||||
<>
|
||||
<SpeedInsights />
|
||||
<Analytics />
|
||||
</>
|
||||
)}
|
||||
</BrandedLayout>
|
||||
</SessionProvider>
|
||||
</ThemeProvider>
|
||||
</body>
|
||||
</html>
|
||||
)
|
||||
|
||||
@@ -11,8 +11,8 @@ export default function manifest(): MetadataRoute.Manifest {
|
||||
'Build and deploy AI agents using our Figma-like canvas. Build, write evals, and deploy AI agent workflows that automate workflows and streamline your business processes.',
|
||||
start_url: '/',
|
||||
display: 'standalone',
|
||||
background_color: '#701FFC', // Default Sim brand primary color
|
||||
theme_color: '#701FFC', // Default Sim brand primary color
|
||||
background_color: brand.theme?.backgroundColor || '#701FFC',
|
||||
theme_color: brand.theme?.primaryColor || '#701FFC',
|
||||
icons: [
|
||||
{
|
||||
src: '/favicon/android-chrome-192x192.png',
|
||||
|
||||
apps/sim/app/theme-provider.tsx (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
'use client'
|
||||
|
||||
import type { ThemeProviderProps } from 'next-themes'
|
||||
import { ThemeProvider as NextThemesProvider } from 'next-themes'
|
||||
|
||||
export function ThemeProvider({ children, ...props }: ThemeProviderProps) {
|
||||
return (
|
||||
<NextThemesProvider
|
||||
attribute='class'
|
||||
defaultTheme='system'
|
||||
enableSystem
|
||||
disableTransitionOnChange
|
||||
storageKey='sim-theme'
|
||||
{...props}
|
||||
>
|
||||
{children}
|
||||
</NextThemesProvider>
|
||||
)
|
||||
}
|
||||
@@ -2,8 +2,8 @@

import React from 'react'
import { TooltipProvider } from '@/components/ui/tooltip'
import { ThemeProvider } from '@/app/workspace/[workspaceId]/providers/theme-provider'
import { WorkspacePermissionsProvider } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { SettingsLoader } from './settings-loader'

interface ProvidersProps {
children: React.ReactNode
@@ -11,11 +11,12 @@ interface ProvidersProps {

const Providers = React.memo<ProvidersProps>(({ children }) => {
return (
<ThemeProvider>
<>
<SettingsLoader />
<TooltipProvider delayDuration={100} skipDelayDuration={0}>
<WorkspacePermissionsProvider>{children}</WorkspacePermissionsProvider>
</TooltipProvider>
</ThemeProvider>
</>
)
})

@@ -0,0 +1,27 @@
'use client'

import { useEffect, useRef } from 'react'
import { useSession } from '@/lib/auth-client'
import { useGeneralStore } from '@/stores/settings/general/store'

/**
* Loads user settings from database once per workspace session.
* This ensures settings are synced from DB on initial load but uses
* localStorage cache for subsequent navigation within the app.
*/
export function SettingsLoader() {
const { data: session, isPending: isSessionPending } = useSession()
const loadSettings = useGeneralStore((state) => state.loadSettings)
const hasLoadedRef = useRef(false)

useEffect(() => {
// Only load settings once per session for authenticated users
if (!isSessionPending && session?.user && !hasLoadedRef.current) {
hasLoadedRef.current = true
// Force load from DB on initial workspace entry
loadSettings(true)
}
}, [isSessionPending, session?.user, loadSettings])

return null
}
@@ -1,23 +0,0 @@
'use client'

import { useEffect } from 'react'
import { useGeneralStore } from '@/stores/settings/general/store'

export function ThemeProvider({ children }: { children: React.ReactNode }) {
const theme = useGeneralStore((state) => state.theme)

useEffect(() => {
const root = window.document.documentElement
root.classList.remove('light', 'dark')

// If theme is system, check system preference
if (theme === 'system') {
const prefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches
root.classList.add(prefersDark ? 'dark' : 'light')
} else {
root.classList.add(theme)
}
}, [theme])

return children
}
@@ -29,7 +29,7 @@ export type CategoryValue = (typeof categories)[number]['value']
|
||||
// Template data structure
|
||||
export interface Template {
|
||||
id: string
|
||||
workflowId: string
|
||||
workflowId: string | null
|
||||
userId: string
|
||||
name: string
|
||||
description: string | null
|
||||
|
||||
@@ -11,6 +11,7 @@ import {
|
||||
DropdownMenuTrigger,
|
||||
} from '@/components/ui/dropdown-menu'
|
||||
import { Label } from '@/components/ui/label'
|
||||
import { getEnv, isTruthy } from '@/lib/env'
|
||||
|
||||
interface ExampleCommandProps {
|
||||
command: string
|
||||
@@ -32,6 +33,7 @@ export function ExampleCommand({
|
||||
}: ExampleCommandProps) {
|
||||
const [mode, setMode] = useState<ExampleMode>('sync')
|
||||
const [exampleType, setExampleType] = useState<ExampleType>('execute')
|
||||
const isAsyncEnabled = isTruthy(getEnv('NEXT_PUBLIC_TRIGGER_DEV_ENABLED'))
|
||||
|
||||
// Format the curl command to use a placeholder for the API key
|
||||
const formatCurlCommand = (command: string, apiKey: string) => {
|
||||
@@ -146,62 +148,67 @@ export function ExampleCommand({
|
||||
<div className='space-y-1.5'>
|
||||
<div className='flex items-center justify-between'>
|
||||
{showLabel && <Label className='font-medium text-sm'>Example</Label>}
|
||||
<div className='flex items-center gap-1'>
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
onClick={() => setMode('sync')}
|
||||
className={`h-6 min-w-[50px] px-2 py-1 text-xs transition-none ${
|
||||
mode === 'sync'
|
||||
? 'border-primary bg-primary text-primary-foreground hover:border-primary hover:bg-primary hover:text-primary-foreground'
|
||||
: ''
|
||||
}`}
|
||||
>
|
||||
Sync
|
||||
</Button>
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
onClick={() => setMode('async')}
|
||||
className={`h-6 min-w-[50px] px-2 py-1 text-xs transition-none ${
|
||||
mode === 'async'
|
||||
? 'border-primary bg-primary text-primary-foreground hover:border-primary hover:bg-primary hover:text-primary-foreground'
|
||||
: ''
|
||||
}`}
|
||||
>
|
||||
Async
|
||||
</Button>
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
className='h-6 min-w-[140px] justify-between px-2 py-1 text-xs'
|
||||
disabled={mode === 'sync'}
|
||||
>
|
||||
<span className='truncate'>{getExampleTitle()}</span>
|
||||
<ChevronDown className='ml-1 h-3 w-3 flex-shrink-0' />
|
||||
</Button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent align='end'>
|
||||
<DropdownMenuItem
|
||||
className='cursor-pointer'
|
||||
onClick={() => setExampleType('execute')}
|
||||
>
|
||||
Async Execution
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem className='cursor-pointer' onClick={() => setExampleType('status')}>
|
||||
Check Job Status
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
className='cursor-pointer'
|
||||
onClick={() => setExampleType('rate-limits')}
|
||||
>
|
||||
Rate Limits & Usage
|
||||
</DropdownMenuItem>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
</div>
|
||||
{isAsyncEnabled && (
|
||||
<div className='flex items-center gap-1'>
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
onClick={() => setMode('sync')}
|
||||
className={`h-6 min-w-[50px] px-2 py-1 text-xs transition-none ${
|
||||
mode === 'sync'
|
||||
? 'border-primary bg-primary text-primary-foreground hover:border-primary hover:bg-primary hover:text-primary-foreground'
|
||||
: ''
|
||||
}`}
|
||||
>
|
||||
Sync
|
||||
</Button>
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
onClick={() => setMode('async')}
|
||||
className={`h-6 min-w-[50px] px-2 py-1 text-xs transition-none ${
|
||||
mode === 'async'
|
||||
? 'border-primary bg-primary text-primary-foreground hover:border-primary hover:bg-primary hover:text-primary-foreground'
|
||||
: ''
|
||||
}`}
|
||||
>
|
||||
Async
|
||||
</Button>
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
className='h-6 min-w-[140px] justify-between px-2 py-1 text-xs'
|
||||
disabled={mode === 'sync'}
|
||||
>
|
||||
<span className='truncate'>{getExampleTitle()}</span>
|
||||
<ChevronDown className='ml-1 h-3 w-3 flex-shrink-0' />
|
||||
</Button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent align='end'>
|
||||
<DropdownMenuItem
|
||||
className='cursor-pointer'
|
||||
onClick={() => setExampleType('execute')}
|
||||
>
|
||||
Async Execution
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
className='cursor-pointer'
|
||||
onClick={() => setExampleType('status')}
|
||||
>
|
||||
Check Job Status
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
className='cursor-pointer'
|
||||
onClick={() => setExampleType('rate-limits')}
|
||||
>
|
||||
Rate Limits & Usage
|
||||
</DropdownMenuItem>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className='group relative h-[120px] rounded-md border bg-background transition-colors hover:bg-muted/50'>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { useState } from 'react'
|
||||
import { useEffect, useState } from 'react'
|
||||
import { zodResolver } from '@hookform/resolvers/zod'
|
||||
import {
|
||||
Award,
|
||||
@@ -18,6 +18,7 @@ import {
|
||||
Database,
|
||||
DollarSign,
|
||||
Edit,
|
||||
Eye,
|
||||
FileText,
|
||||
Folder,
|
||||
Globe,
|
||||
@@ -48,6 +49,16 @@ import {
|
||||
} from 'lucide-react'
|
||||
import { useForm } from 'react-hook-form'
|
||||
import { z } from 'zod'
|
||||
import {
|
||||
AlertDialog,
|
||||
AlertDialogAction,
|
||||
AlertDialogCancel,
|
||||
AlertDialogContent,
|
||||
AlertDialogDescription,
|
||||
AlertDialogFooter,
|
||||
AlertDialogHeader,
|
||||
AlertDialogTitle,
|
||||
} from '@/components/ui/alert-dialog'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { ColorPicker } from '@/components/ui/color-picker'
|
||||
import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog'
|
||||
@@ -68,6 +79,7 @@ import {
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from '@/components/ui/select'
|
||||
import { Skeleton } from '@/components/ui/skeleton'
|
||||
import { Textarea } from '@/components/ui/textarea'
|
||||
import { useSession } from '@/lib/auth-client'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
@@ -100,7 +112,6 @@ interface TemplateModalProps {
|
||||
workflowId: string
|
||||
}
|
||||
|
||||
// Enhanced icon selection with category-relevant icons
|
||||
const icons = [
|
||||
// Content & Documentation
|
||||
{ value: 'FileText', label: 'File Text', component: FileText },
|
||||
@@ -165,6 +176,10 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
|
||||
const { data: session } = useSession()
|
||||
const [isSubmitting, setIsSubmitting] = useState(false)
|
||||
const [iconPopoverOpen, setIconPopoverOpen] = useState(false)
|
||||
const [existingTemplate, setExistingTemplate] = useState<any>(null)
|
||||
const [isLoadingTemplate, setIsLoadingTemplate] = useState(false)
|
||||
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
|
||||
const [isDeleting, setIsDeleting] = useState(false)
|
||||
|
||||
const form = useForm<TemplateFormData>({
|
||||
resolver: zodResolver(templateSchema),
|
||||
@@ -178,6 +193,63 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
|
||||
},
|
||||
})
|
||||
|
||||
// Watch form state to determine if all required fields are valid
|
||||
const formValues = form.watch()
|
||||
const isFormValid =
|
||||
form.formState.isValid &&
|
||||
formValues.name?.trim() &&
|
||||
formValues.description?.trim() &&
|
||||
formValues.author?.trim() &&
|
||||
formValues.category
|
||||
|
||||
// Check for existing template when modal opens
|
||||
useEffect(() => {
|
||||
if (open && workflowId) {
|
||||
checkExistingTemplate()
|
||||
}
|
||||
}, [open, workflowId])
|
||||
|
||||
const checkExistingTemplate = async () => {
|
||||
setIsLoadingTemplate(true)
|
||||
try {
|
||||
const response = await fetch(`/api/templates?workflowId=${workflowId}&limit=1`)
|
||||
if (response.ok) {
|
||||
const result = await response.json()
|
||||
const template = result.data?.[0] || null
|
||||
setExistingTemplate(template)
|
||||
|
||||
// Pre-fill form with existing template data
|
||||
if (template) {
|
||||
form.reset({
|
||||
name: template.name,
|
||||
description: template.description,
|
||||
author: template.author,
|
||||
category: template.category,
|
||||
icon: template.icon,
|
||||
color: template.color,
|
||||
})
|
||||
} else {
|
||||
// No existing template found
|
||||
setExistingTemplate(null)
|
||||
// Reset form to defaults
|
||||
form.reset({
|
||||
name: '',
|
||||
description: '',
|
||||
author: session?.user?.name || session?.user?.email || '',
|
||||
category: '',
|
||||
icon: 'FileText',
|
||||
color: '#3972F6',
|
||||
})
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error checking existing template:', error)
|
||||
setExistingTemplate(null)
|
||||
} finally {
|
||||
setIsLoadingTemplate(false)
|
||||
}
|
||||
}
|
||||
|
||||
const onSubmit = async (data: TemplateFormData) => {
|
||||
if (!session?.user) {
|
||||
logger.error('User not authenticated')
|
||||
@@ -201,21 +273,36 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
|
||||
state: templateState,
|
||||
}
|
||||
|
||||
const response = await fetch('/api/templates', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(templateData),
|
||||
})
|
||||
let response
|
||||
if (existingTemplate) {
|
||||
// Update existing template
|
||||
response = await fetch(`/api/templates/${existingTemplate.id}`, {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(templateData),
|
||||
})
|
||||
} else {
|
||||
// Create new template
|
||||
response = await fetch('/api/templates', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(templateData),
|
||||
})
|
||||
}
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json()
|
||||
throw new Error(errorData.error || 'Failed to create template')
|
||||
throw new Error(
|
||||
errorData.error || `Failed to ${existingTemplate ? 'update' : 'create'} template`
|
||||
)
|
||||
}
|
||||
|
||||
const result = await response.json()
|
||||
logger.info('Template created successfully:', result)
|
||||
logger.info(`Template ${existingTemplate ? 'updated' : 'created'} successfully:`, result)
|
||||
|
||||
// Reset form and close modal
|
||||
form.reset()
|
||||
@@ -241,7 +328,35 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
|
||||
>
|
||||
<DialogHeader className='flex-shrink-0 border-b px-6 py-4'>
|
||||
<div className='flex items-center justify-between'>
|
||||
<DialogTitle className='font-medium text-lg'>Publish Template</DialogTitle>
|
||||
<div className='flex items-center gap-3'>
|
||||
<DialogTitle className='font-medium text-lg'>
|
||||
{isLoadingTemplate
|
||||
? 'Loading...'
|
||||
: existingTemplate
|
||||
? 'Update Template'
|
||||
: 'Publish Template'}
|
||||
</DialogTitle>
|
||||
{existingTemplate && (
|
||||
<div className='flex items-center gap-2'>
|
||||
{existingTemplate.stars > 0 && (
|
||||
<div className='flex items-center gap-1 rounded-full bg-yellow-50 px-2 py-1 dark:bg-yellow-900/20'>
|
||||
<Star className='h-3 w-3 fill-yellow-400 text-yellow-400' />
|
||||
<span className='font-medium text-xs text-yellow-700 dark:text-yellow-300'>
|
||||
{existingTemplate.stars}
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
{existingTemplate.views > 0 && (
|
||||
<div className='flex items-center gap-1 rounded-full bg-blue-50 px-2 py-1 dark:bg-blue-900/20'>
|
||||
<Eye className='h-3 w-3 text-blue-500' />
|
||||
<span className='font-medium text-blue-700 text-xs dark:text-blue-300'>
|
||||
{existingTemplate.views}
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='icon'
|
||||
@@ -259,65 +374,189 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
|
||||
onSubmit={form.handleSubmit(onSubmit)}
|
||||
className='flex flex-1 flex-col overflow-hidden'
|
||||
>
|
||||
<div className='flex-1 overflow-y-auto px-6 py-4'>
|
||||
<div className='space-y-6'>
|
||||
<div className='flex gap-3'>
|
||||
<div className='flex-1 overflow-y-auto px-6 py-6'>
|
||||
{isLoadingTemplate ? (
|
||||
<div className='space-y-6'>
|
||||
{/* Icon and Color row */}
|
||||
<div className='flex gap-3'>
|
||||
<div className='w-20'>
|
||||
<Skeleton className='mb-2 h-4 w-8' /> {/* Label */}
|
||||
<Skeleton className='h-10 w-20' /> {/* Icon picker */}
|
||||
</div>
|
||||
<div className='w-20'>
|
||||
<Skeleton className='mb-2 h-4 w-10' /> {/* Label */}
|
||||
<Skeleton className='h-10 w-20' /> {/* Color picker */}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Name field */}
|
||||
<div>
|
||||
<Skeleton className='mb-2 h-4 w-12' /> {/* Label */}
|
||||
<Skeleton className='h-10 w-full' /> {/* Input */}
|
||||
</div>
|
||||
|
||||
{/* Author and Category row */}
|
||||
<div className='grid grid-cols-2 gap-4'>
|
||||
<div>
|
||||
<Skeleton className='mb-2 h-4 w-14' /> {/* Label */}
|
||||
<Skeleton className='h-10 w-full' /> {/* Input */}
|
||||
</div>
|
||||
<div>
|
||||
<Skeleton className='mb-2 h-4 w-16' /> {/* Label */}
|
||||
<Skeleton className='h-10 w-full' /> {/* Select */}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Description field */}
|
||||
<div>
|
||||
<Skeleton className='mb-2 h-4 w-20' /> {/* Label */}
|
||||
<Skeleton className='h-20 w-full' /> {/* Textarea */}
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<div className='space-y-6'>
|
||||
<div className='flex gap-3'>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name='icon'
|
||||
render={({ field }) => (
|
||||
<FormItem className='w-20'>
|
||||
<FormLabel className='!text-foreground font-medium text-sm'>
|
||||
Icon
|
||||
</FormLabel>
|
||||
<Popover open={iconPopoverOpen} onOpenChange={setIconPopoverOpen}>
|
||||
<PopoverTrigger asChild>
|
||||
<Button variant='outline' role='combobox' className='h-10 w-20 p-0'>
|
||||
<SelectedIconComponent className='h-4 w-4' />
|
||||
</Button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent className='z-50 w-84 p-0' align='start'>
|
||||
<div className='p-3'>
|
||||
<div className='grid max-h-80 grid-cols-8 gap-2 overflow-y-auto'>
|
||||
{icons.map((icon) => {
|
||||
const IconComponent = icon.component
|
||||
return (
|
||||
<button
|
||||
key={icon.value}
|
||||
type='button'
|
||||
onClick={() => {
|
||||
field.onChange(icon.value)
|
||||
setIconPopoverOpen(false)
|
||||
}}
|
||||
className={cn(
|
||||
'flex h-8 w-8 items-center justify-center rounded-md border transition-colors hover:bg-muted',
|
||||
field.value === icon.value &&
|
||||
'bg-primary text-primary-foreground'
|
||||
)}
|
||||
>
|
||||
<IconComponent className='h-4 w-4' />
|
||||
</button>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name='color'
|
||||
render={({ field }) => (
|
||||
<FormItem className='w-20'>
|
||||
<FormLabel className='!text-foreground font-medium text-sm'>
|
||||
Color
|
||||
</FormLabel>
|
||||
<FormControl>
|
||||
<ColorPicker
|
||||
value={field.value}
|
||||
onChange={field.onChange}
|
||||
onBlur={field.onBlur}
|
||||
className='h-10 w-20'
|
||||
/>
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name='icon'
|
||||
name='name'
|
||||
render={({ field }) => (
|
||||
<FormItem className='w-20'>
|
||||
<FormLabel>Icon</FormLabel>
|
||||
<Popover open={iconPopoverOpen} onOpenChange={setIconPopoverOpen}>
|
||||
<PopoverTrigger asChild>
|
||||
<Button variant='outline' role='combobox' className='h-10 w-20 p-0'>
|
||||
<SelectedIconComponent className='h-4 w-4' />
|
||||
</Button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent className='z-50 w-84 p-0' align='start'>
|
||||
<div className='p-3'>
|
||||
<div className='grid max-h-80 grid-cols-8 gap-2 overflow-y-auto'>
|
||||
{icons.map((icon) => {
|
||||
const IconComponent = icon.component
|
||||
return (
|
||||
<button
|
||||
key={icon.value}
|
||||
type='button'
|
||||
onClick={() => {
|
||||
field.onChange(icon.value)
|
||||
setIconPopoverOpen(false)
|
||||
}}
|
||||
className={cn(
|
||||
'flex h-8 w-8 items-center justify-center rounded-md border transition-colors hover:bg-muted',
|
||||
field.value === icon.value &&
|
||||
'bg-primary text-primary-foreground'
|
||||
)}
|
||||
>
|
||||
<IconComponent className='h-4 w-4' />
|
||||
</button>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
<FormItem>
|
||||
<FormLabel className='!text-foreground font-medium text-sm'>Name</FormLabel>
|
||||
<FormControl>
|
||||
<Input placeholder='Enter template name' {...field} />
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
<div className='grid grid-cols-2 gap-4'>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name='author'
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel className='!text-foreground font-medium text-sm'>
|
||||
Author
|
||||
</FormLabel>
|
||||
<FormControl>
|
||||
<Input placeholder='Enter author name' {...field} />
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name='category'
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel className='!text-foreground font-medium text-sm'>
|
||||
Category
|
||||
</FormLabel>
|
||||
<Select onValueChange={field.onChange} defaultValue={field.value}>
|
||||
<FormControl>
|
||||
<SelectTrigger>
|
||||
<SelectValue placeholder='Select a category' />
|
||||
</SelectTrigger>
|
||||
</FormControl>
|
||||
<SelectContent>
|
||||
{categories.map((category) => (
|
||||
<SelectItem key={category.value} value={category.value}>
|
||||
{category.label}
|
||||
</SelectItem>
|
||||
))}
|
||||
</SelectContent>
|
||||
</Select>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name='color'
|
||||
name='description'
|
||||
render={({ field }) => (
|
||||
<FormItem className='w-20'>
|
||||
<FormLabel>Color</FormLabel>
|
||||
<FormItem>
|
||||
<FormLabel className='!text-foreground font-medium text-sm'>
|
||||
Description
|
||||
</FormLabel>
|
||||
<FormControl>
|
||||
<ColorPicker
|
||||
value={field.value}
|
||||
onChange={field.onChange}
|
||||
onBlur={field.onBlur}
|
||||
className='h-10 w-20'
|
||||
<Textarea
|
||||
placeholder='Describe what this template does...'
|
||||
className='resize-none'
|
||||
rows={3}
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
@@ -325,91 +564,28 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name='name'
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel>Name</FormLabel>
|
||||
<FormControl>
|
||||
<Input placeholder='Enter template name' {...field} />
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
<div className='grid grid-cols-2 gap-4'>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name='author'
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel>Author</FormLabel>
|
||||
<FormControl>
|
||||
<Input placeholder='Enter author name' {...field} />
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name='category'
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel>Category</FormLabel>
|
||||
<Select onValueChange={field.onChange} defaultValue={field.value}>
|
||||
<FormControl>
|
||||
<SelectTrigger>
|
||||
<SelectValue placeholder='Select a category' />
|
||||
</SelectTrigger>
|
||||
</FormControl>
|
||||
<SelectContent>
|
||||
{categories.map((category) => (
|
||||
<SelectItem key={category.value} value={category.value}>
|
||||
{category.label}
|
||||
</SelectItem>
|
||||
))}
|
||||
</SelectContent>
|
||||
</Select>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name='description'
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel>Description</FormLabel>
|
||||
<FormControl>
|
||||
<Textarea
|
||||
placeholder='Describe what this template does...'
|
||||
className='resize-none'
|
||||
rows={3}
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Fixed Footer */}
|
||||
<div className='mt-auto border-t px-6 pt-4 pb-6'>
|
||||
<div className='flex justify-end'>
|
||||
<div className='flex items-center'>
|
||||
{existingTemplate && (
|
||||
<Button
|
||||
type='button'
|
||||
variant='destructive'
|
||||
onClick={() => setShowDeleteDialog(true)}
|
||||
disabled={isSubmitting || isLoadingTemplate}
|
||||
className='h-10 rounded-md px-4 py-2'
|
||||
>
|
||||
Delete
|
||||
</Button>
|
||||
)}
|
||||
<Button
|
||||
type='submit'
|
||||
disabled={isSubmitting}
|
||||
disabled={isSubmitting || !isFormValid || isLoadingTemplate}
|
||||
className={cn(
|
||||
'font-medium',
|
||||
'ml-auto font-medium',
|
||||
'bg-[var(--brand-primary-hex)] hover:bg-[var(--brand-primary-hover-hex)]',
|
||||
'shadow-[0_0_0_0_var(--brand-primary-hex)] hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)]',
|
||||
'text-white transition-all duration-200',
|
||||
@@ -420,16 +596,59 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
|
||||
{isSubmitting ? (
|
||||
<>
|
||||
<Loader2 className='mr-2 h-4 w-4 animate-spin' />
|
||||
Publishing...
|
||||
{existingTemplate ? 'Updating...' : 'Publishing...'}
|
||||
</>
|
||||
) : existingTemplate ? (
|
||||
'Update Template'
|
||||
) : (
|
||||
'Publish'
|
||||
'Publish Template'
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
</Form>
|
||||
{existingTemplate && (
|
||||
<AlertDialog open={showDeleteDialog} onOpenChange={setShowDeleteDialog}>
|
||||
<AlertDialogContent>
|
||||
<AlertDialogHeader>
|
||||
<AlertDialogTitle>Delete Template?</AlertDialogTitle>
|
||||
<AlertDialogDescription>
|
||||
Deleting this template will remove it from the gallery. This action cannot be
|
||||
undone.
|
||||
</AlertDialogDescription>
|
||||
</AlertDialogHeader>
|
||||
<AlertDialogFooter>
|
||||
<AlertDialogCancel disabled={isDeleting}>Cancel</AlertDialogCancel>
|
||||
<AlertDialogAction
|
||||
className='bg-destructive text-destructive-foreground hover:bg-destructive/90'
|
||||
disabled={isDeleting}
|
||||
onClick={async () => {
|
||||
if (!existingTemplate) return
|
||||
setIsDeleting(true)
|
||||
try {
|
||||
const resp = await fetch(`/api/templates/${existingTemplate.id}`, {
|
||||
method: 'DELETE',
|
||||
})
|
||||
if (!resp.ok) {
|
||||
const err = await resp.json().catch(() => ({}))
|
||||
throw new Error(err.error || 'Failed to delete template')
|
||||
}
|
||||
setShowDeleteDialog(false)
|
||||
onOpenChange(false)
|
||||
} catch (err) {
|
||||
logger.error('Failed to delete template', err)
|
||||
} finally {
|
||||
setIsDeleting(false)
|
||||
}
|
||||
}}
|
||||
>
|
||||
{isDeleting ? 'Deleting...' : 'Delete'}
|
||||
</AlertDialogAction>
|
||||
</AlertDialogFooter>
|
||||
</AlertDialogContent>
|
||||
</AlertDialog>
|
||||
)}
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
)
|
||||
|
||||
@@ -18,7 +18,6 @@ import {
|
||||
import { useParams, useRouter } from 'next/navigation'
|
||||
import {
|
||||
AlertDialog,
|
||||
AlertDialogAction,
|
||||
AlertDialogCancel,
|
||||
AlertDialogContent,
|
||||
AlertDialogDescription,
|
||||
@@ -113,6 +112,15 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
|
||||
const [isTemplateModalOpen, setIsTemplateModalOpen] = useState(false)
|
||||
const [isAutoLayouting, setIsAutoLayouting] = useState(false)
|
||||
|
||||
// Delete workflow state - grouped for better organization
|
||||
const [deleteState, setDeleteState] = useState({
|
||||
showDialog: false,
|
||||
isDeleting: false,
|
||||
hasPublishedTemplates: false,
|
||||
publishedTemplates: [] as any[],
|
||||
showTemplateChoice: false,
|
||||
})
|
||||
|
||||
// Deployed state management
|
||||
const [deployedState, setDeployedState] = useState<WorkflowState | null>(null)
|
||||
const [isLoadingDeployedState, setIsLoadingDeployedState] = useState<boolean>(false)
|
||||
@@ -337,35 +345,170 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle deleting the current workflow
|
||||
* Reset delete state
|
||||
*/
|
||||
const handleDeleteWorkflow = () => {
|
||||
const resetDeleteState = useCallback(() => {
|
||||
setDeleteState({
|
||||
showDialog: false,
|
||||
isDeleting: false,
|
||||
hasPublishedTemplates: false,
|
||||
publishedTemplates: [],
|
||||
showTemplateChoice: false,
|
||||
})
|
||||
}, [])
|
||||
|
||||
/**
|
||||
* Navigate to next workflow after deletion
|
||||
*/
|
||||
const navigateAfterDeletion = useCallback(
|
||||
(currentWorkflowId: string) => {
|
||||
const sidebarWorkflows = getSidebarOrderedWorkflows()
|
||||
const currentIndex = sidebarWorkflows.findIndex((w) => w.id === currentWorkflowId)
|
||||
|
||||
// Find next workflow: try next, then previous
|
||||
let nextWorkflowId: string | null = null
|
||||
if (sidebarWorkflows.length > 1) {
|
||||
if (currentIndex < sidebarWorkflows.length - 1) {
|
||||
nextWorkflowId = sidebarWorkflows[currentIndex + 1].id
|
||||
} else if (currentIndex > 0) {
|
||||
nextWorkflowId = sidebarWorkflows[currentIndex - 1].id
|
||||
}
|
||||
}
|
||||
|
||||
// Navigate to next workflow or workspace home
|
||||
if (nextWorkflowId) {
|
||||
router.push(`/workspace/${workspaceId}/w/${nextWorkflowId}`)
|
||||
} else {
|
||||
router.push(`/workspace/${workspaceId}`)
|
||||
}
|
||||
},
|
||||
[workspaceId, router]
|
||||
)
|
||||
|
||||
/**
|
||||
* Check if workflow has published templates
|
||||
*/
|
||||
const checkPublishedTemplates = useCallback(async (workflowId: string) => {
|
||||
const checkResponse = await fetch(`/api/workflows/${workflowId}?check-templates=true`, {
|
||||
method: 'DELETE',
|
||||
})
|
||||
|
||||
if (!checkResponse.ok) {
|
||||
throw new Error(`Failed to check templates: ${checkResponse.statusText}`)
|
||||
}
|
||||
|
||||
return await checkResponse.json()
|
||||
}, [])
|
||||
|
||||
/**
|
||||
* Delete workflow with optional template handling
|
||||
*/
|
||||
const deleteWorkflowWithTemplates = useCallback(
|
||||
async (workflowId: string, templateAction?: 'keep' | 'delete') => {
|
||||
const endpoint = templateAction
|
||||
? `/api/workflows/${workflowId}?deleteTemplates=${templateAction}`
|
||||
: null
|
||||
|
||||
if (endpoint) {
|
||||
// Use custom endpoint for template handling
|
||||
const response = await fetch(endpoint, { method: 'DELETE' })
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to delete workflow: ${response.statusText}`)
|
||||
}
|
||||
|
||||
// Manual registry cleanup since we used custom API
|
||||
useWorkflowRegistry.setState((state) => {
|
||||
const newWorkflows = { ...state.workflows }
|
||||
delete newWorkflows[workflowId]
|
||||
|
||||
return {
|
||||
...state,
|
||||
workflows: newWorkflows,
|
||||
activeWorkflowId: state.activeWorkflowId === workflowId ? null : state.activeWorkflowId,
|
||||
}
|
||||
})
|
||||
} else {
|
||||
// Use registry's built-in deletion (handles database + state)
|
||||
await useWorkflowRegistry.getState().removeWorkflow(workflowId)
|
||||
}
|
||||
},
|
||||
[]
|
||||
)
|
||||
|
||||
/**
|
||||
* Handle deleting the current workflow - called after user confirms
|
||||
*/
|
||||
const handleDeleteWorkflow = useCallback(async () => {
|
||||
const currentWorkflowId = params.workflowId as string
|
||||
if (!currentWorkflowId || !userPermissions.canEdit) return
|
||||
|
||||
const sidebarWorkflows = getSidebarOrderedWorkflows()
|
||||
const currentIndex = sidebarWorkflows.findIndex((w) => w.id === currentWorkflowId)
|
||||
setDeleteState((prev) => ({ ...prev, isDeleting: true }))
|
||||
|
||||
// Find next workflow: try next, then previous
|
||||
let nextWorkflowId: string | null = null
|
||||
if (sidebarWorkflows.length > 1) {
|
||||
if (currentIndex < sidebarWorkflows.length - 1) {
|
||||
nextWorkflowId = sidebarWorkflows[currentIndex + 1].id
|
||||
} else if (currentIndex > 0) {
|
||||
nextWorkflowId = sidebarWorkflows[currentIndex - 1].id
|
||||
try {
|
||||
// Check if workflow has published templates
|
||||
const checkData = await checkPublishedTemplates(currentWorkflowId)
|
||||
|
||||
if (checkData.hasPublishedTemplates) {
|
||||
setDeleteState((prev) => ({
|
||||
...prev,
|
||||
hasPublishedTemplates: true,
|
||||
publishedTemplates: checkData.publishedTemplates || [],
|
||||
showTemplateChoice: true,
|
||||
isDeleting: false, // Stop showing "Deleting..." and show template choice
|
||||
}))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Navigate to next workflow or workspace home
|
||||
if (nextWorkflowId) {
|
||||
router.push(`/workspace/${workspaceId}/w/${nextWorkflowId}`)
|
||||
} else {
|
||||
router.push(`/workspace/${workspaceId}`)
|
||||
// No templates, proceed with standard deletion
|
||||
navigateAfterDeletion(currentWorkflowId)
|
||||
await deleteWorkflowWithTemplates(currentWorkflowId)
|
||||
resetDeleteState()
|
||||
} catch (error) {
|
||||
logger.error('Error deleting workflow:', error)
|
||||
setDeleteState((prev) => ({ ...prev, isDeleting: false }))
|
||||
}
|
||||
}, [
|
||||
params.workflowId,
|
||||
userPermissions.canEdit,
|
||||
checkPublishedTemplates,
|
||||
navigateAfterDeletion,
|
||||
deleteWorkflowWithTemplates,
|
||||
resetDeleteState,
|
||||
])
|
||||
|
||||
// Remove the workflow from the registry using the URL parameter
|
||||
useWorkflowRegistry.getState().removeWorkflow(currentWorkflowId)
|
||||
}
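In short, the confirmed delete above now checks for published templates first; if any exist it pauses and surfaces the keep/delete choice, otherwise it navigates away, deletes the workflow, and resets the dialog state, logging failures and clearing isDeleting on error.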
|
||||
/**
|
||||
* Handle template action selection
|
||||
*/
|
||||
const handleTemplateAction = useCallback(
|
||||
async (action: 'keep' | 'delete') => {
|
||||
const currentWorkflowId = params.workflowId as string
|
||||
if (!currentWorkflowId || !userPermissions.canEdit) return
|
||||
|
||||
setDeleteState((prev) => ({ ...prev, isDeleting: true }))
|
||||
|
||||
try {
|
||||
logger.info(`Deleting workflow ${currentWorkflowId} with template action: ${action}`)
|
||||
|
||||
navigateAfterDeletion(currentWorkflowId)
|
||||
await deleteWorkflowWithTemplates(currentWorkflowId, action)
|
||||
|
||||
logger.info(
|
||||
`Successfully deleted workflow ${currentWorkflowId} with template action: ${action}`
|
||||
)
|
||||
resetDeleteState()
|
||||
} catch (error) {
|
||||
logger.error('Error deleting workflow:', error)
|
||||
setDeleteState((prev) => ({ ...prev, isDeleting: false }))
|
||||
}
|
||||
},
|
||||
[
|
||||
params.workflowId,
|
||||
userPermissions.canEdit,
|
||||
navigateAfterDeletion,
|
||||
deleteWorkflowWithTemplates,
|
||||
resetDeleteState,
|
||||
]
|
||||
)
|
||||
|
||||
// Helper function to open subscription settings
|
||||
const openSubscriptionSettings = () => {
|
||||
@@ -422,7 +565,23 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
|
||||
}
|
||||
|
||||
return (
|
||||
<AlertDialog>
|
||||
<AlertDialog
|
||||
open={deleteState.showDialog}
|
||||
onOpenChange={(open) => {
|
||||
if (open) {
|
||||
// Reset all state when opening dialog to ensure clean start
|
||||
setDeleteState({
|
||||
showDialog: true,
|
||||
isDeleting: false,
|
||||
hasPublishedTemplates: false,
|
||||
publishedTemplates: [],
|
||||
showTemplateChoice: false,
|
||||
})
|
||||
} else {
|
||||
resetDeleteState()
|
||||
}
|
||||
}}
|
||||
>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<AlertDialogTrigger asChild>
|
||||
@@ -444,21 +603,71 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
|
||||
|
||||
<AlertDialogContent>
|
||||
<AlertDialogHeader>
|
||||
<AlertDialogTitle>Delete workflow?</AlertDialogTitle>
|
||||
<AlertDialogDescription>
|
||||
Deleting this workflow will permanently remove all associated blocks, executions, and
|
||||
configuration.{' '}
|
||||
<span className='text-red-500 dark:text-red-500'>This action cannot be undone.</span>
|
||||
</AlertDialogDescription>
|
||||
<AlertDialogTitle>
|
||||
{deleteState.showTemplateChoice ? 'Published Templates Found' : 'Delete workflow?'}
|
||||
</AlertDialogTitle>
|
||||
{deleteState.showTemplateChoice ? (
|
||||
<div className='space-y-3'>
|
||||
<AlertDialogDescription>
|
||||
This workflow has {deleteState.publishedTemplates.length} published template
|
||||
{deleteState.publishedTemplates.length > 1 ? 's' : ''}:
|
||||
</AlertDialogDescription>
|
||||
{deleteState.publishedTemplates.length > 0 && (
|
||||
<ul className='list-disc space-y-1 pl-6'>
|
||||
{deleteState.publishedTemplates.map((template) => (
|
||||
<li key={template.id}>{template.name}</li>
|
||||
))}
|
||||
</ul>
|
||||
)}
|
||||
<AlertDialogDescription>
|
||||
What would you like to do with the published template
|
||||
{deleteState.publishedTemplates.length > 1 ? 's' : ''}?
|
||||
</AlertDialogDescription>
|
||||
</div>
|
||||
) : (
|
||||
<AlertDialogDescription>
|
||||
Deleting this workflow will permanently remove all associated blocks, executions,
|
||||
and configuration.{' '}
|
||||
<span className='text-red-500 dark:text-red-500'>
|
||||
This action cannot be undone.
|
||||
</span>
|
||||
</AlertDialogDescription>
|
||||
)}
|
||||
</AlertDialogHeader>
|
||||
<AlertDialogFooter className='flex'>
|
||||
<AlertDialogCancel className='h-9 w-full rounded-[8px]'>Cancel</AlertDialogCancel>
|
||||
<AlertDialogAction
|
||||
onClick={handleDeleteWorkflow}
|
||||
className='h-9 w-full rounded-[8px] bg-red-500 text-white transition-all duration-200 hover:bg-red-600 dark:bg-red-500 dark:hover:bg-red-600'
|
||||
>
|
||||
Delete
|
||||
</AlertDialogAction>
|
||||
{deleteState.showTemplateChoice ? (
|
||||
<div className='flex w-full gap-2'>
|
||||
<Button
|
||||
variant='outline'
|
||||
onClick={() => handleTemplateAction('keep')}
|
||||
disabled={deleteState.isDeleting}
|
||||
className='h-9 flex-1 rounded-[8px]'
|
||||
>
|
||||
Keep templates
|
||||
</Button>
|
||||
<Button
|
||||
onClick={() => handleTemplateAction('delete')}
|
||||
disabled={deleteState.isDeleting}
|
||||
className='h-9 flex-1 rounded-[8px] bg-red-500 text-white transition-all duration-200 hover:bg-red-600 dark:bg-red-500 dark:hover:bg-red-600'
|
||||
>
|
||||
{deleteState.isDeleting ? 'Deleting...' : 'Delete templates'}
|
||||
</Button>
|
||||
</div>
|
||||
) : (
|
||||
<>
|
||||
<AlertDialogCancel className='h-9 w-full rounded-[8px]'>Cancel</AlertDialogCancel>
|
||||
<Button
|
||||
onClick={(e) => {
|
||||
e.preventDefault()
|
||||
handleDeleteWorkflow()
|
||||
}}
|
||||
disabled={deleteState.isDeleting}
|
||||
className='h-9 w-full rounded-[8px] bg-red-500 text-white transition-all duration-200 hover:bg-red-600 dark:bg-red-500 dark:hover:bg-red-600'
|
||||
>
|
||||
{deleteState.isDeleting ? 'Deleting...' : 'Delete'}
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
</AlertDialogFooter>
|
||||
</AlertDialogContent>
|
||||
</AlertDialog>
|
||||
@@ -1002,10 +1211,10 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
|
||||
{renderToggleButton()}
|
||||
{isExpanded && <ExportControls />}
|
||||
{isExpanded && renderAutoLayoutButton()}
|
||||
{renderDuplicateButton()}
|
||||
{renderDeleteButton()}
|
||||
{!isDebugging && renderDebugModeToggle()}
|
||||
{isExpanded && renderPublishButton()}
|
||||
{renderDeleteButton()}
|
||||
{renderDuplicateButton()}
|
||||
{!isDebugging && renderDebugModeToggle()}
|
||||
{renderDeployButton()}
|
||||
{isDebugging ? renderDebugControlsBar() : renderRunButton()}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Check, Eye, X } from 'lucide-react'
|
||||
import { Eye, EyeOff } from 'lucide-react'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useCopilotStore } from '@/stores/copilot/store'
|
||||
@@ -201,6 +201,34 @@ export function DiffControls() {
|
||||
logger.warn('Failed to clear preview YAML:', error)
|
||||
})
|
||||
|
||||
// Resolve target toolCallId for build/edit and update to terminal success state in the copilot store
|
||||
try {
|
||||
const { toolCallsById, messages } = useCopilotStore.getState()
|
||||
let id: string | undefined
|
||||
outer: for (let mi = messages.length - 1; mi >= 0; mi--) {
|
||||
const m = messages[mi]
|
||||
if (m.role !== 'assistant' || !m.contentBlocks) continue
|
||||
const blocks = m.contentBlocks as any[]
|
||||
for (let bi = blocks.length - 1; bi >= 0; bi--) {
|
||||
const b = blocks[bi]
|
||||
if (b?.type === 'tool_call') {
|
||||
const tn = b.toolCall?.name
|
||||
if (tn === 'build_workflow' || tn === 'edit_workflow') {
|
||||
id = b.toolCall?.id
|
||||
break outer
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!id) {
|
||||
const candidates = Object.values(toolCallsById).filter(
|
||||
(t) => t.name === 'build_workflow' || t.name === 'edit_workflow'
|
||||
)
|
||||
id = candidates.length ? candidates[candidates.length - 1].id : undefined
|
||||
}
|
||||
if (id) updatePreviewToolCallState('accepted', id)
|
||||
} catch {}
|
||||
|
||||
// Accept changes without blocking the UI; errors will be logged by the store handler
|
||||
acceptChanges().catch((error) => {
|
||||
logger.error('Failed to accept changes (background):', error)
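Both the accept and reject paths run the same reverse scan for the most recent build_workflow or edit_workflow tool call. A minimal sketch of that lookup as a standalone helper (the function name is hypothetical; the shapes mirror the store access shown above):

function findLatestBuildOrEditToolCallId(
  messages: Array<{ role: string; contentBlocks?: any[] }>,
  toolCallsById: Record<string, { id: string; name: string }>
): string | undefined {
  // Newest assistant message first, newest tool_call block first
  for (let mi = messages.length - 1; mi >= 0; mi--) {
    const m = messages[mi]
    if (m.role !== 'assistant' || !m.contentBlocks) continue
    for (let bi = m.contentBlocks.length - 1; bi >= 0; bi--) {
      const b = m.contentBlocks[bi]
      if (b?.type !== 'tool_call') continue
      const tn = b.toolCall?.name
      if (tn === 'build_workflow' || tn === 'edit_workflow') return b.toolCall?.id
    }
  }
  // Fallback: last matching entry in the flat tool-call map
  const candidates = Object.values(toolCallsById).filter(
    (t) => t.name === 'build_workflow' || t.name === 'edit_workflow'
  )
  return candidates.length ? candidates[candidates.length - 1].id : undefined
}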
|
||||
@@ -224,6 +252,34 @@ export function DiffControls() {
|
||||
logger.warn('Failed to clear preview YAML:', error)
|
||||
})
|
||||
|
||||
// Resolve target toolCallId for build/edit and update to terminal rejected state in the copilot store
|
||||
try {
|
||||
const { toolCallsById, messages } = useCopilotStore.getState()
|
||||
let id: string | undefined
|
||||
outer: for (let mi = messages.length - 1; mi >= 0; mi--) {
|
||||
const m = messages[mi]
|
||||
if (m.role !== 'assistant' || !m.contentBlocks) continue
|
||||
const blocks = m.contentBlocks as any[]
|
||||
for (let bi = blocks.length - 1; bi >= 0; bi--) {
|
||||
const b = blocks[bi]
|
||||
if (b?.type === 'tool_call') {
|
||||
const tn = b.toolCall?.name
|
||||
if (tn === 'build_workflow' || tn === 'edit_workflow') {
|
||||
id = b.toolCall?.id
|
||||
break outer
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!id) {
|
||||
const candidates = Object.values(toolCallsById).filter(
|
||||
(t) => t.name === 'build_workflow' || t.name === 'edit_workflow'
|
||||
)
|
||||
id = candidates.length ? candidates[candidates.length - 1].id : undefined
|
||||
}
|
||||
if (id) updatePreviewToolCallState('rejected', id)
|
||||
} catch {}
|
||||
|
||||
// Reject changes optimistically
|
||||
rejectChanges().catch((error) => {
|
||||
logger.error('Failed to reject changes (background):', error)
|
||||
@@ -232,58 +288,39 @@ export function DiffControls() {
|
||||
|
||||
return (
|
||||
<div className='-translate-x-1/2 fixed bottom-20 left-1/2 z-30'>
|
||||
<div className='rounded-lg border bg-background/95 p-4 shadow-lg backdrop-blur-sm'>
|
||||
<div className='flex items-center gap-4'>
|
||||
{/* Info section */}
|
||||
<div className='flex items-center gap-2'>
|
||||
<div className='flex h-8 w-8 items-center justify-center rounded-full bg-purple-100 dark:bg-purple-900'>
|
||||
<Eye className='h-4 w-4 text-purple-600 dark:text-purple-400' />
|
||||
</div>
|
||||
<div className='flex flex-col'>
|
||||
<span className='font-medium text-sm'>
|
||||
{isShowingDiff ? 'Viewing Proposed Changes' : 'Copilot has proposed changes'}
|
||||
</span>
|
||||
{diffMetadata && (
|
||||
<span className='text-muted-foreground text-xs'>
|
||||
Source: {diffMetadata.source} •{' '}
|
||||
{new Date(diffMetadata.timestamp).toLocaleTimeString()}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
<div className='flex items-center gap-2'>
|
||||
{/* Toggle (left, icon-only, no background) */}
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={handleToggleDiff}
|
||||
className='h-8 rounded-full px-2 text-muted-foreground hover:bg-transparent'
|
||||
title={isShowingDiff ? 'View original' : 'Preview changes'}
|
||||
>
|
||||
{isShowingDiff ? <Eye className='h-5 w-5' /> : <EyeOff className='h-5 w-5' />}
|
||||
</Button>
|
||||
|
||||
{/* Controls */}
|
||||
<div className='flex items-center gap-2'>
|
||||
{/* Toggle View Button */}
|
||||
<Button
|
||||
variant={isShowingDiff ? 'default' : 'outline'}
|
||||
size='sm'
|
||||
onClick={handleToggleDiff}
|
||||
className='h-8'
|
||||
>
|
||||
{isShowingDiff ? 'View Original' : 'Preview Changes'}
|
||||
</Button>
|
||||
{/* Reject (middle, light gray, icon-only) */}
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
onClick={handleReject}
|
||||
className='h-8 rounded-[6px] border-gray-200 bg-gray-100 px-3 text-gray-700 hover:bg-gray-200 dark:border-gray-700 dark:bg-gray-800 dark:text-gray-200 dark:hover:bg-gray-700'
|
||||
title='Reject changes'
|
||||
>
|
||||
Reject
|
||||
</Button>
|
||||
|
||||
{/* Accept/Reject buttons - only show when viewing diff */}
|
||||
{isShowingDiff && (
|
||||
<>
|
||||
<Button
|
||||
variant='default'
|
||||
size='sm'
|
||||
onClick={handleAccept}
|
||||
className='h-8 bg-green-600 px-3 hover:bg-green-700'
|
||||
>
|
||||
<Check className='mr-1 h-3 w-3' />
|
||||
Accept
|
||||
</Button>
|
||||
<Button variant='destructive' size='sm' onClick={handleReject} className='h-8 px-3'>
|
||||
<X className='mr-1 h-3 w-3' />
|
||||
Reject
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
{/* Accept (right, brand purple, icon-only) */}
|
||||
<Button
|
||||
variant='default'
|
||||
size='sm'
|
||||
onClick={handleAccept}
|
||||
className='h-8 rounded-[6px] bg-[var(--brand-primary-hover-hex)] px-3 text-white hover:bg-[var(--brand-primary-hover-hex)]/90 hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)]'
|
||||
title='Accept changes'
|
||||
>
|
||||
Accept
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
|
||||
@@ -4,6 +4,7 @@ import { type KeyboardEvent, useCallback, useEffect, useMemo, useRef, useState }
|
||||
import { ArrowDown, ArrowUp } from 'lucide-react'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { Notice } from '@/components/ui/notice'
|
||||
import { ScrollArea } from '@/components/ui/scroll-area'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
@@ -32,12 +33,11 @@ interface ChatFile {
}

interface ChatProps {
  panelWidth: number
  chatMessage: string
  setChatMessage: (message: string) => void
}

export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
export function Chat({ chatMessage, setChatMessage }: ChatProps) {
  const { activeWorkflowId } = useWorkflowRegistry()

  const {
@@ -63,6 +63,7 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
|
||||
// File upload state
|
||||
const [chatFiles, setChatFiles] = useState<ChatFile[]>([])
|
||||
const [isUploadingFiles, setIsUploadingFiles] = useState(false)
|
||||
const [uploadErrors, setUploadErrors] = useState<string[]>([])
|
||||
const [dragCounter, setDragCounter] = useState(0)
|
||||
const isDragOver = dragCounter > 0
|
||||
// Scroll state
|
||||
@@ -280,11 +281,15 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
|
||||
type: chatFile.type,
|
||||
file: chatFile.file, // Pass the actual File object
|
||||
}))
|
||||
workflowInput.onUploadError = (message: string) => {
|
||||
setUploadErrors((prev) => [...prev, message])
|
||||
}
|
||||
}
|
||||
|
||||
// Clear input and files, refocus immediately
|
||||
setChatMessage('')
|
||||
setChatFiles([])
|
||||
setUploadErrors([])
|
||||
focusInput(10)
|
||||
|
||||
// Execute the workflow to generate a response
|
||||
@@ -560,14 +565,16 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
|
||||
No messages yet
|
||||
</div>
|
||||
) : (
|
||||
<ScrollArea ref={scrollAreaRef} className='h-full pb-2' hideScrollbar={true}>
|
||||
<div>
|
||||
{workflowMessages.map((message) => (
|
||||
<ChatMessage key={message.id} message={message} />
|
||||
))}
|
||||
<div ref={messagesEndRef} />
|
||||
</div>
|
||||
</ScrollArea>
|
||||
<div ref={scrollAreaRef} className='h-full'>
|
||||
<ScrollArea className='h-full pb-2' hideScrollbar={true}>
|
||||
<div>
|
||||
{workflowMessages.map((message) => (
|
||||
<ChatMessage key={message.id} message={message} />
|
||||
))}
|
||||
<div ref={messagesEndRef} />
|
||||
</div>
|
||||
</ScrollArea>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Scroll to bottom button */}
|
||||
@@ -615,26 +622,68 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
|
||||
if (!(!activeWorkflowId || isExecuting || isUploadingFiles)) {
|
||||
const droppedFiles = Array.from(e.dataTransfer.files)
|
||||
if (droppedFiles.length > 0) {
|
||||
const newFiles = droppedFiles.slice(0, 5 - chatFiles.length).map((file) => ({
|
||||
id: crypto.randomUUID(),
|
||||
name: file.name,
|
||||
size: file.size,
|
||||
type: file.type,
|
||||
file,
|
||||
}))
|
||||
setChatFiles([...chatFiles, ...newFiles])
|
||||
const remainingSlots = Math.max(0, 5 - chatFiles.length)
|
||||
const candidateFiles = droppedFiles.slice(0, remainingSlots)
|
||||
const errors: string[] = []
|
||||
const validNewFiles: ChatFile[] = []
|
||||
|
||||
for (const file of candidateFiles) {
|
||||
if (file.size > 10 * 1024 * 1024) {
|
||||
errors.push(`${file.name} is too large (max 10MB)`)
|
||||
continue
|
||||
}
|
||||
|
||||
const isDuplicate = chatFiles.some(
|
||||
(existingFile) =>
|
||||
existingFile.name === file.name && existingFile.size === file.size
|
||||
)
|
||||
if (isDuplicate) {
|
||||
errors.push(`${file.name} already added`)
|
||||
continue
|
||||
}
|
||||
|
||||
validNewFiles.push({
|
||||
id: crypto.randomUUID(),
|
||||
name: file.name,
|
||||
size: file.size,
|
||||
type: file.type,
|
||||
file,
|
||||
})
|
||||
}
|
||||
|
||||
if (errors.length > 0) {
|
||||
setUploadErrors(errors)
|
||||
}
|
||||
|
||||
if (validNewFiles.length > 0) {
|
||||
setChatFiles([...chatFiles, ...validNewFiles])
|
||||
}
|
||||
}
|
||||
}
|
||||
}}
|
||||
>
|
||||
{/* File upload section */}
|
||||
<div className='mb-2'>
|
||||
{uploadErrors.length > 0 && (
|
||||
<div className='mb-2'>
|
||||
<Notice variant='error' title='File upload error'>
|
||||
<ul className='list-disc pl-5'>
|
||||
{uploadErrors.map((err, idx) => (
|
||||
<li key={idx}>{err}</li>
|
||||
))}
|
||||
</ul>
|
||||
</Notice>
|
||||
</div>
|
||||
)}
|
||||
<ChatFileUpload
|
||||
files={chatFiles}
|
||||
onFilesChange={setChatFiles}
|
||||
onFilesChange={(files) => {
|
||||
setChatFiles(files)
|
||||
}}
|
||||
maxFiles={5}
|
||||
maxSize={10}
|
||||
disabled={!activeWorkflowId || isExecuting || isUploadingFiles}
|
||||
onError={(errors) => setUploadErrors(errors)}
|
||||
/>
|
||||
</div>
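The drop handler above enforces three rules before a file is queued: at most 5 files total, 10MB per file, and no duplicate name plus size pairs. A condensed sketch of that check (the helper name is hypothetical):

const MAX_FILES = 5
const MAX_FILE_SIZE = 10 * 1024 * 1024 // 10MB, matching maxSize={10}

function validateDroppedFile(file: File, existing: ChatFile[]): string | null {
  if (file.size > MAX_FILE_SIZE) return `${file.name} is too large (max 10MB)`
  const isDuplicate = existing.some((f) => f.name === file.name && f.size === file.size)
  if (isDuplicate) return `${file.name} already added`
  return null // accepted, provided fewer than MAX_FILES are already attached
}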
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@ interface ChatFileUploadProps {
  maxSize?: number // in MB
  acceptedTypes?: string[]
  disabled?: boolean
  onError?: (errors: string[]) => void
}

export function ChatFileUpload({
@@ -30,6 +31,7 @@ export function ChatFileUpload({
  maxSize = 10,
  acceptedTypes = ['*'],
  disabled = false,
  onError,
}: ChatFileUploadProps) {
  const [isDragOver, setIsDragOver] = useState(false)
  const fileInputRef = useRef<HTMLInputElement>(null)
|
||||
@@ -91,7 +93,7 @@ export function ChatFileUpload({
|
||||
|
||||
if (errors.length > 0) {
|
||||
logger.warn('File upload errors:', errors)
|
||||
// You could show these errors in a toast or alert
|
||||
onError?.(errors)
|
||||
}
|
||||
|
||||
if (newFiles.length > 0) {
|
||||
@@ -168,7 +170,12 @@ export function ChatFileUpload({
|
||||
ref={fileInputRef}
|
||||
type='file'
|
||||
multiple
|
||||
onChange={(e) => handleFileSelect(e.target.files)}
|
||||
onChange={(e) => {
|
||||
handleFileSelect(e.target.files)
|
||||
if (fileInputRef.current) {
|
||||
fileInputRef.current.value = ''
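// Why the reset (inferred): the browser only fires onChange when the input's value changes,
// so clearing it here lets the user re-select the same file right after removing it.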
|
||||
}
|
||||
}}
|
||||
className='hidden'
|
||||
accept={acceptedTypes.join(',')}
|
||||
disabled={disabled}
|
||||
|
||||
@@ -25,7 +25,7 @@ export function Console({ panelWidth }: ConsoleProps) {
          No console entries
        </div>
      ) : (
        <ScrollArea className='h-full' hideScrollbar={true}>
        <ScrollArea className='h-full' hideScrollbar={false}>
          <div className='space-y-3'>
            {filteredEntries.map((entry) => (
              <ConsoleEntry key={entry.id} entry={entry} consoleWidth={panelWidth} />
@@ -78,6 +78,14 @@ if (typeof document !== 'undefined') {
|
||||
overflow-wrap: anywhere !important;
|
||||
word-break: break-word !important;
|
||||
}
|
||||
|
||||
/* Reduce top margin for first heading (e.g., right after thinking block) */
|
||||
.copilot-markdown-wrapper > h1:first-child,
|
||||
.copilot-markdown-wrapper > h2:first-child,
|
||||
.copilot-markdown-wrapper > h3:first-child,
|
||||
.copilot-markdown-wrapper > h4:first-child {
|
||||
margin-top: 0.25rem !important;
|
||||
}
|
||||
`
|
||||
document.head.appendChild(style)
|
||||
}
|
||||
@@ -140,17 +148,17 @@ export default function CopilotMarkdownRenderer({ content }: CopilotMarkdownRend
|
||||
|
||||
// Headings
|
||||
h1: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
|
||||
<h1 className='mt-10 mb-5 font-geist-sans font-semibold text-2xl text-gray-900 dark:text-gray-100'>
|
||||
<h1 className='mt-3 mb-3 font-geist-sans font-semibold text-2xl text-gray-900 dark:text-gray-100'>
|
||||
{children}
|
||||
</h1>
|
||||
),
|
||||
h2: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
|
||||
<h2 className='mt-8 mb-4 font-geist-sans font-semibold text-gray-900 text-xl dark:text-gray-100'>
|
||||
<h2 className='mt-2.5 mb-2.5 font-geist-sans font-semibold text-gray-900 text-xl dark:text-gray-100'>
|
||||
{children}
|
||||
</h2>
|
||||
),
|
||||
h3: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
|
||||
<h3 className='mt-7 mb-3 font-geist-sans font-semibold text-gray-900 text-lg dark:text-gray-100'>
|
||||
<h3 className='mt-2 mb-2 font-geist-sans font-semibold text-gray-900 text-lg dark:text-gray-100'>
|
||||
{children}
|
||||
</h3>
|
||||
),
|
||||
|
||||
@@ -19,6 +19,8 @@ export function ThinkingBlock({
|
||||
}: ThinkingBlockProps) {
|
||||
const [isExpanded, setIsExpanded] = useState(false)
|
||||
const [duration, setDuration] = useState(persistedDuration ?? 0)
|
||||
// Track if the user explicitly collapsed while streaming; sticky per block instance
|
||||
const userCollapsedRef = useRef<boolean>(false)
|
||||
// Keep a stable reference to start time that updates when prop changes
|
||||
const startTimeRef = useRef<number>(persistedStartTime ?? Date.now())
|
||||
useEffect(() => {
|
||||
@@ -28,13 +30,14 @@ export function ThinkingBlock({
|
||||
}, [persistedStartTime])
|
||||
|
||||
useEffect(() => {
|
||||
// Auto-collapse when streaming ends
|
||||
// Auto-collapse when streaming ends and reset userCollapsed flag
|
||||
if (!isStreaming) {
|
||||
setIsExpanded(false)
|
||||
userCollapsedRef.current = false
|
||||
return
|
||||
}
|
||||
// Expand once there is visible content while streaming
|
||||
if (content && content.trim().length > 0) {
|
||||
// Expand once there is visible content while streaming, unless user collapsed
|
||||
if (!userCollapsedRef.current && content && content.trim().length > 0) {
|
||||
setIsExpanded(true)
|
||||
}
|
||||
}, [isStreaming, content])
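// Net behavior of the effect above (summary, not additional source): while streaming, the block
// auto-expands as soon as visible content arrives unless the user collapsed it by hand; once
// streaming ends it collapses again and the manual-collapse flag is cleared for the next run.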
|
||||
@@ -65,9 +68,16 @@ export function ThinkingBlock({
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='my-1'>
|
||||
<div className='mt-1 mb-0'>
|
||||
<button
|
||||
onClick={() => setIsExpanded((v) => !v)}
|
||||
onClick={() => {
|
||||
setIsExpanded((v) => {
|
||||
const next = !v
|
||||
// If user collapses during streaming, remember to not auto-expand again
|
||||
if (!next && isStreaming) userCollapsedRef.current = true
|
||||
return next
|
||||
})
|
||||
}}
|
||||
className={cn(
|
||||
'mb-1 inline-flex items-center gap-1 text-gray-400 text-xs transition-colors hover:text-gray-500',
|
||||
'font-normal italic'
|
||||
|
||||
@@ -12,7 +12,7 @@ import {
|
||||
ThumbsUp,
|
||||
X,
|
||||
} from 'lucide-react'
|
||||
import { InlineToolCall } from '@/lib/copilot/tools/inline-tool-call'
|
||||
import { InlineToolCall } from '@/lib/copilot/inline-tool-call'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { usePreviewStore } from '@/stores/copilot/preview-store'
|
||||
import { useCopilotStore } from '@/stores/copilot/store'
|
||||
@@ -594,18 +594,14 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
)
|
||||
}
|
||||
if (block.type === 'tool_call') {
|
||||
// Skip hidden tools (like checkoff_todo)
|
||||
if (block.toolCall.hidden) {
|
||||
return null
|
||||
}
|
||||
|
||||
// Visibility and filtering handled by InlineToolCall
|
||||
return (
|
||||
<div
|
||||
key={`tool-${block.toolCall.id}`}
|
||||
className='transition-opacity duration-300 ease-in-out'
|
||||
style={{ opacity: 1 }}
|
||||
>
|
||||
<InlineToolCall toolCall={block.toolCall} />
|
||||
<InlineToolCall toolCallId={block.toolCall.id} toolCall={block.toolCall} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -625,7 +621,47 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className='flex justify-end'>
|
||||
<div className='flex items-center justify-end gap-0'>
|
||||
{hasCheckpoints && (
|
||||
<div className='mr-1 inline-flex items-center justify-center'>
|
||||
{showRestoreConfirmation ? (
|
||||
<div className='inline-flex items-center gap-1'>
|
||||
<button
|
||||
onClick={handleConfirmRevert}
|
||||
disabled={isRevertingCheckpoint}
|
||||
className='text-muted-foreground transition-colors hover:bg-muted disabled:cursor-not-allowed disabled:opacity-50'
|
||||
title='Confirm restore'
|
||||
aria-label='Confirm restore'
|
||||
>
|
||||
{isRevertingCheckpoint ? (
|
||||
<Loader2 className='h-3 w-3 animate-spin' />
|
||||
) : (
|
||||
<Check className='h-3 w-3' />
|
||||
)}
|
||||
</button>
|
||||
<button
|
||||
onClick={handleCancelRevert}
|
||||
disabled={isRevertingCheckpoint}
|
||||
className='text-muted-foreground transition-colors hover:bg-muted disabled:cursor-not-allowed disabled:opacity-50'
|
||||
title='Cancel restore'
|
||||
aria-label='Cancel restore'
|
||||
>
|
||||
<X className='h-3 w-3' />
|
||||
</button>
|
||||
</div>
|
||||
) : (
|
||||
<button
|
||||
onClick={handleRevertToCheckpoint}
|
||||
disabled={isRevertingCheckpoint}
|
||||
className='text-muted-foreground transition-colors hover:bg-muted disabled:cursor-not-allowed disabled:opacity-50'
|
||||
title='Restore workflow to this checkpoint state'
|
||||
aria-label='Restore'
|
||||
>
|
||||
<RotateCcw className='h-3 w-3' />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
<div className='min-w-0 max-w-[80%]'>
|
||||
{/* Message content in purple box */}
|
||||
<div
|
||||
@@ -639,55 +675,6 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
<WordWrap text={message.content} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Checkpoints below message */}
|
||||
{hasCheckpoints && (
|
||||
<div className='mt-1 flex justify-end'>
|
||||
<div className='inline-flex items-center gap-0.5 text-muted-foreground text-xs'>
|
||||
<span className='select-none'>
|
||||
Restore{showRestoreConfirmation && <span className='ml-0.5'>?</span>}
|
||||
</span>
|
||||
<div className='inline-flex w-8 items-center justify-center'>
|
||||
{showRestoreConfirmation ? (
|
||||
<div className='inline-flex items-center gap-1'>
|
||||
<button
|
||||
onClick={handleConfirmRevert}
|
||||
disabled={isRevertingCheckpoint}
|
||||
className='text-muted-foreground transition-colors hover:bg-muted disabled:cursor-not-allowed disabled:opacity-50'
|
||||
title='Confirm restore'
|
||||
aria-label='Confirm restore'
|
||||
>
|
||||
{isRevertingCheckpoint ? (
|
||||
<Loader2 className='h-3 w-3 animate-spin' />
|
||||
) : (
|
||||
<Check className='h-3 w-3' />
|
||||
)}
|
||||
</button>
|
||||
<button
|
||||
onClick={handleCancelRevert}
|
||||
disabled={isRevertingCheckpoint}
|
||||
className='text-muted-foreground transition-colors hover:bg-muted disabled:cursor-not-allowed disabled:opacity-50'
|
||||
title='Cancel restore'
|
||||
aria-label='Cancel restore'
|
||||
>
|
||||
<X className='h-3 w-3' />
|
||||
</button>
|
||||
</div>
|
||||
) : (
|
||||
<button
|
||||
onClick={handleRevertToCheckpoint}
|
||||
disabled={isRevertingCheckpoint}
|
||||
className='text-muted-foreground transition-colors hover:bg-muted disabled:cursor-not-allowed disabled:opacity-50'
|
||||
title='Restore workflow to this checkpoint state'
|
||||
aria-label='Restore'
|
||||
>
|
||||
<RotateCcw className='h-3 w-3' />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -98,22 +98,22 @@ export const TodoList = memo(function TodoList({
|
||||
index !== todos.length - 1 && 'border-gray-50 border-b dark:border-gray-800'
|
||||
)}
|
||||
>
|
||||
<div
|
||||
className={cn(
|
||||
'mt-0.5 flex h-4 w-4 flex-shrink-0 items-center justify-center rounded border transition-all',
|
||||
todo.executing
|
||||
? 'border-blue-400 dark:border-blue-500'
|
||||
: todo.completed
|
||||
{todo.executing ? (
|
||||
<div className='mt-0.5 flex h-4 w-4 flex-shrink-0 items-center justify-center'>
|
||||
<Loader2 className='h-3 w-3 animate-spin text-blue-500' />
|
||||
</div>
|
||||
) : (
|
||||
<div
|
||||
className={cn(
|
||||
'mt-0.5 flex h-4 w-4 flex-shrink-0 items-center justify-center rounded border transition-all',
|
||||
todo.completed
|
||||
? 'border-blue-500 bg-blue-500'
|
||||
: 'border-gray-300 dark:border-gray-600'
|
||||
)}
|
||||
>
|
||||
{todo.executing ? (
|
||||
<Loader2 className='h-3 w-3 animate-spin text-blue-500' />
|
||||
) : todo.completed ? (
|
||||
<Check className='h-3 w-3 text-white' strokeWidth={3} />
|
||||
) : null}
|
||||
</div>
|
||||
)}
|
||||
>
|
||||
{todo.completed ? <Check className='h-3 w-3 text-white' strokeWidth={3} /> : null}
|
||||
</div>
|
||||
)}
|
||||
|
||||
<span
|
||||
className={cn(
|
||||
|
||||
@@ -16,10 +16,10 @@ export const CopilotSlider = React.forwardRef<
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
<SliderPrimitive.Track className='relative h-2 w-full grow cursor-pointer overflow-hidden rounded-full bg-input'>
|
||||
<SliderPrimitive.Track className='relative h-1.5 w-full grow cursor-pointer overflow-hidden rounded-full bg-input'>
|
||||
<SliderPrimitive.Range className='absolute h-full bg-primary' />
|
||||
</SliderPrimitive.Track>
|
||||
<SliderPrimitive.Thumb className='block h-5 w-5 cursor-pointer rounded-full border-2 border-primary bg-background ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50' />
|
||||
<SliderPrimitive.Thumb className='block h-3.5 w-3.5 cursor-pointer rounded-full border border-primary/60 bg-background shadow-sm transition-all hover:shadow focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-primary/40 focus-visible:ring-offset-0 disabled:pointer-events-none disabled:opacity-50' />
|
||||
</SliderPrimitive.Root>
|
||||
))
|
||||
CopilotSlider.displayName = 'CopilotSlider'
|
||||
|
||||
@@ -120,12 +120,15 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
const setMessage =
|
||||
controlledValue !== undefined ? onControlledChange || (() => {}) : setInternalMessage
|
||||
|
||||
// Auto-resize textarea
|
||||
// Auto-resize textarea and toggle vertical scroll when exceeding max height
|
||||
useEffect(() => {
|
||||
const textarea = textareaRef.current
|
||||
if (textarea) {
|
||||
const maxHeight = 120
|
||||
textarea.style.height = 'auto'
|
||||
textarea.style.height = `${Math.min(textarea.scrollHeight, 120)}px` // Max height of 120px
|
||||
const nextHeight = Math.min(textarea.scrollHeight, maxHeight)
|
||||
textarea.style.height = `${nextHeight}px`
|
||||
textarea.style.overflowY = textarea.scrollHeight > maxHeight ? 'auto' : 'hidden'
|
||||
}
|
||||
}, [message])
|
||||
|
||||
@@ -431,6 +434,13 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
// Depth toggle state comes from global store; access via useCopilotStore
|
||||
const { agentDepth, agentPrefetch, setAgentDepth, setAgentPrefetch } = useCopilotStore()
|
||||
|
||||
// Ensure MAX mode is off for Fast and Balanced depths
|
||||
useEffect(() => {
|
||||
if (agentDepth < 2 && !agentPrefetch) {
|
||||
setAgentPrefetch(true)
|
||||
}
|
||||
}, [agentDepth, agentPrefetch, setAgentPrefetch])
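// Note (inferred from the switch further down, which renders checked={!agentPrefetch}): MAX mode
// maps to agentPrefetch === false, so forcing agentPrefetch back to true here turns MAX off
// whenever the depth drops below Advanced (2).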
|
||||
|
||||
const cycleDepth = () => {
|
||||
// 8 modes: depths 0-3, each with prefetch off/on. Cycle depth, then toggle prefetch when wrapping.
|
||||
const nextDepth = agentDepth === 3 ? 0 : ((agentDepth + 1) as 0 | 1 | 2 | 3)
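A hedged sketch of the cycling the comment above describes (the wrap-around prefetch toggle is inferred from that comment and is not shown in this hunk):

function nextMode(depth: 0 | 1 | 2 | 3, prefetch: boolean) {
  const nextDepth = depth === 3 ? 0 : ((depth + 1) as 0 | 1 | 2 | 3)
  // When wrapping back to Fast, flip the prefetch flag so all 8 depth/prefetch modes are reachable
  const nextPrefetch = nextDepth === 0 ? !prefetch : prefetch
  return { depth: nextDepth, prefetch: nextPrefetch }
}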
|
||||
@@ -446,24 +456,27 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
}
|
||||
|
||||
const getDepthLabelFor = (value: 0 | 1 | 2 | 3) => {
|
||||
return value === 0 ? 'Fast' : value === 1 ? 'Balanced' : value === 2 ? 'Advanced' : 'Expert'
|
||||
return value === 0 ? 'Fast' : value === 1 ? 'Balanced' : value === 2 ? 'Advanced' : 'Behemoth'
|
||||
}
|
||||
|
||||
// Removed descriptive suffixes; concise labels only
|
||||
const getDepthDescription = (value: 0 | 1 | 2 | 3) => {
|
||||
if (value === 0)
|
||||
return 'Fastest and cheapest. Good for small edits, simple workflows, and small tasks.'
|
||||
if (value === 1) return 'Balances speed and reasoning. Good fit for most tasks.'
|
||||
return 'Fastest and cheapest. Good for small edits, simple workflows, and small tasks'
|
||||
if (value === 1) return 'Balances speed and reasoning. Good fit for most tasks'
|
||||
if (value === 2)
|
||||
return 'More reasoning for larger workflows and complex edits, still balanced for speed.'
|
||||
return 'Maximum reasoning power. Best for complex workflow building and debugging.'
|
||||
return 'More reasoning for larger workflows and complex edits, still balanced for speed'
|
||||
return 'Maximum reasoning power. Best for complex workflow building and debugging'
|
||||
}
|
||||
|
||||
const getDepthIconFor = (value: 0 | 1 | 2 | 3) => {
|
||||
if (value === 0) return <Zap className='h-3 w-3 text-muted-foreground' />
|
||||
if (value === 1) return <InfinityIcon className='h-3 w-3 text-muted-foreground' />
|
||||
if (value === 2) return <Brain className='h-3 w-3 text-muted-foreground' />
|
||||
return <BrainCircuit className='h-3 w-3 text-muted-foreground' />
|
||||
const colorClass = !agentPrefetch
|
||||
? 'text-[var(--brand-primary-hover-hex)]'
|
||||
: 'text-muted-foreground'
|
||||
if (value === 0) return <Zap className={`h-3 w-3 ${colorClass}`} />
|
||||
if (value === 1) return <InfinityIcon className={`h-3 w-3 ${colorClass}`} />
|
||||
if (value === 2) return <Brain className={`h-3 w-3 ${colorClass}`} />
|
||||
return <BrainCircuit className={`h-3 w-3 ${colorClass}`} />
|
||||
}
|
||||
|
||||
const getDepthIcon = () => getDepthIconFor(agentDepth)
|
||||
@@ -550,7 +563,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
placeholder={isDragging ? 'Drop files here...' : placeholder}
|
||||
disabled={disabled}
|
||||
rows={1}
|
||||
className='mb-2 min-h-[32px] w-full resize-none overflow-hidden border-0 bg-transparent px-[2px] py-1 text-foreground focus-visible:ring-0 focus-visible:ring-offset-0'
|
||||
className='mb-2 min-h-[32px] w-full resize-none overflow-y-auto overflow-x-hidden border-0 bg-transparent px-[2px] py-1 text-foreground focus-visible:ring-0 focus-visible:ring-offset-0'
|
||||
style={{ height: 'auto' }}
|
||||
/>
|
||||
|
||||
@@ -636,7 +649,12 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
className='flex h-6 items-center gap-1.5 rounded-full border px-2 py-1 font-medium text-xs'
|
||||
className={cn(
|
||||
'flex h-6 items-center gap-1.5 rounded-full border px-2 py-1 font-medium text-xs',
|
||||
!agentPrefetch
|
||||
? 'border-[var(--brand-primary-hover-hex)] text-[var(--brand-primary-hover-hex)] hover:bg-[color-mix(in_srgb,var(--brand-primary-hover-hex)_8%,transparent)] hover:text-[var(--brand-primary-hover-hex)]'
|
||||
: 'border-border text-foreground'
|
||||
)}
|
||||
title='Choose mode'
|
||||
>
|
||||
{getDepthIcon()}
|
||||
@@ -666,12 +684,25 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
className='max-w-[220px] border bg-popover p-2 text-[11px] text-popover-foreground leading-snug shadow-md'
|
||||
>
|
||||
Significantly increases depth of reasoning
|
||||
<br />
|
||||
<span className='text-[10px] text-muted-foreground italic'>
|
||||
Only available in Advanced and Behemoth modes
|
||||
</span>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
<Switch
|
||||
checked={!agentPrefetch}
|
||||
onCheckedChange={(checked) => setAgentPrefetch(!checked)}
|
||||
disabled={agentDepth < 2}
|
||||
title={
|
||||
agentDepth < 2
|
||||
? 'MAX mode is only available for Advanced or Expert'
|
||||
: undefined
|
||||
}
|
||||
onCheckedChange={(checked) => {
|
||||
if (agentDepth < 2) return
|
||||
setAgentPrefetch(!checked)
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
<div className='my-2 flex justify-center'>
|
||||
@@ -680,9 +711,12 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
<div className='mb-3'>
|
||||
<div className='mb-2 flex items-center justify-between'>
|
||||
<span className='font-medium text-xs'>Mode</span>
|
||||
<span className='text-muted-foreground text-xs'>
|
||||
{getDepthLabelFor(agentDepth)}
|
||||
</span>
|
||||
<div className='flex items-center gap-1'>
|
||||
{getDepthIconFor(agentDepth)}
|
||||
<span className='text-muted-foreground text-xs'>
|
||||
{getDepthLabelFor(agentDepth)}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<div className='relative'>
|
||||
<Slider
|
||||
|
||||
@@ -302,12 +302,12 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
        const previewToolCall = lastMessage.toolCalls.find(
          (tc) =>
            tc.name === COPILOT_TOOL_IDS.BUILD_WORKFLOW &&
            tc.state === 'completed' &&
            tc.state === 'success' &&
            !isToolCallSeen(tc.id)
        )

        if (previewToolCall?.result) {
          logger.info('Preview workflow completed via native SSE - handling result')
        if (previewToolCall) {
          logger.info('Preview workflow completed via native SSE')
          // Mark as seen to prevent duplicate processing
          markToolCallAsSeen(previewToolCall.id)
          // Tool call handling logic would go here if needed
|
||||
|
||||
@@ -1,7 +1,16 @@
|
||||
'use client'
|
||||
|
||||
import { useEffect, useRef, useState } from 'react'
|
||||
import { AlertTriangle, ChevronDown, Copy, MoreVertical, Plus, Trash } from 'lucide-react'
|
||||
import {
|
||||
AlertTriangle,
|
||||
ChevronDown,
|
||||
Copy,
|
||||
Maximize2,
|
||||
Minimize2,
|
||||
MoreVertical,
|
||||
Plus,
|
||||
Trash,
|
||||
} from 'lucide-react'
|
||||
import { highlight, languages } from 'prismjs'
|
||||
import 'prismjs/components/prism-javascript'
|
||||
import 'prismjs/themes/prism.css'
|
||||
@@ -52,6 +61,16 @@ export function Variables() {
|
||||
// Track which variables are currently being edited
|
||||
const [_activeEditors, setActiveEditors] = useState<Record<string, boolean>>({})
|
||||
|
||||
// Collapsed state per variable
|
||||
const [collapsedById, setCollapsedById] = useState<Record<string, boolean>>({})
|
||||
|
||||
const toggleCollapsed = (variableId: string) => {
|
||||
setCollapsedById((prev) => ({
|
||||
...prev,
|
||||
[variableId]: !prev[variableId],
|
||||
}))
|
||||
}
|
||||
|
||||
// Handle variable name change with validation
|
||||
const handleVariableNameChange = (variableId: string, newName: string) => {
|
||||
const validatedName = validateName(newName)
|
||||
@@ -220,7 +239,7 @@ export function Variables() {
|
||||
</Button>
|
||||
</div>
|
||||
) : (
|
||||
<ScrollArea className='h-full' hideScrollbar={true}>
|
||||
<ScrollArea className='h-full' hideScrollbar={false}>
|
||||
<div className='space-y-4'>
|
||||
{workflowVariables.map((variable) => (
|
||||
<div key={variable.id} className='space-y-2'>
|
||||
@@ -298,6 +317,17 @@ export function Variables() {
|
||||
align='end'
|
||||
className='min-w-32 rounded-lg border-[#E5E5E5] bg-[#FFFFFF] shadow-xs dark:border-[#414141] dark:bg-[var(--surface-elevated)]'
|
||||
>
|
||||
<DropdownMenuItem
|
||||
onClick={() => toggleCollapsed(variable.id)}
|
||||
className='cursor-pointer rounded-md px-3 py-2 font-[380] text-card-foreground text-sm hover:bg-secondary/50 focus:bg-secondary/50'
|
||||
>
|
||||
{(collapsedById[variable.id] ?? false) ? (
|
||||
<Maximize2 className='mr-2 h-4 w-4 text-muted-foreground' />
|
||||
) : (
|
||||
<Minimize2 className='mr-2 h-4 w-4 text-muted-foreground' />
|
||||
)}
|
||||
{(collapsedById[variable.id] ?? false) ? 'Expand' : 'Collapse'}
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
onClick={() => collaborativeDuplicateVariable(variable.id)}
|
||||
className='cursor-pointer rounded-md px-3 py-2 font-[380] text-card-foreground text-sm hover:bg-secondary/50 focus:bg-secondary/50'
|
||||
@@ -317,71 +347,75 @@ export function Variables() {
|
||||
</div>
|
||||
|
||||
{/* Value area */}
|
||||
<div className='relative rounded-lg bg-secondary/50'>
|
||||
{/* Validation indicator */}
|
||||
{variable.value !== '' && getValidationStatus(variable) && (
|
||||
<div className='absolute top-2 right-2 z-10'>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<div className='cursor-help'>
|
||||
<AlertTriangle className='h-3 w-3 text-muted-foreground' />
|
||||
</div>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='bottom' className='max-w-xs'>
|
||||
<p>{getValidationStatus(variable)}</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
)}
|
||||
{!(collapsedById[variable.id] ?? false) && (
|
||||
<div className='relative rounded-lg bg-secondary/50'>
|
||||
{/* Validation indicator */}
|
||||
{variable.value !== '' && getValidationStatus(variable) && (
|
||||
<div className='absolute top-2 right-2 z-10'>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<div className='cursor-help'>
|
||||
<AlertTriangle className='h-3 w-3 text-muted-foreground' />
|
||||
</div>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='bottom' className='max-w-xs'>
|
||||
<p>{getValidationStatus(variable)}</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Editor */}
|
||||
<div className='relative overflow-hidden'>
|
||||
<div
|
||||
className='relative min-h-[36px] w-full max-w-full px-3 py-2 font-normal text-sm'
|
||||
ref={(el) => {
|
||||
editorRefs.current[variable.id] = el
|
||||
}}
|
||||
style={{ maxWidth: '100%' }}
|
||||
>
|
||||
{variable.value === '' && (
|
||||
<div className='pointer-events-none absolute inset-0 flex select-none items-start justify-start px-3 py-2 font-[380] text-muted-foreground text-sm leading-normal'>
|
||||
<div style={{ lineHeight: '20px' }}>{getPlaceholder(variable.type)}</div>
|
||||
</div>
|
||||
)}
|
||||
<Editor
|
||||
key={`editor-${variable.id}-${variable.type}`}
|
||||
value={formatValue(variable)}
|
||||
onValueChange={handleEditorChange.bind(null, variable)}
|
||||
onBlur={() => handleEditorBlur(variable.id)}
|
||||
onFocus={() => handleEditorFocus(variable.id)}
|
||||
highlight={(code) =>
|
||||
// Only apply syntax highlighting for non-basic text types
|
||||
variable.type === 'plain' || variable.type === 'string'
|
||||
? code
|
||||
: highlight(
|
||||
code,
|
||||
languages[getEditorLanguage(variable.type)],
|
||||
getEditorLanguage(variable.type)
|
||||
)
|
||||
}
|
||||
padding={0}
|
||||
style={{
|
||||
fontFamily: 'inherit',
|
||||
lineHeight: '20px',
|
||||
width: '100%',
|
||||
maxWidth: '100%',
|
||||
whiteSpace: 'pre-wrap',
|
||||
wordBreak: 'break-all',
|
||||
overflowWrap: 'break-word',
|
||||
minHeight: '20px',
|
||||
overflow: 'hidden',
|
||||
{/* Editor */}
|
||||
<div className='relative overflow-hidden'>
|
||||
<div
|
||||
className='relative min-h-[36px] w-full max-w-full px-3 py-2 font-normal text-sm'
|
||||
ref={(el) => {
|
||||
editorRefs.current[variable.id] = el
|
||||
}}
|
||||
className='[&>pre]:!max-w-full [&>pre]:!overflow-hidden [&>pre]:!whitespace-pre-wrap [&>pre]:!break-all [&>pre]:!overflow-wrap-break-word [&>textarea]:!max-w-full [&>textarea]:!overflow-hidden [&>textarea]:!whitespace-pre-wrap [&>textarea]:!break-all [&>textarea]:!overflow-wrap-break-word font-[380] text-foreground text-sm leading-normal focus:outline-none'
|
||||
textareaClassName='focus:outline-none focus:ring-0 bg-transparent resize-none w-full max-w-full whitespace-pre-wrap break-all overflow-wrap-break-word overflow-hidden font-[380] text-foreground'
|
||||
/>
|
||||
style={{ maxWidth: '100%' }}
|
||||
>
|
||||
{variable.value === '' && (
|
||||
<div className='pointer-events-none absolute inset-0 flex select-none items-start justify-start px-3 py-2 font-[380] text-muted-foreground text-sm leading-normal'>
|
||||
<div style={{ lineHeight: '20px' }}>
|
||||
{getPlaceholder(variable.type)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
<Editor
|
||||
key={`editor-${variable.id}-${variable.type}`}
|
||||
value={formatValue(variable)}
|
||||
onValueChange={handleEditorChange.bind(null, variable)}
|
||||
onBlur={() => handleEditorBlur(variable.id)}
|
||||
onFocus={() => handleEditorFocus(variable.id)}
|
||||
highlight={(code) =>
|
||||
// Only apply syntax highlighting for non-basic text types
|
||||
variable.type === 'plain' || variable.type === 'string'
|
||||
? code
|
||||
: highlight(
|
||||
code,
|
||||
languages[getEditorLanguage(variable.type)],
|
||||
getEditorLanguage(variable.type)
|
||||
)
|
||||
}
|
||||
padding={0}
|
||||
style={{
|
||||
fontFamily: 'inherit',
|
||||
lineHeight: '20px',
|
||||
width: '100%',
|
||||
maxWidth: '100%',
|
||||
whiteSpace: 'pre-wrap',
|
||||
wordBreak: 'break-all',
|
||||
overflowWrap: 'break-word',
|
||||
minHeight: '20px',
|
||||
overflow: 'hidden',
|
||||
}}
|
||||
className='[&>pre]:!max-w-full [&>pre]:!overflow-hidden [&>pre]:!whitespace-pre-wrap [&>pre]:!break-all [&>pre]:!overflow-wrap-break-word [&>textarea]:!max-w-full [&>textarea]:!overflow-hidden [&>textarea]:!whitespace-pre-wrap [&>textarea]:!break-all [&>textarea]:!overflow-wrap-break-word font-[380] text-foreground text-sm leading-normal focus:outline-none'
|
||||
textareaClassName='focus:outline-none focus:ring-0 bg-transparent resize-none w-full max-w-full whitespace-pre-wrap break-all overflow-wrap-break-word overflow-hidden font-[380] text-foreground'
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
|
||||
|
||||
@@ -492,11 +492,7 @@ export function Panel() {
      <div className='flex-1 overflow-hidden px-3'>
        {/* Keep all tabs mounted but hidden to preserve state and animations */}
        <div style={{ display: activeTab === 'chat' ? 'block' : 'none', height: '100%' }}>
          <Chat
            panelWidth={panelWidth}
            chatMessage={chatMessage}
            setChatMessage={setChatMessage}
          />
          <Chat chatMessage={chatMessage} setChatMessage={setChatMessage} />
        </div>
        <div style={{ display: activeTab === 'console' ? 'block' : 'none', height: '100%' }}>
          <Console panelWidth={panelWidth} />
|
||||
|
||||
@@ -1,73 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import * as React from 'react'
|
||||
import { format } from 'date-fns'
|
||||
import { Calendar as CalendarIcon } from 'lucide-react'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Calendar } from '@/components/ui/calendar'
|
||||
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/hooks/use-sub-block-value'
|
||||
|
||||
interface DateInputProps {
|
||||
blockId: string
|
||||
subBlockId: string
|
||||
placeholder?: string
|
||||
isPreview?: boolean
|
||||
previewValue?: string | null
|
||||
disabled?: boolean
|
||||
}
|
||||
|
||||
export function DateInput({
|
||||
blockId,
|
||||
subBlockId,
|
||||
placeholder,
|
||||
isPreview = false,
|
||||
previewValue,
|
||||
disabled = false,
|
||||
}: DateInputProps) {
|
||||
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId)
|
||||
|
||||
// Use preview value when in preview mode, otherwise use store value
|
||||
const value = isPreview ? previewValue : storeValue
|
||||
const date = value ? new Date(value) : undefined
|
||||
|
||||
const isPastDate = React.useMemo(() => {
|
||||
if (!date) return false
|
||||
const today = new Date()
|
||||
today.setHours(0, 0, 0, 0)
|
||||
return date < today
|
||||
}, [date])
|
||||
|
||||
const handleDateSelect = (selectedDate: Date | undefined) => {
|
||||
if (isPreview || disabled) return
|
||||
|
||||
if (selectedDate) {
|
||||
const today = new Date()
|
||||
today.setHours(0, 0, 0, 0)
|
||||
}
|
||||
setStoreValue(selectedDate?.toISOString() || '')
|
||||
}
|
||||
|
||||
return (
|
||||
<Popover>
|
||||
<PopoverTrigger asChild>
|
||||
<Button
|
||||
variant='outline'
|
||||
disabled={isPreview || disabled}
|
||||
className={cn(
|
||||
'w-full justify-start text-left font-normal',
|
||||
!date && 'text-muted-foreground',
|
||||
isPastDate && 'border-red-500'
|
||||
)}
|
||||
>
|
||||
<CalendarIcon className='mr-1 h-4 w-4' />
|
||||
{date ? format(date, 'MMM d, yy') : <span>{placeholder || 'Pick a date'}</span>}
|
||||
</Button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent className='w-auto p-0'>
|
||||
<Calendar mode='single' selected={date} onSelect={handleDateSelect} initialFocus />
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
)
|
||||
}
|
||||
@@ -22,6 +22,7 @@ interface DropdownProps {
  previewValue?: string | null
  disabled?: boolean
  placeholder?: string
  config?: import('@/blocks/types').SubBlockConfig
}

export function Dropdown({
@@ -34,6 +35,7 @@ export function Dropdown({
  previewValue,
  disabled,
  placeholder = 'Select an option...',
  config,
}: DropdownProps) {
  const [storeValue, setStoreValue] = useSubBlockValue<string>(blockId, subBlockId)
  const [storeInitialized, setStoreInitialized] = useState(false)
|
||||
@@ -281,7 +283,7 @@ export function Dropdown({
|
||||
|
||||
{/* Dropdown */}
|
||||
{open && (
|
||||
<div className='absolute top-full left-0 z-[100] mt-1 w-full min-w-[286px]'>
|
||||
<div className='absolute top-full left-0 z-[100] mt-1 w-full'>
|
||||
<div className='allow-scroll fade-in-0 zoom-in-95 animate-in rounded-md border bg-popover text-popover-foreground shadow-lg'>
|
||||
<div
|
||||
ref={dropdownRef}
|
||||
|
||||
@@ -237,10 +237,11 @@ export function GoogleDrivePicker({

    setIsLoading(true)
    try {
      const url = new URL('/api/auth/oauth/token', window.location.origin)
      url.searchParams.set('credentialId', effectiveCredentialId)
      // include workflowId if available via global registry (server adds session owner otherwise)
      const response = await fetch(url.toString())
      const response = await fetch('/api/auth/oauth/token', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ credentialId: effectiveCredentialId, workflowId }),
      })

      if (!response.ok) {
        throw new Error(`Failed to fetch access token: ${response.status}`)
|
||||
|
||||
@@ -13,7 +13,7 @@ import {
  CommandList,
} from '@/components/ui/command'
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
import { Logger } from '@/lib/logs/console/logger'
import { createLogger } from '@/lib/logs/console/logger'
import {
  type Credential,
  getProviderIdFromServiceId,
@@ -22,7 +22,7 @@ import {
} from '@/lib/oauth'
import { OAuthRequiredModal } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/credential-selector/components/oauth-required-modal'

const logger = new Logger('JiraIssueSelector')
const logger = createLogger('JiraIssueSelector')
|
||||
|
||||
export interface JiraIssueInfo {
|
||||
id: string
|
||||
|
||||
@@ -88,6 +88,8 @@ export function MicrosoftFileSelector({
|
||||
const [showOAuthModal, setShowOAuthModal] = useState(false)
|
||||
const [credentialsLoaded, setCredentialsLoaded] = useState(false)
|
||||
const initialFetchRef = useRef(false)
|
||||
// Track the last (credentialId, fileId) we attempted to resolve to avoid tight retry loops
|
||||
const lastMetaAttemptRef = useRef<string>('')
|
||||
|
||||
// Handle Microsoft Planner task selection
|
||||
const [plannerTasks, setPlannerTasks] = useState<PlannerTask[]>([])
|
||||
@@ -496,11 +498,15 @@ export function MicrosoftFileSelector({
|
||||
setSelectedFileId('')
|
||||
onChange('')
|
||||
}
|
||||
// Reset memo when credential is cleared
|
||||
lastMetaAttemptRef.current = ''
|
||||
} else if (prevCredentialId && prevCredentialId !== selectedCredentialId) {
|
||||
// Credentials changed (not initial load) - clear file info to force refetch
|
||||
if (selectedFile) {
|
||||
setSelectedFile(null)
|
||||
}
|
||||
// Reset memo when switching credentials
|
||||
lastMetaAttemptRef.current = ''
|
||||
}
|
||||
}, [selectedCredentialId, selectedFile, onChange])
|
||||
|
||||
@@ -514,10 +520,17 @@ export function MicrosoftFileSelector({
|
||||
(!selectedFile || selectedFile.id !== value) &&
|
||||
!isLoadingSelectedFile
|
||||
) {
|
||||
// Avoid tight retry loops by memoizing the last attempt tuple
|
||||
const attemptKey = `${selectedCredentialId}::${value}`
|
||||
if (lastMetaAttemptRef.current === attemptKey) {
|
||||
return
|
||||
}
|
||||
lastMetaAttemptRef.current = attemptKey
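// The key is the `${credentialId}::${fileId}` tuple, so metadata resolution runs at most once per
// credential/file pair and a failing fetch does not retrigger itself on every re-render.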
|
||||
|
||||
if (serviceId === 'microsoft-planner') {
|
||||
void fetchPlannerTaskById(value)
|
||||
} else {
|
||||
fetchFileById(value)
|
||||
void fetchFileById(value)
|
||||
}
|
||||
}
|
||||
}, [
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { getEnv } from '@/lib/env'
|
||||
import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
import {
|
||||
ConfluenceFileSelector,
|
||||
DiscordChannelSelector,
|
||||
@@ -73,8 +74,12 @@ export function FileSelectorInput({
|
||||
const [botTokenValue] = useSubBlockValue(blockId, 'botToken')
|
||||
|
||||
// Determine if the persisted credential belongs to the current viewer
|
||||
// Use service providerId where available (e.g., onedrive/sharepoint) instead of base provider ("microsoft")
|
||||
const foreignCheckProvider = subBlock.serviceId
|
||||
? getProviderIdFromServiceId(subBlock.serviceId)
|
||||
: (subBlock.provider as string) || ''
|
||||
const { isForeignCredential } = useForeignCredential(
|
||||
subBlock.provider || subBlock.serviceId || '',
|
||||
foreignCheckProvider,
|
||||
(connectedCredential as string) || ''
|
||||
)
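Rationale (inferred from the comment above): OneDrive and SharePoint credentials share the base "microsoft" provider, so getProviderIdFromServiceId supplies the service-level provider id and the foreign-credential check compares like with like when deciding whether the stored credential belongs to the current viewer.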
|
||||
|
||||
@@ -224,12 +229,6 @@ export function FileSelectorInput({
|
||||
}
|
||||
onChange={(issueKey) => {
|
||||
collaborativeSetSubblockValue(blockId, subBlock.id, issueKey)
|
||||
// Clear related fields when a new issue is selected
|
||||
collaborativeSetSubblockValue(blockId, 'summary', '')
|
||||
collaborativeSetSubblockValue(blockId, 'description', '')
|
||||
if (!issueKey) {
|
||||
collaborativeSetSubblockValue(blockId, 'manualIssueKey', '')
|
||||
}
|
||||
}}
|
||||
domain={domain}
|
||||
provider='jira'
|
||||
@@ -353,7 +352,7 @@ export function FileSelectorInput({
|
||||
requiredScopes={subBlock.requiredScopes || []}
|
||||
serviceId={subBlock.serviceId}
|
||||
label={subBlock.placeholder || 'Select SharePoint site'}
|
||||
disabled={disabled || !credential}
|
||||
disabled={finalDisabled}
|
||||
showPreview={true}
|
||||
workflowId={activeWorkflowId || ''}
|
||||
credentialId={credential}
|
||||
@@ -389,7 +388,7 @@ export function FileSelectorInput({
|
||||
requiredScopes={subBlock.requiredScopes || []}
|
||||
serviceId='microsoft-planner'
|
||||
label={subBlock.placeholder || 'Select task'}
|
||||
disabled={disabled || !credential || !planId}
|
||||
disabled={finalDisabled}
|
||||
showPreview={true}
|
||||
planId={planId}
|
||||
workflowId={activeWorkflowId || ''}
|
||||
@@ -447,7 +446,7 @@ export function FileSelectorInput({
|
||||
requiredScopes={subBlock.requiredScopes || []}
|
||||
serviceId={subBlock.serviceId}
|
||||
label={subBlock.placeholder || 'Select Teams message location'}
|
||||
disabled={disabled || !credential}
|
||||
disabled={finalDisabled}
|
||||
showPreview={true}
|
||||
credential={credential}
|
||||
selectionType={selectionType}
|
||||
@@ -490,7 +489,7 @@ export function FileSelectorInput({
|
||||
requiredScopes={subBlock.requiredScopes || []}
|
||||
serviceId={subBlock.serviceId}
|
||||
label={subBlock.placeholder || `Select ${itemType}`}
|
||||
disabled={disabled || !credential}
|
||||
disabled={finalDisabled}
|
||||
showPreview={true}
|
||||
credentialId={credential}
|
||||
itemType={itemType}
|
||||
@@ -531,7 +530,7 @@ export function FileSelectorInput({
|
||||
provider={provider}
|
||||
requiredScopes={subBlock.requiredScopes || []}
|
||||
label={subBlock.placeholder || 'Select file'}
|
||||
disabled={disabled || !credential}
|
||||
disabled={finalDisabled}
|
||||
serviceId={subBlock.serviceId}
|
||||
mimeTypeFilter={subBlock.mimeType}
|
||||
showPreview={true}
|
||||
|
||||
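In the @@ -73,8 +74,12 @@ hunk above, the provider used for the foreign-credential check is now derived from the sub-block's serviceId when one exists (so OneDrive/SharePoint resolve to their service-specific provider id) and only falls back to the base provider string otherwise. A small sketch of that resolution step; `getProviderIdFromServiceId` is used as in the diff, while the reduced `SubBlockLike` shape is an assumption for illustration:

import { getProviderIdFromServiceId } from '@/lib/oauth'

interface SubBlockLike {
  serviceId?: string
  provider?: string
}

// Prefer the service-scoped provider id; otherwise use the block's base provider.
export function resolveForeignCheckProvider(subBlock: SubBlockLike): string {
  return subBlock.serviceId
    ? getProviderIdFromServiceId(subBlock.serviceId)
    : subBlock.provider || ''
}

The resolved id is what the diff passes to useForeignCredential in place of the old `subBlock.provider || subBlock.serviceId` expression.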
@@ -4,7 +4,6 @@ export { Code } from './code'
export { ComboBox } from './combobox'
export { ConditionInput } from './condition-input'
export { CredentialSelector } from './credential-selector/credential-selector'
export { DateInput } from './date-input'
export { DocumentSelector } from './document-selector/document-selector'
export { Dropdown } from './dropdown'
export { EvalInput } from './eval-input'
@@ -1,4 +1,4 @@
import { useRef, useState } from 'react'
import { useEffect, useRef, useState } from 'react'
import { ChevronDown, Plus, Trash } from 'lucide-react'
import { Badge } from '@/components/ui/badge'
import { Button } from '@/components/ui/button'

@@ -8,10 +8,16 @@ import {
  DropdownMenuItem,
  DropdownMenuTrigger,
} from '@/components/ui/dropdown-menu'
import { formatDisplayText } from '@/components/ui/formatted-text'
import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import { checkTagTrigger, TagDropdown } from '@/components/ui/tag-dropdown'
import {
  Select,
  SelectContent,
  SelectItem,
  SelectTrigger,
  SelectValue,
} from '@/components/ui/select'
import { Textarea } from '@/components/ui/textarea'
import { cn } from '@/lib/utils'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/hooks/use-sub-block-value'

@@ -59,27 +65,31 @@ export function FieldFormat({
  emptyMessage = 'No fields defined',
  showType = true,
  showValue = false,
  valuePlaceholder = 'Enter value or <variable.name>',
  valuePlaceholder = 'Enter test value',
  isConnecting = false,
  config,
}: FieldFormatProps) {
  const [storeValue, setStoreValue] = useSubBlockValue<Field[]>(blockId, subBlockId)
  const [tagDropdownStates, setTagDropdownStates] = useState<
    Record<
      string,
      {
        visible: boolean
        cursorPosition: number
      }
    >
  >({})
  const [dragHighlight, setDragHighlight] = useState<Record<string, boolean>>({})
  const valueInputRefs = useRef<Record<string, HTMLInputElement>>({})
  const valueInputRefs = useRef<Record<string, HTMLInputElement | HTMLTextAreaElement>>({})
  const [localValues, setLocalValues] = useState<Record<string, string>>({})

  // Use preview value when in preview mode, otherwise use store value
  const value = isPreview ? previewValue : storeValue
  const fields: Field[] = value || []

  useEffect(() => {
    const initial: Record<string, string> = {}
    ;(fields || []).forEach((f) => {
      if (localValues[f.id] === undefined) {
        initial[f.id] = (f.value as string) || ''
      }
    })
    if (Object.keys(initial).length > 0) {
      setLocalValues((prev) => ({ ...prev, ...initial }))
    }
  }, [fields])

  // Field operations
  const addField = () => {
    if (isPreview || disabled) return
@@ -88,12 +98,12 @@ export function FieldFormat({
      ...DEFAULT_FIELD,
      id: crypto.randomUUID(),
    }
    setStoreValue([...fields, newField])
    setStoreValue([...(fields || []), newField])
  }

  const removeField = (id: string) => {
    if (isPreview || disabled) return
    setStoreValue(fields.filter((field: Field) => field.id !== id))
    setStoreValue((fields || []).filter((field: Field) => field.id !== id))
  }

  // Validate field name for API safety

@@ -103,38 +113,22 @@ export function FieldFormat({
    return name.replace(/[\x00-\x1F"\\]/g, '').trim()
  }

  // Tag dropdown handlers
  const handleValueInputChange = (fieldId: string, newValue: string) => {
    const input = valueInputRefs.current[fieldId]
    if (!input) return

    const cursorPosition = input.selectionStart || 0
    const shouldShow = checkTagTrigger(newValue, cursorPosition)

    setTagDropdownStates((prev) => ({
      ...prev,
      [fieldId]: {
        visible: shouldShow.show,
        cursorPosition,
      },
    }))

    updateField(fieldId, 'value', newValue)
    setLocalValues((prev) => ({ ...prev, [fieldId]: newValue }))
  }

  const handleTagSelect = (fieldId: string, newValue: string) => {
    updateField(fieldId, 'value', newValue)
    setTagDropdownStates((prev) => ({
      ...prev,
      [fieldId]: { ...prev[fieldId], visible: false },
    }))
  }
  // Value normalization: keep it simple for string types

  const handleTagDropdownClose = (fieldId: string) => {
    setTagDropdownStates((prev) => ({
      ...prev,
      [fieldId]: { ...prev[fieldId], visible: false },
    }))
  const handleValueInputBlur = (field: Field) => {
    if (isPreview || disabled) return

    const inputEl = valueInputRefs.current[field.id]
    if (!inputEl) return

    const current = localValues[field.id] ?? inputEl.value ?? ''
    const trimmed = current.trim()
    if (!trimmed) return
    updateField(field.id, 'value', current)
  }

  // Drag and drop handlers for connection blocks
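The hunks above replace the per-keystroke store writes and tag-dropdown bookkeeping with a local draft: keystrokes land in `localValues`, and the store is only updated on blur, skipping blank values. A simplified single-field sketch of that edit-locally, commit-on-blur pattern, with hypothetical prop names (`storedValue`, `onCommit`); the real component keeps one draft per field id and seeds it once via the new useEffect:

import { useEffect, useState } from 'react'

interface DraftInputProps {
  storedValue: string
  onCommit: (value: string) => void
  disabled?: boolean
}

export function DraftInput({ storedValue, onCommit, disabled }: DraftInputProps) {
  // Local draft keeps keystrokes out of the shared store while typing.
  const [draft, setDraft] = useState(storedValue)

  // Re-seed the draft if the stored value changes elsewhere (e.g. a collaborative edit).
  useEffect(() => {
    setDraft(storedValue)
  }, [storedValue])

  const commit = () => {
    if (disabled) return
    // Mirror the diff's trimmed-empty check: blank drafts are not written back.
    if (!draft.trim()) return
    onCommit(draft)
  }

  return (
    <input
      value={draft}
      onChange={(e) => setDraft(e.target.value)}
      onBlur={commit}
      disabled={disabled}
    />
  )
}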
@@ -152,47 +146,8 @@ export function FieldFormat({
  const handleDrop = (e: React.DragEvent, fieldId: string) => {
    e.preventDefault()
    setDragHighlight((prev) => ({ ...prev, [fieldId]: false }))

    try {
      const data = JSON.parse(e.dataTransfer.getData('application/json'))
      if (data.type === 'connectionBlock' && data.connectionData) {
        const input = valueInputRefs.current[fieldId]
        if (!input) return

        // Focus the input first
        input.focus()

        // Get current cursor position or use end of field
        const dropPosition = input.selectionStart ?? (input.value?.length || 0)

        // Insert '<' at drop position to trigger the dropdown
        const currentValue = input.value || ''
        const newValue = `${currentValue.slice(0, dropPosition)}<${currentValue.slice(dropPosition)}`

        // Update the field value
        updateField(fieldId, 'value', newValue)

        // Set cursor position and show dropdown
        setTimeout(() => {
          input.selectionStart = dropPosition + 1
          input.selectionEnd = dropPosition + 1

          // Trigger dropdown by simulating the tag check
          const cursorPosition = dropPosition + 1
          const shouldShow = checkTagTrigger(newValue, cursorPosition)

          setTagDropdownStates((prev) => ({
            ...prev,
            [fieldId]: {
              visible: shouldShow.show,
              cursorPosition,
            },
          }))
        }, 0)
      }
    } catch (error) {
      console.error('Error handling drop:', error)
    }
    const input = valueInputRefs.current[fieldId]
    input?.focus()
  }

  // Update handlers

@@ -204,12 +159,14 @@ export function FieldFormat({
      value = validateFieldName(value)
    }

    setStoreValue(fields.map((f: Field) => (f.id === id ? { ...f, [field]: value } : f)))
    setStoreValue((fields || []).map((f: Field) => (f.id === id ? { ...f, [field]: value } : f)))
  }

  const toggleCollapse = (id: string) => {
    if (isPreview || disabled) return
    setStoreValue(fields.map((f: Field) => (f.id === id ? { ...f, collapsed: !f.collapsed } : f)))
    setStoreValue(
      (fields || []).map((f: Field) => (f.id === id ? { ...f, collapsed: !f.collapsed } : f))
    )
  }

  // Field header
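The update helpers above also switch from `fields.map(...)` / `fields.filter(...)` to `(fields || []).map(...)`, so an undefined store value (a sub-block that has never been written) cannot throw. A tiny standalone sketch of the same defensive, immutable update shape, with a reduced `Field` stand-in:

interface Field {
  id: string
  value?: string
  collapsed?: boolean
}

// Always returns an array, even when the stored list is still undefined.
export function updateFieldValue(fields: Field[] | undefined, id: string, value: string): Field[] {
  return (fields || []).map((f) => (f.id === id ? { ...f, value } : f))
}

export function removeFieldById(fields: Field[] | undefined, id: string): Field[] {
  return (fields || []).filter((f) => f.id !== id)
}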
@@ -371,54 +328,66 @@ export function FieldFormat({
  <div className='space-y-1.5'>
    <Label className='text-xs'>Value</Label>
    <div className='relative'>
      <Input
        ref={(el) => {
          if (el) valueInputRefs.current[field.id] = el
        }}
        name='value'
        value={field.value || ''}
        onChange={(e) => handleValueInputChange(field.id, e.target.value)}
        onKeyDown={(e) => {
          if (e.key === 'Escape') {
            handleTagDropdownClose(field.id)
      {field.type === 'boolean' ? (
        <Select
          value={localValues[field.id] ?? (field.value as string) ?? ''}
          onValueChange={(v) => {
            setLocalValues((prev) => ({ ...prev, [field.id]: v }))
            if (!isPreview && !disabled) updateField(field.id, 'value', v)
          }}
        >
          <SelectTrigger className='h-9 w-full justify-between font-normal'>
            <SelectValue placeholder='Select value' className='truncate' />
          </SelectTrigger>
          <SelectContent>
            <SelectItem value='true'>true</SelectItem>
            <SelectItem value='false'>false</SelectItem>
          </SelectContent>
        </Select>
      ) : field.type === 'object' || field.type === 'array' ? (
        <Textarea
          ref={(el) => {
            if (el) valueInputRefs.current[field.id] = el
          }}
          name='value'
          value={localValues[field.id] ?? (field.value as string) ?? ''}
          onChange={(e) => handleValueInputChange(field.id, e.target.value)}
          onBlur={() => handleValueInputBlur(field)}
          placeholder={
            field.type === 'object' ? '{\n "key": "value"\n}' : '[\n 1, 2, 3\n]'
          }
          }}
          onDragOver={(e) => handleDragOver(e, field.id)}
          onDragLeave={(e) => handleDragLeave(e, field.id)}
          onDrop={(e) => handleDrop(e, field.id)}
          placeholder={valuePlaceholder}
          disabled={isPreview || disabled}
          className={cn(
            'h-9 text-transparent caret-foreground placeholder:text-muted-foreground/50',
            dragHighlight[field.id] && 'ring-2 ring-blue-500 ring-offset-2',
            isConnecting &&
              config?.connectionDroppable !== false &&
              'ring-2 ring-blue-500 ring-offset-2 focus-visible:ring-blue-500'
          )}
        />
        {field.value && (
          <div className='pointer-events-none absolute inset-0 flex items-center px-3 py-2'>
            <div className='w-full overflow-hidden text-ellipsis whitespace-nowrap text-sm'>
              {formatDisplayText(field.value, true)}
            </div>
          </div>
          disabled={isPreview || disabled}
          className={cn(
            'min-h-[120px] font-mono text-sm placeholder:text-muted-foreground/50',
            dragHighlight[field.id] && 'ring-2 ring-blue-500 ring-offset-2',
            isConnecting &&
              config?.connectionDroppable !== false &&
              'ring-2 ring-blue-500 ring-offset-2 focus-visible:ring-blue-500'
          )}
        />
      ) : (
        <Input
          ref={(el) => {
            if (el) valueInputRefs.current[field.id] = el
          }}
          name='value'
          value={localValues[field.id] ?? field.value ?? ''}
          onChange={(e) => handleValueInputChange(field.id, e.target.value)}
          onBlur={() => handleValueInputBlur(field)}
          onDragOver={(e) => handleDragOver(e, field.id)}
          onDragLeave={(e) => handleDragLeave(e, field.id)}
          onDrop={(e) => handleDrop(e, field.id)}
          placeholder={valuePlaceholder}
          disabled={isPreview || disabled}
          className={cn(
            'h-9 placeholder:text-muted-foreground/50',
            dragHighlight[field.id] && 'ring-2 ring-blue-500 ring-offset-2',
            isConnecting &&
              config?.connectionDroppable !== false &&
              'ring-2 ring-blue-500 ring-offset-2 focus-visible:ring-blue-500'
          )}
        />
      )}
      <TagDropdown
        visible={tagDropdownStates[field.id]?.visible || false}
        onSelect={(newValue) => handleTagSelect(field.id, newValue)}
        blockId={blockId}
        activeSourceBlockId={null}
        inputValue={field.value || ''}
        cursorPosition={tagDropdownStates[field.id]?.cursorPosition || 0}
        onClose={() => handleTagDropdownClose(field.id)}
        style={{
          position: 'absolute',
          top: '100%',
          left: 0,
          right: 0,
          zIndex: 9999,
        }}
      />
    </div>
  </div>
)}
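The render hunk above branches on the field's declared type: booleans get a true/false Select, object and array fields get a monospace Textarea with a JSON-shaped placeholder, and everything else keeps a single-line Input. A stripped-down sketch of that branching using native elements instead of the project's UI kit; the `FieldType` union and prop names are assumptions for illustration:

type FieldType = 'string' | 'number' | 'boolean' | 'object' | 'array'

interface FieldValueEditorProps {
  type: FieldType
  value: string
  onChange: (value: string) => void
  onBlur?: () => void
}

export function FieldValueEditor({ type, value, onChange, onBlur }: FieldValueEditorProps) {
  if (type === 'boolean') {
    // Booleans are constrained to the two valid literals.
    return (
      <select value={value} onChange={(e) => onChange(e.target.value)} onBlur={onBlur}>
        <option value='true'>true</option>
        <option value='false'>false</option>
      </select>
    )
  }

  if (type === 'object' || type === 'array') {
    // Structured values get a multi-line editor and a JSON-shaped placeholder.
    const placeholder = type === 'object' ? '{\n  "key": "value"\n}' : '[\n  1, 2, 3\n]'
    return (
      <textarea
        value={value}
        onChange={(e) => onChange(e.target.value)}
        onBlur={onBlur}
        placeholder={placeholder}
      />
    )
  }

  // Strings, numbers, and anything else fall back to a plain text input.
  return <input value={value} onChange={(e) => onChange(e.target.value)} onBlur={onBlur} />
}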
@@ -460,7 +429,7 @@ export function ResponseFormat(
      emptyMessage='No response fields defined'
      showType={false}
      showValue={true}
      valuePlaceholder='Enter value or <variable.name>'
      valuePlaceholder='Enter test value'
    />
  )
}
Some files were not shown because too many files have changed in this diff.