Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-09 23:17:59 -05:00)

Compare commits — 110 commits
| SHA1 |
| ---- |
| b7d536b7bc |
| e83745fcaf |
| 3887733da5 |
| 614d826217 |
| a0a4b21000 |
| 1f6dcd8465 |
| 30538d9380 |
| 6149489483 |
| 9ede001202 |
| 209d822ce9 |
| 31d9e2a4a8 |
| e5080febd5 |
| 529fd44405 |
| 717b4dd2ff |
| 8aa86e0e9d |
| 148f0a6da3 |
| 14f422ef5e |
| f27cb18883 |
| e102b6cf17 |
| 50595c5c49 |
| 3c61bc167a |
| ef681d8a04 |
| df4971a876 |
| f269fc9776 |
| c65384d715 |
| 24e19a83a5 |
| 5c487f59f9 |
| c45da7b93e |
| cfc261d646 |
| 763d0de5d5 |
| eade867d98 |
| 4a26b061a4 |
| 8176b37d89 |
| 610ea0b689 |
| 3c1914c566 |
| 218041dba3 |
| a2827a52c0 |
| 6ca8311a76 |
| 37c4f835dd |
| 0b01d4bc78 |
| a5883171f9 |
| c2f786e40b |
| 3421eaec27 |
| f6b25bf727 |
| aa343fb62f |
| cc249c2dd0 |
| f1734766c3 |
| e37f362459 |
| bb9291aecc |
| 5dc3ba3379 |
| 684a8020d4 |
| 9097c520a5 |
| bacb6f3831 |
| 2a0224f6ae |
| 6cb15a620a |
| c7b77bd303 |
| c0b8e1aca3 |
| 82cb609bb7 |
| 07cd6f9e49 |
| c53e950269 |
| 2ce68aedf5 |
| 88282378ea |
| 1b3b85f4c4 |
| 4b60bba992 |
| 4aaa68d21b |
| 776ae06671 |
| ccf5c2f6d8 |
| 02c41127c2 |
| d1fe209d29 |
| ee66c15ed9 |
| d9046042af |
| 4fffc66ee0 |
| a3159bcebc |
| 2354909ef9 |
| caccb61362 |
| 3c7e7949d9 |
| 537fbdb2ce |
| 3460a7b39e |
| d75751bbe6 |
| 2c9a4f4c3e |
| 767b63c57d |
| b58d8773c9 |
| 3af1a6e100 |
| 840a028f92 |
| 7bc644a478 |
| 70a51006f6 |
| 17513d77ea |
| 6dc8b17bed |
| 70a5f4ec31 |
| b9fa50b4de |
| 97021559cc |
| 76c0c56689 |
| 850447a604 |
| 0f21fbf705 |
| 3e45d793f1 |
| 5167deb75c |
| 02b7899861 |
| 7e4669108f |
| ede224a15f |
| 5cf7d025db |
| b4eda8fe6a |
| 60e2e6c735 |
| c635b19548 |
| f3bc1fc250 |
| 0bf9ce0b9e |
| e22f0123a3 |
| 231bfb9add |
| cac9ad250d |
| 78b5ae7b3d |
| 016cd6750c |
@@ -88,9 +88,8 @@ For security and performance reasons, function execution has certain limitations
 ### Outputs

-- **Result**: The value returned by your function
-- **Standard Output**: Any console output from your function
-- **Execution Time**: The time taken to execute your function (in milliseconds)
+- **result**: The value returned by your function
+- **stdout**: Any console output from your function

 ## Example Usage
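For readers tracking this rename, a small sketch of downstream references to the renamed outputs, assuming a Function block named `myFunction` (the block name is illustrative, not from this diff):

```ts
// Illustrative references using the renamed, lowercase output keys.
// `myFunction` is a hypothetical block name.
const agentContext = '<myFunction.result>' // the function's return value
const debugLog = '<myFunction.stdout>'     // its console output
```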
@@ -115,14 +115,9 @@ Headers are configured as key-value pairs:
 </Tab>
 <Tab>
   <ul className="list-disc space-y-2 pl-6">
-    <li>
-      <strong>response</strong>: Complete response object containing:
-      <ul className="list-disc space-y-1 pl-6 mt-2">
-        <li><strong>data</strong>: The response body data</li>
-        <li><strong>status</strong>: HTTP status code</li>
-        <li><strong>headers</strong>: Response headers</li>
-      </ul>
-    </li>
+    <li><strong>data</strong>: The response body data</li>
+    <li><strong>status</strong>: HTTP status code</li>
+    <li><strong>headers</strong>: Response headers</li>
   </ul>
 </Tab>
 </Tabs>
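The API block's outputs flatten the same way: fields that previously lived under `response` now hang directly off the block. A before/after sketch using the `apiCall` block name that also appears in the workflow examples later in this diff:

```ts
// Old nested references (pre-change)
const oldData = '<apiCall.response.data>'
const oldStatus = '<apiCall.response.status>'

// New flat references (post-change)
const newData = '<apiCall.data>'
const newStatus = '<apiCall.status>'
```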
@@ -66,17 +66,17 @@ Define the data to pass to the child workflow:

 - **Single Variable Input**: Select a variable or block output to pass to the child workflow
 - **Variable References**: Use `<variable.name>` to reference workflow variables
-- **Block References**: Use `<blockName.response.field>` to reference outputs from previous blocks
-- **Automatic Mapping**: The selected data is automatically available as `start.response.input` in the child workflow
+- **Block References**: Use `<blockName.field>` to reference outputs from previous blocks
+- **Automatic Mapping**: The selected data is automatically available as `start.input` in the child workflow
 - **Optional**: The input field is optional - child workflows can run without input data
 - **Type Preservation**: Variable types (strings, numbers, objects, etc.) are preserved when passed to the child workflow

 ### Examples of Input References

 - `<variable.customerData>` - Pass a workflow variable
-- `<dataProcessor.response.result>` - Pass the result from a previous block
-- `<start.response.input>` - Pass the original workflow input
-- `<apiCall.response.data.user>` - Pass a specific field from an API response
+- `<dataProcessor.result>` - Pass the result from a previous block
+- `<start.input>` - Pass the original workflow input
+- `<apiCall.data.user>` - Pass a specific field from an API response

 ### Execution Context

@@ -109,7 +109,7 @@ To prevent infinite recursion and ensure system stability, the Workflow block in
     <strong>Workflow ID</strong>: The identifier of the workflow to execute
   </li>
   <li>
-    <strong>Input Variable</strong>: Variable or block reference to pass to the child workflow (e.g., `<variable.name>` or `<block.response.field>`)
+    <strong>Input Variable</strong>: Variable or block reference to pass to the child workflow (e.g., `<variable.name>` or `<block.field>`)
   </li>
   </ul>
 </Tab>

@@ -150,23 +150,23 @@ blocks:
   - type: workflow
     name: "Setup Customer Account"
     workflowId: "account-setup-workflow"
-    input: "<Validate Customer Data.response.result>"
+    input: "<Validate Customer Data.result>"

   - type: workflow
     name: "Send Welcome Email"
     workflowId: "welcome-email-workflow"
-    input: "<Setup Customer Account.response.result.accountDetails>"
+    input: "<Setup Customer Account.result.accountDetails>"
 ```

 ### Child Workflow: Customer Validation
 ```yaml
 # Reusable customer validation workflow
-# Access the input data using: start.response.input
+# Access the input data using: start.input
 blocks:
   - type: function
     name: "Validate Email"
     code: |
-      const customerData = start.response.input;
+      const customerData = start.input;
       const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
       return emailRegex.test(customerData.email);

@@ -174,7 +174,7 @@ blocks:
     name: "Check Credit Score"
     url: "https://api.creditcheck.com/score"
     method: "POST"
-    body: "<start.response.input>"
+    body: "<start.input>"
 ```

 ### Variable Reference Examples

@@ -184,13 +184,13 @@ blocks:
 input: "<variable.customerInfo>"

 # Using block outputs
-input: "<dataProcessor.response.cleanedData>"
+input: "<dataProcessor.cleanedData>"

 # Using nested object properties
-input: "<apiCall.response.data.user.profile>"
+input: "<apiCall.data.user.profile>"

 # Using array elements (if supported by the resolver)
-input: "<listProcessor.response.items[0]>"
+input: "<listProcessor.items[0]>"
 ```

 ## Access Control and Permissions
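Taken together, these documentation hunks describe one mechanical migration: every `<block.response.field>` reference loses its `response` segment. A minimal TypeScript sketch of that rewrite, assuming references always use the angle-bracket syntax shown above (the helper name `stripResponseSegment` is hypothetical, not part of the codebase):

```ts
// Hypothetical helper (not from the codebase): rewrites old-style references
// such as "<apiCall.response.data.user>" to the new flat "<apiCall.data.user>".
function stripResponseSegment(text: string): string {
  return text.replace(/<([^.>]+)\.response\./g, '<$1.')
}

// Example usage against a line from the old docs:
console.log(stripResponseSegment('input: "<dataProcessor.response.cleanedData>"'))
// -> input: "<dataProcessor.cleanedData>"
```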
@@ -81,4 +81,4 @@ Sim Studio provides a wide range of features designed to accelerate your development

 ##

-Ready to get started? Check out our [Getting Started](/getting-started) guide or explore our [Blocks](/docs/blocks) and [Tools](/docs/tools) in more detail.
+Ready to get started? Check out our [Getting Started](/getting-started) guide or explore our [Blocks](/blocks) and [Tools](/tools) in more detail.
@@ -182,10 +182,9 @@ Update multiple existing records in an Airtable table

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `records` | json | records of the response |
-| ↳ `record` | json | record of the response |
-| ↳ `metadata` | json | metadata of the response |
+| `records` | json | records output from the block |
+| `record` | json | record output from the block |
+| `metadata` | json | metadata output from the block |

 ## Notes

@@ -174,11 +174,10 @@ Manage and render prompts using Autoblocks prompt management system

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `promptId` | string | promptId of the response |
-| ↳ `version` | string | version of the response |
-| ↳ `renderedPrompt` | string | renderedPrompt of the response |
-| ↳ `templates` | json | templates of the response |
+| `promptId` | string | promptId output from the block |
+| `version` | string | version output from the block |
+| `renderedPrompt` | string | renderedPrompt output from the block |
+| `templates` | json | templates output from the block |

 ## Notes

@@ -102,11 +102,10 @@ Runs a browser automation task using BrowserUse

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `id` | string | id of the response |
-| ↳ `success` | boolean | success of the response |
-| ↳ `output` | any | output of the response |
-| ↳ `steps` | json | steps of the response |
+| `id` | string | id output from the block |
+| `success` | boolean | success output from the block |
+| `output` | any | output output from the block |
+| `steps` | json | steps output from the block |

 ## Notes

@@ -238,8 +238,7 @@ Populate Clay with data from a JSON file. Enables direct communication and notif

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `data` | any | data of the response |
+| `data` | any | data output from the block |

 ## Notes

@@ -113,12 +113,11 @@ Update a Confluence page using the Confluence API.

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `ts` | string | ts of the response |
-| ↳ `pageId` | string | pageId of the response |
-| ↳ `content` | string | content of the response |
-| ↳ `title` | string | title of the response |
-| ↳ `success` | boolean | success of the response |
+| `ts` | string | ts output from the block |
+| `pageId` | string | pageId output from the block |
+| `content` | string | content output from the block |
+| `title` | string | title output from the block |
+| `success` | boolean | success output from the block |

 ## Notes

@@ -150,9 +150,8 @@ Retrieve information about a Discord user

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `message` | string | message of the response |
-| ↳ `data` | any | data of the response |
+| `message` | string | message output from the block |
+| `data` | any | data output from the block |

 ## Notes

@@ -80,8 +80,7 @@ Convert TTS using ElevenLabs voices

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `audioUrl` | string | audioUrl of the response |
+| `audioUrl` | string | audioUrl output from the block |

 ## Notes

@@ -158,11 +158,10 @@ Get an AI-generated answer to a question with citations from the web using Exa A

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `results` | json | results of the response |
-| ↳ `similarLinks` | json | similarLinks of the response |
-| ↳ `answer` | string | answer of the response |
-| ↳ `citations` | json | citations of the response |
+| `results` | json | results output from the block |
+| `similarLinks` | json | similarLinks output from the block |
+| `answer` | string | answer output from the block |
+| `citations` | json | citations output from the block |

 ## Notes

@@ -87,9 +87,8 @@ This tool does not produce any outputs.

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `files` | json | files of the response |
-| ↳ `combinedContent` | string | combinedContent of the response |
+| `files` | json | files output from the block |
+| `combinedContent` | string | combinedContent output from the block |

 ## Notes

@@ -111,12 +111,11 @@ Search for information on the web using Firecrawl

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `markdown` | string | markdown of the response |
-| ↳ `html` | any | html of the response |
-| ↳ `metadata` | json | metadata of the response |
-| ↳ `data` | json | data of the response |
-| ↳ `warning` | any | warning of the response |
+| `markdown` | string | markdown output from the block |
+| `html` | any | html output from the block |
+| `metadata` | json | metadata output from the block |
+| `data` | json | data output from the block |
+| `warning` | any | warning output from the block |

 ## Notes

@@ -174,9 +174,8 @@ Retrieve the latest commit from a GitHub repository

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `content` | string | content of the response |
-| ↳ `metadata` | json | metadata of the response |
+| `content` | string | content output from the block |
+| `metadata` | json | metadata output from the block |

 ## Notes

@@ -130,9 +130,8 @@ No configuration parameters required.

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `content` | string | content of the response |
-| ↳ `metadata` | json | metadata of the response |
+| `content` | string | content output from the block |
+| `metadata` | json | metadata output from the block |

 ## Notes

@@ -228,9 +228,8 @@ Invite attendees to an existing Google Calendar event

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `content` | string | content of the response |
-| ↳ `metadata` | json | metadata of the response |
+| `content` | string | content output from the block |
+| `metadata` | json | metadata output from the block |

 ## Notes

@@ -159,10 +159,9 @@ Create a new Google Docs document

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `content` | string | content of the response |
-| ↳ `metadata` | json | metadata of the response |
-| ↳ `updatedContent` | boolean | updatedContent of the response |
+| `content` | string | content output from the block |
+| `metadata` | json | metadata output from the block |
+| `updatedContent` | boolean | updatedContent output from the block |

 ## Notes

@@ -177,9 +177,8 @@ List files and folders in Google Drive

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `file` | json | file of the response |
-| ↳ `files` | json | files of the response |
+| `file` | json | file output from the block |
+| `files` | json | files output from the block |

 ## Notes

@@ -101,7 +101,11 @@ Search the web with the Custom Search API

 ### Outputs

-This block does not produce any outputs.
+| Output | Type | Description |
+| ------ | ---- | ----------- |
+| `items` | json | items output from the block |
+| `searchInformation` | json | searchInformation output from the block |

 ## Notes

@@ -212,14 +212,13 @@ Append data to the end of a Google Sheets spreadsheet

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `data` | json | data of the response |
-| ↳ `metadata` | json | metadata of the response |
-| ↳ `updatedRange` | string | updatedRange of the response |
-| ↳ `updatedRows` | number | updatedRows of the response |
-| ↳ `updatedColumns` | number | updatedColumns of the response |
-| ↳ `updatedCells` | number | updatedCells of the response |
-| ↳ `tableRange` | string | tableRange of the response |
+| `data` | json | data output from the block |
+| `metadata` | json | metadata output from the block |
+| `updatedRange` | string | updatedRange output from the block |
+| `updatedRows` | number | updatedRows output from the block |
+| `updatedColumns` | number | updatedColumns output from the block |
+| `updatedCells` | number | updatedCells output from the block |
+| `tableRange` | string | tableRange output from the block |

 ## Notes

@@ -107,15 +107,14 @@ Search for guests in Guesty by phone number

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `id` | string | id of the response |
-| ↳ `guest` | json | guest of the response |
-| ↳ `checkIn` | string | checkIn of the response |
-| ↳ `checkOut` | string | checkOut of the response |
-| ↳ `status` | string | status of the response |
-| ↳ `listing` | json | listing of the response |
-| ↳ `money` | json | money of the response |
-| ↳ `guests` | json | guests of the response |
+| `id` | string | id output from the block |
+| `guest` | json | guest output from the block |
+| `checkIn` | string | checkIn output from the block |
+| `checkOut` | string | checkOut output from the block |
+| `status` | string | status output from the block |
+| `listing` | json | listing output from the block |
+| `money` | json | money output from the block |
+| `guests` | json | guests output from the block |

 ## Notes

@@ -115,10 +115,9 @@ Generate completions using Hugging Face Inference API

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `content` | string | content of the response |
-| ↳ `model` | string | model of the response |
-| ↳ `usage` | json | usage of the response |
+| `content` | string | content output from the block |
+| `model` | string | model output from the block |
+| `usage` | json | usage output from the block |

 ## Notes

@@ -93,10 +93,9 @@ Generate images using OpenAI

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `content` | string | content of the response |
-| ↳ `image` | string | image of the response |
-| ↳ `metadata` | json | metadata of the response |
+| `content` | string | content output from the block |
+| `image` | string | image output from the block |
+| `metadata` | json | metadata output from the block |

 ## Notes

@@ -101,8 +101,7 @@ Extract and process web content into clean, LLM-friendly text using Jina AI Read

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `content` | string | content of the response |
+| `content` | string | content output from the block |

 ## Notes

@@ -165,15 +165,14 @@ Retrieve multiple Jira issues in bulk

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `ts` | string | ts of the response |
-| ↳ `issueKey` | string | issueKey of the response |
-| ↳ `summary` | string | summary of the response |
-| ↳ `description` | string | description of the response |
-| ↳ `created` | string | created of the response |
-| ↳ `updated` | string | updated of the response |
-| ↳ `success` | boolean | success of the response |
-| ↳ `url` | string | url of the response |
+| `ts` | string | ts output from the block |
+| `issueKey` | string | issueKey output from the block |
+| `summary` | string | summary output from the block |
+| `description` | string | description output from the block |
+| `created` | string | created output from the block |
+| `updated` | string | updated output from the block |
+| `success` | boolean | success output from the block |
+| `url` | string | url output from the block |

 ## Notes
@@ -66,6 +66,13 @@ Search for similar content in one or more knowledge bases using vector similarit
 | `knowledgeBaseIds` | string | Yes | ID of the knowledge base to search in, or comma-separated IDs for multiple knowledge bases |
 | `query` | string | Yes | Search query text |
 | `topK` | number | No | Number of most similar results to return \(1-100\) |
+| `tag1` | string | No | Filter by tag 1 value |
+| `tag2` | string | No | Filter by tag 2 value |
+| `tag3` | string | No | Filter by tag 3 value |
+| `tag4` | string | No | Filter by tag 4 value |
+| `tag5` | string | No | Filter by tag 5 value |
+| `tag6` | string | No | Filter by tag 6 value |
+| `tag7` | string | No | Filter by tag 7 value |

 #### Output

@@ -111,6 +118,13 @@ Create a new document in a knowledge base
 | `knowledgeBaseId` | string | Yes | ID of the knowledge base containing the document |
 | `name` | string | Yes | Name of the document |
 | `content` | string | Yes | Content of the document |
+| `tag1` | string | No | Tag 1 value for the document |
+| `tag2` | string | No | Tag 2 value for the document |
+| `tag3` | string | No | Tag 3 value for the document |
+| `tag4` | string | No | Tag 4 value for the document |
+| `tag5` | string | No | Tag 5 value for the document |
+| `tag6` | string | No | Tag 6 value for the document |
+| `tag7` | string | No | Tag 7 value for the document |

 #### Output

@@ -135,10 +149,9 @@ Create a new document in a knowledge base

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `results` | json | results of the response |
-| ↳ `query` | string | query of the response |
-| ↳ `totalResults` | number | totalResults of the response |
+| `results` | json | results output from the block |
+| `query` | string | query output from the block |
+| `totalResults` | number | totalResults output from the block |

 ## Notes
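The new `tag1`–`tag7` filters slot into the existing search parameters. A minimal sketch of a search payload using them; the parameter names come from the table above, but the surrounding request shape and ID values are assumptions, not taken from this diff:

```ts
// Illustrative only: 'kb_123'/'kb_456' are placeholder knowledge base IDs.
const knowledgeSearchParams = {
  knowledgeBaseIds: 'kb_123,kb_456', // comma-separated for multiple bases
  query: 'refund policy for enterprise customers',
  topK: 10,                          // 1-100 most similar results
  tag1: 'policies',                  // optional tag filters
  tag2: 'enterprise',
}
```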
@@ -105,9 +105,8 @@ Create a new issue in Linear

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `issues` | json | issues of the response |
-| ↳ `issue` | json | issue of the response |
+| `issues` | json | issues output from the block |
+| `issue` | json | issue output from the block |

 ## Notes

@@ -92,9 +92,8 @@ Search the web for information using Linkup

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `answer` | string | answer of the response |
-| ↳ `sources` | json | sources of the response |
+| `answer` | string | answer output from the block |
+| `sources` | json | sources output from the block |

 ## Notes

@@ -126,10 +126,9 @@ Retrieve memories from Mem0 by ID or filter criteria

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `ids` | any | ids of the response |
-| ↳ `memories` | any | memories of the response |
-| ↳ `searchResults` | any | searchResults of the response |
+| `ids` | any | ids output from the block |
+| `memories` | any | memories output from the block |
+| `searchResults` | any | searchResults output from the block |

 ## Notes

@@ -124,9 +124,8 @@ Delete a specific memory by its ID

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `memories` | any | memories of the response |
-| ↳ `id` | string | id of the response |
+| `memories` | any | memories output from the block |
+| `id` | string | id output from the block |

 ## Notes

@@ -180,15 +180,14 @@ Add new rows to a Microsoft Excel table

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `data` | json | data of the response |
-| ↳ `metadata` | json | metadata of the response |
-| ↳ `updatedRange` | string | updatedRange of the response |
-| ↳ `updatedRows` | number | updatedRows of the response |
-| ↳ `updatedColumns` | number | updatedColumns of the response |
-| ↳ `updatedCells` | number | updatedCells of the response |
-| ↳ `index` | number | index of the response |
-| ↳ `values` | json | values of the response |
+| `data` | json | data output from the block |
+| `metadata` | json | metadata output from the block |
+| `updatedRange` | string | updatedRange output from the block |
+| `updatedRows` | number | updatedRows output from the block |
+| `updatedColumns` | number | updatedColumns output from the block |
+| `updatedCells` | number | updatedCells output from the block |
+| `index` | number | index output from the block |
+| `values` | json | values output from the block |

 ## Notes

@@ -205,10 +205,9 @@ Write or send a message to a Microsoft Teams channel

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `content` | string | content of the response |
-| ↳ `metadata` | json | metadata of the response |
-| ↳ `updatedContent` | boolean | updatedContent of the response |
+| `content` | string | content output from the block |
+| `metadata` | json | metadata output from the block |
+| `updatedContent` | boolean | updatedContent output from the block |

 ## Notes

@@ -122,9 +122,8 @@ This tool does not produce any outputs.

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `content` | string | content of the response |
-| ↳ `metadata` | json | metadata of the response |
+| `content` | string | content output from the block |
+| `metadata` | json | metadata output from the block |

 ## Notes

@@ -117,9 +117,8 @@ Create a new page in Notion

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `content` | string | content of the response |
-| ↳ `metadata` | any | metadata of the response |
+| `content` | string | content output from the block |
+| `metadata` | any | metadata output from the block |

 ## Notes

@@ -88,10 +88,9 @@ Generate embeddings from text using OpenAI

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `embeddings` | json | embeddings of the response |
-| ↳ `model` | string | model of the response |
-| ↳ `usage` | json | usage of the response |
+| `embeddings` | json | embeddings output from the block |
+| `model` | string | model output from the block |
+| `usage` | json | usage output from the block |

 ## Notes

@@ -225,9 +225,8 @@ Read emails from Outlook

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `message` | string | message of the response |
-| ↳ `results` | json | results of the response |
+| `message` | string | message output from the block |
+| `results` | json | results output from the block |

 ## Notes

@@ -83,10 +83,9 @@ Generate completions using Perplexity AI chat models

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `content` | string | content of the response |
-| ↳ `model` | string | model of the response |
-| ↳ `usage` | json | usage of the response |
+| `content` | string | content output from the block |
+| `model` | string | model output from the block |
+| `usage` | json | usage output from the block |

 ## Notes

@@ -181,13 +181,12 @@ Fetch vectors by ID from a Pinecone index

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `matches` | any | matches of the response |
-| ↳ `upsertedCount` | any | upsertedCount of the response |
-| ↳ `data` | any | data of the response |
-| ↳ `model` | any | model of the response |
-| ↳ `vector_type` | any | vector_type of the response |
-| ↳ `usage` | any | usage of the response |
+| `matches` | any | matches output from the block |
+| `upsertedCount` | any | upsertedCount output from the block |
+| `data` | any | data output from the block |
+| `model` | any | model output from the block |
+| `vector_type` | any | vector_type output from the block |
+| `usage` | any | usage output from the block |

 ## Notes

@@ -129,11 +129,10 @@ Fetch comments from a specific Reddit post

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `subreddit` | string | subreddit of the response |
-| ↳ `posts` | json | posts of the response |
-| ↳ `post` | json | post of the response |
-| ↳ `comments` | json | comments of the response |
+| `subreddit` | string | subreddit output from the block |
+| `posts` | json | posts output from the block |
+| `post` | json | post output from the block |
+| `comments` | json | comments output from the block |

 ## Notes

@@ -89,9 +89,8 @@ Retrieve an object from an AWS S3 bucket

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `url` | string | url of the response |
-| ↳ `metadata` | json | metadata of the response |
+| `url` | string | url output from the block |
+| `metadata` | json | metadata output from the block |

 ## Notes

@@ -121,8 +121,7 @@ A powerful web search tool that provides access to Google search results through

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `searchResults` | json | searchResults of the response |
+| `searchResults` | json | searchResults output from the block |

 ## Notes

@@ -98,9 +98,8 @@ Send messages to Slack channels or users through the Slack API. Supports Slack m

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `ts` | string | ts of the response |
-| ↳ `channel` | string | channel of the response |
+| `ts` | string | ts output from the block |
+| `channel` | string | channel output from the block |

 ## Notes

@@ -232,8 +232,7 @@ Extract structured data from a webpage using Stagehand

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `data` | json | data of the response |
+| `data` | json | data output from the block |

 ## Notes

@@ -240,9 +240,8 @@ Run an autonomous web agent to complete tasks and extract structured data

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `agentResult` | json | agentResult of the response |
-| ↳ `structuredOutput` | any | structuredOutput of the response |
+| `agentResult` | json | agentResult output from the block |
+| `structuredOutput` | any | structuredOutput output from the block |

 ## Notes

@@ -127,9 +127,8 @@ Insert data into a Supabase table

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `message` | string | message of the response |
-| ↳ `results` | json | results of the response |
+| `message` | string | message output from the block |
+| `results` | json | results output from the block |

 ## Notes

@@ -121,13 +121,12 @@ Extract raw content from multiple web pages simultaneously using Tavily

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `results` | json | results of the response |
-| ↳ `answer` | any | answer of the response |
-| ↳ `query` | string | query of the response |
-| ↳ `content` | string | content of the response |
-| ↳ `title` | string | title of the response |
-| ↳ `url` | string | url of the response |
+| `results` | json | results output from the block |
+| `answer` | any | answer output from the block |
+| `query` | string | query output from the block |
+| `content` | string | content output from the block |
+| `title` | string | title output from the block |
+| `url` | string | url output from the block |

 ## Notes

@@ -121,9 +121,8 @@ Send messages to Telegram channels or users through the Telegram Bot API. Enable

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `ok` | boolean | ok of the response |
-| ↳ `result` | json | result of the response |
+| `ok` | boolean | ok output from the block |
+| `result` | json | result output from the block |

 ## Notes

@@ -87,8 +87,7 @@ Processes a provided thought/instruction, making it available for subsequent ste

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `acknowledgedThought` | string | acknowledgedThought of the response |
+| `acknowledgedThought` | string | acknowledgedThought output from the block |

 ## Notes

@@ -95,10 +95,9 @@ This tool does not produce any outputs.

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `content` | string | content of the response |
-| ↳ `model` | string | model of the response |
-| ↳ `tokens` | any | tokens of the response |
+| `content` | string | content output from the block |
+| `model` | string | model output from the block |
+| `tokens` | any | tokens output from the block |

 ## Notes

@@ -78,11 +78,10 @@ Send text messages to single or multiple recipients using the Twilio API.

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `success` | boolean | success of the response |
-| ↳ `messageId` | any | messageId of the response |
-| ↳ `status` | any | status of the response |
-| ↳ `error` | any | error of the response |
+| `success` | boolean | success output from the block |
+| `messageId` | any | messageId output from the block |
+| `status` | any | status output from the block |
+| `error` | any | error output from the block |

 ## Notes

@@ -126,10 +126,9 @@ This tool does not produce any outputs.

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `total_items` | number | total_items of the response |
-| ↳ `page_count` | number | page_count of the response |
-| ↳ `items` | json | items of the response |
+| `total_items` | number | total_items output from the block |
+| `page_count` | number | page_count output from the block |
+| `items` | json | items output from the block |

 ## Notes

@@ -90,10 +90,9 @@ Process and analyze images using advanced vision models. Capable of understandin

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `content` | string | content of the response |
-| ↳ `model` | any | model of the response |
-| ↳ `tokens` | any | tokens of the response |
+| `content` | string | content output from the block |
+| `model` | any | model output from the block |
+| `tokens` | any | tokens output from the block |

 ## Notes

@@ -79,10 +79,9 @@ Send WhatsApp messages

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `success` | boolean | success of the response |
-| ↳ `messageId` | any | messageId of the response |
-| ↳ `error` | any | error of the response |
+| `success` | boolean | success output from the block |
+| `messageId` | any | messageId output from the block |
+| `error` | any | error output from the block |

 ## Notes

@@ -145,15 +145,14 @@ Get user profile information

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `tweet` | json | tweet of the response |
-| ↳ `replies` | any | replies of the response |
-| ↳ `context` | any | context of the response |
-| ↳ `tweets` | json | tweets of the response |
-| ↳ `includes` | any | includes of the response |
-| ↳ `meta` | json | meta of the response |
-| ↳ `user` | json | user of the response |
-| ↳ `recentTweets` | any | recentTweets of the response |
+| `tweet` | json | tweet output from the block |
+| `replies` | any | replies output from the block |
+| `context` | any | context output from the block |
+| `tweets` | json | tweets output from the block |
+| `includes` | any | includes output from the block |
+| `meta` | json | meta output from the block |
+| `user` | json | user output from the block |
+| `recentTweets` | any | recentTweets output from the block |

 ## Notes

@@ -82,9 +82,8 @@ Search for videos on YouTube using the YouTube Data API.

 | Output | Type | Description |
 | ------ | ---- | ----------- |
-| `response` | object | Output from response |
-| ↳ `items` | json | items of the response |
-| ↳ `totalResults` | number | totalResults of the response |
+| `items` | json | items output from the block |
+| `totalResults` | number | totalResults output from the block |

 ## Notes
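Every tool hunk above applies the same flattening. As a type-level illustration using the last hunk's YouTube block, a sketch of the output shape before and after; these TypeScript shapes are inferred from the docs tables, not copied from the codebase:

```ts
// Inferred from the docs tables above; not the codebase's actual type names.
interface YouTubeSearchOutputOld {
  response: {
    items: unknown[]      // json
    totalResults: number
  }
}

interface YouTubeSearchOutputNew {
  items: unknown[]        // json, now exposed directly on the block
  totalResults: number
}
```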
@@ -19,7 +19,7 @@
     "fumadocs-mdx": "^11.5.6",
     "fumadocs-ui": "^15.0.16",
     "lucide-react": "^0.511.0",
-    "next": "^15.2.3",
+    "next": "^15.3.2",
     "next-themes": "^0.4.6",
     "react": "19.1.0",
     "react-dom": "19.1.0",

@@ -15,5 +15,3 @@ ENCRYPTION_KEY=your_encryption_key # Use `openssl rand -hex 32` to generate
 # RESEND_API_KEY= # Uncomment and add your key from https://resend.com to send actual emails
 # If left commented out, emails will be logged to console instead

-# Freestyle API Key (Required for sandboxed code execution for functions/custom-tools)
-# FREESTYLE_API_KEY= # Uncomment and add your key from https://docs.freestyle.sh/Getting-Started/run
@@ -1,116 +0,0 @@
-'use client'
-
-import { useState } from 'react'
-import { z } from 'zod'
-import { Button } from '@/components/ui/button'
-import { Input } from '@/components/ui/input'
-
-const emailSchema = z.string().email('Please enter a valid email')
-
-export default function WaitlistForm() {
-  const [email, setEmail] = useState('')
-  const [isSubmitting, setIsSubmitting] = useState(false)
-  const [status, setStatus] = useState<'idle' | 'success' | 'error' | 'exists' | 'ratelimited'>(
-    'idle'
-  )
-  const [_errorMessage, setErrorMessage] = useState('')
-  const [_retryAfter, setRetryAfter] = useState<number | null>(null)
-
-  const handleSubmit = async (e: React.FormEvent) => {
-    e.preventDefault()
-    setStatus('idle')
-    setErrorMessage('')
-    setRetryAfter(null)
-
-    try {
-      // Validate email
-      emailSchema.parse(email)
-
-      setIsSubmitting(true)
-      const response = await fetch('/api/waitlist', {
-        method: 'POST',
-        headers: {
-          'Content-Type': 'application/json',
-        },
-        body: JSON.stringify({ email }),
-      })
-
-      const data = await response.json()
-
-      if (!response.ok) {
-        // Check for rate limiting (429 status)
-        if (response.status === 429) {
-          setStatus('ratelimited')
-          setErrorMessage(data.message || 'Too many attempts. Please try again later.')
-          setRetryAfter(data.retryAfter || 60)
-        }
-        // Check if the error is because the email already exists
-        else if (response.status === 400 && data.message?.includes('already exists')) {
-          setStatus('exists')
-          setErrorMessage('Already on the waitlist')
-        } else {
-          setStatus('error')
-          setErrorMessage(data.message || 'Failed to join waitlist')
-        }
-        return
-      }
-
-      setStatus('success')
-      setEmail('')
-    } catch (_error) {
-      setStatus('error')
-      setErrorMessage('Please try again')
-    } finally {
-      setIsSubmitting(false)
-    }
-  }
-
-  const getButtonText = () => {
-    if (isSubmitting) return 'Joining...'
-    if (status === 'success') return 'Joined!'
-    if (status === 'error') return 'Try again'
-    if (status === 'exists') return 'Already joined'
-    if (status === 'ratelimited') return 'Try again later'
-    return 'Join waitlist'
-  }
-
-  const getButtonStyle = () => {
-    switch (status) {
-      case 'success':
-        return 'bg-green-500 hover:bg-green-600'
-      case 'error':
-        return 'bg-red-500 hover:bg-red-600'
-      case 'exists':
-        return 'bg-amber-500 hover:bg-amber-600'
-      case 'ratelimited':
-        return 'bg-gray-500 hover:bg-gray-600'
-      default:
-        return 'bg-white text-black hover:bg-gray-100'
-    }
-  }
-
-  return (
-    <form
-      onSubmit={handleSubmit}
-      className='mx-auto mt-8 flex max-w-lg flex-col items-center gap-3'
-    >
-      <div className='flex w-full gap-3'>
-        <Input
-          type='email'
-          placeholder='you@example.com'
-          className='h-[49px] flex-1 rounded-md border-white/20 bg-[#020817] text-sm focus:border-white/30 focus:ring-white/30 md:text-md lg:text-[16px]'
-          value={email}
-          onChange={(e) => setEmail(e.target.value)}
-          disabled={isSubmitting || status === 'ratelimited'}
-        />
-        <Button
-          type='submit'
-          className={`h-[48px] rounded-md px-8 text-sm md:text-md ${getButtonStyle()}`}
-          disabled={isSubmitting || status === 'ratelimited'}
-        >
-          {getButtonText()}
-        </Button>
-      </div>
-    </form>
-  )
-}
@@ -93,7 +93,7 @@ export const sampleWorkflowState = {
         webhookPath: { id: 'webhookPath', type: 'short-input', value: '' },
       },
       outputs: {
-        response: { type: { input: 'any' } },
+        input: 'any',
       },
       enabled: true,
       horizontalHandles: true,

@@ -111,7 +111,7 @@ export const sampleWorkflowState = {
           type: 'long-input',
           value: 'You are a helpful assistant',
         },
-        context: { id: 'context', type: 'short-input', value: '<start.response.input>' },
+        context: { id: 'context', type: 'short-input', value: '<start.input>' },
         model: { id: 'model', type: 'dropdown', value: 'gpt-4o' },
        apiKey: { id: 'apiKey', type: 'short-input', value: '{{OPENAI_API_KEY}}' },
       },

@@ -138,6 +138,7 @@ export const sampleWorkflowState = {
     },
   ],
   loops: {},
+  parallels: {},
   lastSaved: Date.now(),
   isDeployed: false,
 }

@@ -618,6 +619,13 @@ export function mockKnowledgeSchemas() {
       processingCompletedAt: 'processing_completed_at',
       processingError: 'processing_error',
       enabled: 'enabled',
+      tag1: 'tag1',
+      tag2: 'tag2',
+      tag3: 'tag3',
+      tag4: 'tag4',
+      tag5: 'tag5',
+      tag6: 'tag6',
+      tag7: 'tag7',
       uploadedAt: 'uploaded_at',
       deletedAt: 'deleted_at',
     },

@@ -630,6 +638,13 @@ export function mockKnowledgeSchemas() {
       embedding: 'embedding',
       tokenCount: 'token_count',
       characterCount: 'character_count',
+      tag1: 'tag1',
+      tag2: 'tag2',
+      tag3: 'tag3',
+      tag4: 'tag4',
+      tag5: 'tag5',
+      tag6: 'tag6',
+      tag7: 'tag7',
       createdAt: 'created_at',
     },
   }))

@@ -764,6 +779,20 @@ export function createStorageProviderMocks(options: StorageProviderMockOptions =
       bucket: 'test-s3-bucket',
       region: 'us-east-1',
     },
+    S3_KB_CONFIG: {
+      bucket: 'test-s3-kb-bucket',
+      region: 'us-east-1',
+    },
+    BLOB_CONFIG: {
+      accountName: 'testaccount',
+      accountKey: 'testkey',
+      containerName: 'test-container',
+    },
+    BLOB_KB_CONFIG: {
+      accountName: 'testaccount',
+      accountKey: 'testkey',
+      containerName: 'test-kb-container',
+    },
   }))

 vi.doMock('@aws-sdk/client-s3', () => ({

@@ -806,6 +835,11 @@ export function createStorageProviderMocks(options: StorageProviderMockOptions =
       accountKey: 'testkey',
       containerName: 'test-container',
     },
+    BLOB_KB_CONFIG: {
+      accountName: 'testaccount',
+      accountKey: 'testkey',
+      containerName: 'test-kb-container',
+    },
   }))

 vi.doMock('@azure/storage-blob', () => ({
@@ -14,6 +14,8 @@ const logger = createLogger('OAuthTokenAPI')
 export async function POST(request: NextRequest) {
   const requestId = crypto.randomUUID().slice(0, 8)

+  logger.info(`[${requestId}] OAuth token API POST request received`)
+
   try {
     // Parse request body
     const body = await request.json()

@@ -38,6 +40,7 @@ export async function POST(request: NextRequest) {
     const credential = await getCredential(requestId, credentialId, userId)

     if (!credential) {
+      logger.error(`[${requestId}] Credential not found: ${credentialId}`)
       return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
     }

@@ -45,7 +48,8 @@ export async function POST(request: NextRequest) {
       // Refresh the token if needed
       const { accessToken } = await refreshTokenIfNeeded(requestId, credential, credentialId)
       return NextResponse.json({ accessToken }, { status: 200 })
-    } catch (_error) {
+    } catch (error) {
+      logger.error(`[${requestId}] Failed to refresh access token:`, error)
       return NextResponse.json({ error: 'Failed to refresh access token' }, { status: 401 })
     }
   } catch (error) {

@@ -89,6 +89,7 @@ export async function getOAuthToken(userId: string, providerId: string): Promise
   // Check if the token is expired and needs refreshing
   const now = new Date()
   const tokenExpiry = credential.accessTokenExpiresAt
+  // Only refresh if we have an expiration time AND it's expired AND we have a refresh token
   const needsRefresh = tokenExpiry && tokenExpiry < now && !!credential.refreshToken

   if (needsRefresh) {

@@ -166,7 +167,9 @@ export async function refreshAccessTokenIfNeeded(
   // Check if we need to refresh the token
   const expiresAt = credential.accessTokenExpiresAt
   const now = new Date()
-  const needsRefresh = !expiresAt || expiresAt <= now
+  // Only refresh if we have an expiration time AND it's expired
+  // If no expiration time is set (newly created credentials), assume token is valid
+  const needsRefresh = expiresAt && expiresAt <= now

   const accessToken = credential.accessToken

@@ -233,7 +236,9 @@ export async function refreshTokenIfNeeded(
   // Check if we need to refresh the token
   const expiresAt = credential.accessTokenExpiresAt
   const now = new Date()
-  const needsRefresh = !expiresAt || expiresAt <= now
+  // Only refresh if we have an expiration time AND it's expired
+  // If no expiration time is set (newly created credentials), assume token is valid
+  const needsRefresh = expiresAt && expiresAt <= now

   // If token is still valid, return it directly
   if (!needsRefresh || !credential.refreshToken) {
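The behavioral fix in these hunks is subtle: with `!expiresAt || expiresAt <= now`, a credential that has no recorded expiry is refreshed on every call; with `expiresAt && expiresAt <= now`, it is treated as still valid. A minimal sketch of the corrected predicate in isolation (the helper name `shouldRefresh` is illustrative, not a codebase function):

```ts
// Illustrative restatement of the fix; `shouldRefresh` is not from the codebase.
function shouldRefresh(expiresAt: Date | null, now: Date = new Date()): boolean {
  // Old behavior: `!expiresAt || expiresAt <= now` refreshed tokens that
  // never had an expiry recorded (e.g. newly created credentials).
  // New behavior: only refresh when an expiry exists AND has passed.
  return Boolean(expiresAt && expiresAt <= now)
}

console.log(shouldRefresh(null))                        // false: assume valid
console.log(shouldRefresh(new Date(Date.now() - 1000))) // true: expired
```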
apps/sim/app/api/billing/daily/route.ts (new file, 109 lines)
@@ -0,0 +1,109 @@
+import { type NextRequest, NextResponse } from 'next/server'
+import { verifyCronAuth } from '@/lib/auth/internal'
+import { processDailyBillingCheck } from '@/lib/billing/core/billing'
+import { createLogger } from '@/lib/logs/console-logger'
+
+const logger = createLogger('DailyBillingCron')
+
+/**
+ * Daily billing CRON job endpoint that checks individual billing periods
+ */
+export async function POST(request: NextRequest) {
+  try {
+    const authError = verifyCronAuth(request, 'daily billing check')
+    if (authError) {
+      return authError
+    }
+
+    logger.info('Starting daily billing check cron job')
+
+    const startTime = Date.now()
+
+    // Process overage billing for users and organizations with periods ending today
+    const result = await processDailyBillingCheck()
+
+    const duration = Date.now() - startTime
+
+    if (result.success) {
+      logger.info('Daily billing check completed successfully', {
+        processedUsers: result.processedUsers,
+        processedOrganizations: result.processedOrganizations,
+        totalChargedAmount: result.totalChargedAmount,
+        duration: `${duration}ms`,
+      })
+
+      return NextResponse.json({
+        success: true,
+        summary: {
+          processedUsers: result.processedUsers,
+          processedOrganizations: result.processedOrganizations,
+          totalChargedAmount: result.totalChargedAmount,
+          duration: `${duration}ms`,
+        },
+      })
+    }
+
+    logger.error('Daily billing check completed with errors', {
+      processedUsers: result.processedUsers,
+      processedOrganizations: result.processedOrganizations,
+      totalChargedAmount: result.totalChargedAmount,
+      errorCount: result.errors.length,
+      errors: result.errors,
+      duration: `${duration}ms`,
+    })
+
+    return NextResponse.json(
+      {
+        success: false,
+        summary: {
+          processedUsers: result.processedUsers,
+          processedOrganizations: result.processedOrganizations,
+          totalChargedAmount: result.totalChargedAmount,
+          errorCount: result.errors.length,
+          duration: `${duration}ms`,
+        },
+        errors: result.errors,
+      },
+      { status: 500 }
+    )
+  } catch (error) {
+    logger.error('Fatal error in monthly billing cron job', { error })
+
+    return NextResponse.json(
+      {
+        success: false,
+        error: 'Internal server error during daily billing check',
+        details: error instanceof Error ? error.message : 'Unknown error',
+      },
+      { status: 500 }
+    )
+  }
+}
+
+/**
+ * GET endpoint for manual testing and health checks
+ */
+export async function GET(request: NextRequest) {
+  try {
+    const authError = verifyCronAuth(request, 'daily billing check health check')
+    if (authError) {
+      return authError
+    }
+
+    return NextResponse.json({
+      status: 'ready',
+      message:
+        'Daily billing check cron job is ready to process users and organizations with periods ending today',
+      currentDate: new Date().toISOString().split('T')[0],
+    })
+  } catch (error) {
+    logger.error('Error in billing health check', { error })
+    return NextResponse.json(
+      {
+        status: 'error',
+        error: error instanceof Error ? error.message : 'Unknown error',
+      },
+      { status: 500 }
+    )
+  }
+}
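A usage sketch for triggering the new cron endpoint. `verifyCronAuth` appears in the diff but its expected credentials do not, so the host and `Authorization` header below are assumptions:

```ts
// Hedged example: the auth scheme and host are assumed, not documented here.
const res = await fetch('https://sim.example.com/api/billing/daily', {
  method: 'POST',
  headers: { Authorization: `Bearer ${process.env.CRON_SECRET ?? ''}` }, // assumed header
})
const body = await res.json()
console.log(res.status, body.summary ?? body.error)
```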
116
apps/sim/app/api/billing/route.ts
Normal file
116
apps/sim/app/api/billing/route.ts
Normal file
@@ -0,0 +1,116 @@
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { getSimplifiedBillingSummary } from '@/lib/billing/core/billing'
import { getOrganizationBillingData } from '@/lib/billing/core/organization-billing'
import { createLogger } from '@/lib/logs/console-logger'
import { db } from '@/db'
import { member } from '@/db/schema'

const logger = createLogger('UnifiedBillingAPI')

/**
 * Unified Billing Endpoint
 */
export async function GET(request: NextRequest) {
  const session = await getSession()

  try {
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { searchParams } = new URL(request.url)
    const context = searchParams.get('context') || 'user'
    const contextId = searchParams.get('id')

    // Validate context parameter
    if (!['user', 'organization'].includes(context)) {
      return NextResponse.json(
        { error: 'Invalid context. Must be "user" or "organization"' },
        { status: 400 }
      )
    }

    // For organization context, require contextId
    if (context === 'organization' && !contextId) {
      return NextResponse.json(
        { error: 'Organization ID is required when context=organization' },
        { status: 400 }
      )
    }

    let billingData

    if (context === 'user') {
      // Get user billing (may include organization if they're part of one)
      billingData = await getSimplifiedBillingSummary(session.user.id, contextId || undefined)
    } else {
      // Get user role in organization for permission checks first
      const memberRecord = await db
        .select({ role: member.role })
        .from(member)
        .where(and(eq(member.organizationId, contextId!), eq(member.userId, session.user.id)))
        .limit(1)

      if (memberRecord.length === 0) {
        return NextResponse.json(
          { error: 'Access denied - not a member of this organization' },
          { status: 403 }
        )
      }

      // Get organization-specific billing
      const rawBillingData = await getOrganizationBillingData(contextId!)

      if (!rawBillingData) {
        return NextResponse.json(
          { error: 'Organization not found or access denied' },
          { status: 404 }
        )
      }

      // Transform data to match component expectations
      billingData = {
        organizationId: rawBillingData.organizationId,
        organizationName: rawBillingData.organizationName,
        subscriptionPlan: rawBillingData.subscriptionPlan,
        subscriptionStatus: rawBillingData.subscriptionStatus,
        totalSeats: rawBillingData.totalSeats,
        usedSeats: rawBillingData.usedSeats,
        totalCurrentUsage: rawBillingData.totalCurrentUsage,
        totalUsageLimit: rawBillingData.totalUsageLimit,
        averageUsagePerMember: rawBillingData.averageUsagePerMember,
        billingPeriodStart: rawBillingData.billingPeriodStart?.toISOString() || null,
        billingPeriodEnd: rawBillingData.billingPeriodEnd?.toISOString() || null,
        members: rawBillingData.members.map((member) => ({
          ...member,
          joinedAt: member.joinedAt.toISOString(),
          lastActive: member.lastActive?.toISOString() || null,
        })),
      }

      const userRole = memberRecord[0].role

      return NextResponse.json({
        success: true,
        context,
        data: billingData,
        userRole,
      })
    }

    return NextResponse.json({
      success: true,
      context,
      data: billingData,
    })
  } catch (error) {
    logger.error('Failed to get billing data', {
      userId: session?.user?.id,
      error,
    })

    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
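For reference, a minimal client-side sketch of calling this unified endpoint. The `context`/`id` query parameters and the response shape come straight from the route above; the wrapper function itself is illustrative and not part of the commit.

// Illustrative wrapper around GET /api/billing (assumed to run in the app's browser code)
async function fetchOrganizationBilling(organizationId: string) {
  const params = new URLSearchParams({ context: 'organization', id: organizationId })
  const res = await fetch(`/api/billing?${params}`)
  if (!res.ok) {
    // 400 bad context/id, 401 no session, 403 not a member, 404 not found
    throw new Error(`Billing request failed: ${res.status}`)
  }
  return res.json() // { success, context, data, userRole }
}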
apps/sim/app/api/billing/webhooks/stripe/route.ts (new file, 116 lines)
@@ -0,0 +1,116 @@
import { headers } from 'next/headers'
import { type NextRequest, NextResponse } from 'next/server'
import type Stripe from 'stripe'
import { requireStripeClient } from '@/lib/billing/stripe-client'
import { handleInvoiceWebhook } from '@/lib/billing/webhooks/stripe-invoice-webhooks'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'

const logger = createLogger('StripeInvoiceWebhook')

/**
 * Stripe billing webhook endpoint for invoice-related events
 * Endpoint: /api/billing/webhooks/stripe
 * Handles: invoice.payment_succeeded, invoice.payment_failed, invoice.finalized
 */
export async function POST(request: NextRequest) {
  try {
    const body = await request.text()
    const headersList = await headers()
    const signature = headersList.get('stripe-signature')

    if (!signature) {
      logger.error('Missing Stripe signature header')
      return NextResponse.json({ error: 'Missing Stripe signature' }, { status: 400 })
    }

    if (!env.STRIPE_WEBHOOK_SECRET) {
      logger.error('Missing Stripe webhook secret configuration')
      return NextResponse.json({ error: 'Webhook secret not configured' }, { status: 500 })
    }

    // Check if Stripe client is available
    let stripe
    try {
      stripe = requireStripeClient()
    } catch (stripeError) {
      logger.error('Stripe client not available for webhook processing', {
        error: stripeError,
      })
      return NextResponse.json({ error: 'Stripe client not configured' }, { status: 500 })
    }

    // Verify webhook signature
    let event: Stripe.Event
    try {
      event = stripe.webhooks.constructEvent(body, signature, env.STRIPE_WEBHOOK_SECRET)
    } catch (signatureError) {
      logger.error('Invalid Stripe webhook signature', {
        error: signatureError,
        signature,
      })
      return NextResponse.json({ error: 'Invalid signature' }, { status: 400 })
    }

    logger.info('Received Stripe invoice webhook', {
      eventId: event.id,
      eventType: event.type,
    })

    // Handle specific invoice events
    const supportedEvents = [
      'invoice.payment_succeeded',
      'invoice.payment_failed',
      'invoice.finalized',
    ]

    if (supportedEvents.includes(event.type)) {
      try {
        await handleInvoiceWebhook(event)

        logger.info('Successfully processed invoice webhook', {
          eventId: event.id,
          eventType: event.type,
        })

        return NextResponse.json({ received: true })
      } catch (processingError) {
        logger.error('Failed to process invoice webhook', {
          eventId: event.id,
          eventType: event.type,
          error: processingError,
        })

        // Return 500 to tell Stripe to retry the webhook
        return NextResponse.json({ error: 'Failed to process webhook' }, { status: 500 })
      }
    } else {
      // Not a supported invoice event, ignore
      logger.info('Ignoring unsupported webhook event', {
        eventId: event.id,
        eventType: event.type,
        supportedEvents,
      })

      return NextResponse.json({ received: true })
    }
  } catch (error) {
    logger.error('Fatal error in invoice webhook handler', {
      error,
      url: request.url,
    })

    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * GET endpoint for webhook health checks
 */
export async function GET() {
  return NextResponse.json({
    status: 'healthy',
    webhook: 'stripe-invoices',
    events: ['invoice.payment_succeeded', 'invoice.payment_failed', 'invoice.finalized'],
  })
}
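A test-only sketch of exercising the signature check in this handler, assuming the stripe-node package is available. generateTestHeaderString is Stripe's documented helper for producing a valid test `stripe-signature` value; the key and secret strings are placeholders.

// Hypothetical test fixture; not part of the commit
import Stripe from 'stripe'

const stripe = new Stripe('sk_test_placeholder')
const payload = JSON.stringify({ id: 'evt_test', type: 'invoice.payment_succeeded' })
const header = stripe.webhooks.generateTestHeaderString({
  payload,
  secret: 'whsec_test_secret', // must match STRIPE_WEBHOOK_SECRET in the route's env
})
// POSTing `payload` with this value in the `stripe-signature` header
// should pass constructEvent() in the handler above.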
@@ -1,8 +1,7 @@
-import { render } from '@react-email/render'
 import { eq } from 'drizzle-orm'
 import type { NextRequest } from 'next/server'
 import { z } from 'zod'
-import OTPVerificationEmail from '@/components/emails/otp-verification-email'
+import { renderOTPEmail } from '@/components/emails/render-email'
 import { sendEmail } from '@/lib/email/mailer'
 import { createLogger } from '@/lib/logs/console-logger'
 import { getRedisClient, markMessageAsProcessed, releaseLock } from '@/lib/redis'
@@ -158,7 +157,6 @@ export async function POST(
           ? deployment.allowedEmails
           : []
-
       // Check if the email is allowed
       const isEmailAllowed =
         allowedEmails.includes(email) ||
         allowedEmails.some((allowed: string) => {
@@ -176,24 +174,17 @@ export async function POST(
       )
     }

     // Generate OTP
     const otp = generateOTP()

     // Store OTP in Redis - AWAIT THIS BEFORE RETURNING RESPONSE
     await storeOTP(email, deployment.id, otp)

     // Create the email
-    const emailContent = OTPVerificationEmail({
+    const emailHtml = await renderOTPEmail(
       otp,
       email,
-      type: 'chat-access',
-      chatTitle: deployment.title || 'Chat',
-    })
+      'email-verification',
+      deployment.title || 'Chat'
+    )

-    // await the render function
-    const emailHtml = await render(emailContent)
-
-    // MAKE SURE TO AWAIT THE EMAIL SENDING
     const emailResult = await sendEmail({
       to: email,
       subject: `Verification code for ${deployment.title || 'Chat'}`,
@@ -241,7 +241,7 @@ describe('Chat Subdomain API Route', () => {
   })

   describe('POST endpoint', () => {
-    it('should handle authentication requests without messages', async () => {
+    it('should handle authentication requests without input', async () => {
       const req = createMockRequest('POST', { password: 'test-password' })
       const params = Promise.resolve({ subdomain: 'password-protected-chat' })

@@ -257,7 +257,7 @@ describe('Chat Subdomain API Route', () => {
       expect(mockSetChatAuthCookie).toHaveBeenCalled()
     })

-    it('should return 400 for requests without message', async () => {
+    it('should return 400 for requests without input', async () => {
       const req = createMockRequest('POST', {})
       const params = Promise.resolve({ subdomain: 'test-chat' })

@@ -269,7 +269,7 @@ describe('Chat Subdomain API Route', () => {

       const data = await response.json()
       expect(data).toHaveProperty('error')
-      expect(data).toHaveProperty('message', 'No message provided')
+      expect(data).toHaveProperty('message', 'No input provided')
     })

     it('should return 401 for unauthorized access', async () => {
@@ -279,7 +279,7 @@ describe('Chat Subdomain API Route', () => {
         error: 'Authentication required',
       }))

-      const req = createMockRequest('POST', { message: 'Hello' })
+      const req = createMockRequest('POST', { input: 'Hello' })
       const params = Promise.resolve({ subdomain: 'protected-chat' })

       const { POST } = await import('./route')
@@ -342,7 +342,7 @@ describe('Chat Subdomain API Route', () => {
       }
     })

-    const req = createMockRequest('POST', { message: 'Hello' })
+    const req = createMockRequest('POST', { input: 'Hello' })
     const params = Promise.resolve({ subdomain: 'test-chat' })

     const { POST } = await import('./route')
@@ -357,7 +357,7 @@ describe('Chat Subdomain API Route', () => {
     })

     it('should return streaming response for valid chat messages', async () => {
-      const req = createMockRequest('POST', { message: 'Hello world', conversationId: 'conv-123' })
+      const req = createMockRequest('POST', { input: 'Hello world', conversationId: 'conv-123' })
       const params = Promise.resolve({ subdomain: 'test-chat' })

       const { POST } = await import('./route')
@@ -374,7 +374,7 @@ describe('Chat Subdomain API Route', () => {
     })

     it('should handle streaming response body correctly', async () => {
-      const req = createMockRequest('POST', { message: 'Hello world' })
+      const req = createMockRequest('POST', { input: 'Hello world' })
       const params = Promise.resolve({ subdomain: 'test-chat' })

       const { POST } = await import('./route')
@@ -404,7 +404,7 @@ describe('Chat Subdomain API Route', () => {
       throw new Error('Execution failed')
     })

-    const req = createMockRequest('POST', { message: 'Trigger error' })
+    const req = createMockRequest('POST', { input: 'Trigger error' })
     const params = Promise.resolve({ subdomain: 'test-chat' })

     const { POST } = await import('./route')
@@ -444,7 +444,7 @@ describe('Chat Subdomain API Route', () => {

   it('should pass conversationId to executeWorkflowForChat when provided', async () => {
     const req = createMockRequest('POST', {
-      message: 'Hello world',
+      input: 'Hello world',
       conversationId: 'test-conversation-123',
     })
     const params = Promise.resolve({ subdomain: 'test-chat' })
@@ -461,7 +461,7 @@ describe('Chat Subdomain API Route', () => {
   })

   it('should handle missing conversationId gracefully', async () => {
-    const req = createMockRequest('POST', { message: 'Hello world' })
+    const req = createMockRequest('POST', { input: 'Hello world' })
     const params = Promise.resolve({ subdomain: 'test-chat' })

     const { POST } = await import('./route')
@@ -72,11 +72,11 @@ export async function POST(
   }

   // Use the already parsed body
-  const { message, password, email, conversationId } = parsedBody
+  const { input, password, email, conversationId } = parsedBody

-  // If this is an authentication request (has password or email but no message),
+  // If this is an authentication request (has password or email but no input),
   // set auth cookie and return success
-  if ((password || email) && !message) {
+  if ((password || email) && !input) {
     const response = addCorsHeaders(createSuccessResponse({ authenticated: true }), request)

     // Set authentication cookie
@@ -86,8 +86,8 @@ export async function POST(
   }

   // For chat messages, create regular response
-  if (!message) {
-    return addCorsHeaders(createErrorResponse('No message provided', 400), request)
+  if (!input) {
+    return addCorsHeaders(createErrorResponse('No input provided', 400), request)
   }

   // Get the workflow for this chat
@@ -105,8 +105,8 @@ export async function POST(
   }

   try {
-    // Execute workflow with structured input (message + conversationId for context)
-    const result = await executeWorkflowForChat(deployment.id, message, conversationId)
+    // Execute workflow with structured input (input + conversationId for context)
+    const result = await executeWorkflowForChat(deployment.id, input, conversationId)

     // The result is always a ReadableStream that we can pipe to the client
     const streamResponse = new NextResponse(result, {
@@ -194,6 +194,7 @@ export async function GET(
         description: deployment.description,
         customizations: deployment.customizations,
         authType: deployment.authType,
+        outputConfigs: deployment.outputConfigs,
       }),
       request
     )
@@ -219,6 +220,7 @@ export async function GET(
         description: deployment.description,
         customizations: deployment.customizations,
         authType: deployment.authType,
+        outputConfigs: deployment.outputConfigs,
       }),
       request
     )
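The request body shapes implied by this rename, sketched below for reference. The `input`, `password`, and `conversationId` field names are taken from the route and its tests above; the chatUrl placeholder and wrapper are illustrative.

// Illustrative client calls against the chat subdomain endpoint
async function chatRequests(chatUrl: string) {
  // Auth-only request: password (or email) present, no `input` key
  await fetch(chatUrl, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ password: 'secret' }),
  })
  // Chat turn: `input` carries the user text, conversationId keeps context
  await fetch(chatUrl, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ input: 'Hello world', conversationId: 'conv-123' }),
  })
}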
@@ -3,8 +3,9 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { v4 as uuidv4 } from 'uuid'
 import { env } from '@/lib/env'
 import { createLogger } from '@/lib/logs/console-logger'
-import { persistExecutionLogs } from '@/lib/logs/execution-logger'
+import { EnhancedLoggingSession } from '@/lib/logs/enhanced-logging-session'
 import { buildTraceSpans } from '@/lib/logs/trace-spans'
+import { processStreamingBlockLogs } from '@/lib/tokenization'
 import { decryptSecret } from '@/lib/utils'
 import { db } from '@/db'
 import { chat, environment as envTable, userStats, workflow } from '@/db/schema'
@@ -128,10 +129,10 @@ export async function validateChatAuth(
       return { authorized: false, error: 'Password is required' }
     }

-    const { password, message } = parsedBody
+    const { password, input } = parsedBody

     // If this is a chat message, not an auth attempt
-    if (message && !password) {
+    if (input && !password) {
       return { authorized: false, error: 'auth_required_password' }
     }

@@ -170,10 +171,10 @@ export async function validateChatAuth(
       return { authorized: false, error: 'Email is required' }
     }

-    const { email, message } = parsedBody
+    const { email, input } = parsedBody

     // If this is a chat message, not an auth attempt
-    if (message && !email) {
+    if (input && !email) {
       return { authorized: false, error: 'auth_required_email' }
     }

@@ -211,17 +212,17 @@ export async function validateChatAuth(
 /**
  * Executes a workflow for a chat request and returns the formatted output.
  *
- * When workflows reference <start.response.input>, they receive a structured JSON
- * containing both the message and conversationId for maintaining chat context.
+ * When workflows reference <start.input>, they receive the input directly.
+ * The conversationId is available at <start.conversationId> for maintaining chat context.
  *
  * @param chatId - Chat deployment identifier
- * @param message - User's chat message
+ * @param input - User's chat input
  * @param conversationId - Optional ID for maintaining conversation context
  * @returns Workflow execution result formatted for the chat interface
  */
 export async function executeWorkflowForChat(
   chatId: string,
-  message: string,
+  input: string,
   conversationId?: string
 ): Promise<any> {
   const requestId = crypto.randomUUID().slice(0, 8)
@@ -252,32 +253,42 @@ export async function executeWorkflowForChat(

   const deployment = deploymentResult[0]
   const workflowId = deployment.workflowId
+  const executionId = uuidv4()
+
+  // Set up enhanced logging for chat execution
+  const loggingSession = new EnhancedLoggingSession(workflowId, executionId, 'chat', requestId)

   // Check for multi-output configuration in customizations
   const customizations = (deployment.customizations || {}) as Record<string, any>
   let outputBlockIds: string[] = []
   let outputPaths: string[] = []

-  // Extract block IDs and paths from the new outputConfigs array format
+  // Extract output configs from the new schema format
+  let selectedOutputIds: string[] = []
   if (deployment.outputConfigs && Array.isArray(deployment.outputConfigs)) {
+    // Extract output IDs in the format expected by the streaming processor
     logger.debug(
       `[${requestId}] Found ${deployment.outputConfigs.length} output configs in deployment`
     )
-    deployment.outputConfigs.forEach((config) => {
+    selectedOutputIds = deployment.outputConfigs.map((config) => {
+      const outputId = config.path
+        ? `${config.blockId}_${config.path}`
+        : `${config.blockId}.content`
+
       logger.debug(
-        `[${requestId}] Processing output config: blockId=${config.blockId}, path=${config.path || 'none'}`
+        `[${requestId}] Processing output config: blockId=${config.blockId}, path=${config.path || 'content'} -> outputId=${outputId}`
       )
+
+      return outputId
     })
+
+    // Also extract block IDs for legacy compatibility
     outputBlockIds = deployment.outputConfigs.map((config) => config.blockId)
     outputPaths = deployment.outputConfigs.map((config) => config.path || '')
   } else {
     // Use customizations as fallback
     outputBlockIds = Array.isArray(customizations.outputBlockIds)
       ? customizations.outputBlockIds
       : []
     outputPaths = Array.isArray(customizations.outputPaths) ? customizations.outputPaths : []
   }

   // Fall back to customizations if we still have no outputs
@@ -287,10 +298,11 @@ export async function executeWorkflowForChat(
     customizations.outputBlockIds.length > 0
   ) {
     outputBlockIds = customizations.outputBlockIds
     outputPaths = customizations.outputPaths || new Array(outputBlockIds.length).fill('')
   }

-  logger.debug(`[${requestId}] Using ${outputBlockIds.length} output blocks for extraction`)
+  logger.debug(
+    `[${requestId}] Using ${outputBlockIds.length} output blocks and ${selectedOutputIds.length} selected output IDs for extraction`
+  )

   // Find the workflow (deployedState is NOT deprecated - needed for chat execution)
   const workflowResult = await db
@@ -407,6 +419,13 @@ export async function executeWorkflowForChat(
     {} as Record<string, Record<string, any>>
   )

+  // Start enhanced logging session
+  await loggingSession.safeStart({
+    userId: deployment.userId,
+    workspaceId: '', // TODO: Get from workflow
+    variables: workflowVariables,
+  })
+
   const stream = new ReadableStream({
     async start(controller) {
       const encoder = new TextEncoder()
@@ -445,11 +464,11 @@ export async function executeWorkflowForChat(
           workflow: serializedWorkflow,
           currentBlockStates: processedBlockStates,
           envVarValues: decryptedEnvVars,
-          workflowInput: { input: message, conversationId },
+          workflowInput: { input: input, conversationId },
           workflowVariables,
           contextExtensions: {
             stream: true,
-            selectedOutputIds: outputBlockIds,
+            selectedOutputIds: selectedOutputIds.length > 0 ? selectedOutputIds : outputBlockIds,
             edges: edges.map((e: any) => ({
               source: e.source,
               target: e.target,
@@ -458,16 +477,41 @@ export async function executeWorkflowForChat(
           },
         })

-        const result = await executor.execute(workflowId)
+        // Set up enhanced logging on the executor
+        loggingSession.setupExecutor(executor)
+
+        let result
+        try {
+          result = await executor.execute(workflowId)
+        } catch (error: any) {
+          logger.error(`[${requestId}] Chat workflow execution failed:`, error)
+          await loggingSession.safeCompleteWithError({
+            endedAt: new Date().toISOString(),
+            totalDurationMs: 0,
+            error: {
+              message: error.message || 'Chat workflow execution failed',
+              stackTrace: error.stack,
+            },
+          })
+          throw error
+        }

         if (result && 'success' in result) {
-          result.logs?.forEach((log: BlockLog) => {
-            if (streamedContent.has(log.blockId)) {
-              if (log.output?.response) {
-                log.output.response.content = streamedContent.get(log.blockId)
-              }
-            }
-          })
+          // Update streamed content and apply tokenization
+          if (result.logs) {
+            result.logs.forEach((log: BlockLog) => {
+              if (streamedContent.has(log.blockId)) {
+                const content = streamedContent.get(log.blockId)
+                if (log.output) {
+                  log.output.content = content
+                }
+              }
+            })
+
+            // Process all logs for streaming tokenization
+            const processedCount = processStreamingBlockLogs(result.logs, streamedContent)
+            logger.info(`[CHAT-API] Processed ${processedCount} blocks for streaming tokenization`)
+          }

           const { traceSpans, totalDuration } = buildTraceSpans(result)
           const enrichedResult = { ...result, traceSpans, totalDuration }
@@ -481,8 +525,7 @@ export async function executeWorkflowForChat(
             ;(enrichedResult.metadata as any).conversationId = conversationId
           }
-          const executionId = uuidv4()
-          await persistExecutionLogs(workflowId, executionId, enrichedResult, 'chat')
-          logger.debug(`Persisted logs for deployed chat: ${executionId}`)
+          logger.debug(`Generated execution ID for deployed chat: ${executionId}`)

           if (result.success) {
             try {
@@ -506,6 +549,17 @@ export async function executeWorkflowForChat(
         )
       }

+      // Complete enhanced logging session (for both success and failure)
+      if (result && 'success' in result) {
+        const { traceSpans } = buildTraceSpans(result)
+        await loggingSession.safeComplete({
+          endedAt: new Date().toISOString(),
+          totalDurationMs: result.metadata?.duration || 0,
+          finalOutput: result.output,
+          traceSpans,
+        })
+      }
+
       controller.close()
     },
   })
@@ -239,7 +239,7 @@ Example Scenario:
 User Prompt: "Fetch user data from an API. Use the User ID passed in as 'userId' and an API Key stored as the 'SERVICE_API_KEY' environment variable."

 Generated Code:
-const userId = <block.response.content>; // Correct: Accessing input parameter without quotes
+const userId = <block.content>; // Correct: Accessing input parameter without quotes
 const apiKey = {{SERVICE_API_KEY}}; // Correct: Accessing environment variable without quotes
 const url = \`https://api.example.com/users/\${userId}\`;

@@ -273,7 +273,7 @@ Do not include import/require statements unless absolutely necessary and they ar
 Do not include markdown formatting or explanations.
 Output only the raw TypeScript code. Use modern TypeScript features where appropriate. Do not use semicolons.
 Example:
-const userId = <block.response.content> as string
+const userId = <block.content> as string
 const apiKey = {{SERVICE_API_KEY}}
 const response = await fetch(\`https://api.example.com/users/\${userId}\`, { headers: { Authorization: \`Bearer \${apiKey}\` } })
 if (!response.ok) {
apps/sim/app/api/copilot/docs/route.ts (new file, 281 lines)
@@ -0,0 +1,281 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import {
  type CopilotChat,
  type CopilotMessage,
  createChat,
  generateChatTitle,
  generateDocsResponse,
  getChat,
  updateChat,
} from '@/lib/copilot/service'
import { createLogger } from '@/lib/logs/console-logger'

const logger = createLogger('CopilotDocsAPI')

// Schema for docs queries
const DocsQuerySchema = z.object({
  query: z.string().min(1, 'Query is required'),
  topK: z.number().min(1).max(20).default(5),
  provider: z.string().optional(),
  model: z.string().optional(),
  stream: z.boolean().optional().default(false),
  chatId: z.string().optional(),
  workflowId: z.string().optional(),
  createNewChat: z.boolean().optional().default(false),
})

/**
 * POST /api/copilot/docs
 * Ask questions about documentation using RAG
 */
export async function POST(req: NextRequest) {
  const requestId = crypto.randomUUID()

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const body = await req.json()
    const { query, topK, provider, model, stream, chatId, workflowId, createNewChat } =
      DocsQuerySchema.parse(body)

    logger.info(`[${requestId}] Docs RAG query: "${query}"`, {
      provider,
      model,
      topK,
      chatId,
      workflowId,
      createNewChat,
      userId: session.user.id,
    })

    // Handle chat context
    let currentChat: CopilotChat | null = null
    let conversationHistory: CopilotMessage[] = []

    if (chatId) {
      // Load existing chat
      currentChat = await getChat(chatId, session.user.id)
      if (currentChat) {
        conversationHistory = currentChat.messages
      }
    } else if (createNewChat && workflowId) {
      // Create new chat
      currentChat = await createChat(session.user.id, workflowId)
    }

    // Generate docs response
    const result = await generateDocsResponse(query, conversationHistory, {
      topK,
      provider,
      model,
      stream,
      workflowId,
      requestId,
    })

    if (stream && result.response instanceof ReadableStream) {
      // Handle streaming response with docs sources
      logger.info(`[${requestId}] Returning streaming docs response`)

      const encoder = new TextEncoder()

      return new Response(
        new ReadableStream({
          async start(controller) {
            const reader = (result.response as ReadableStream).getReader()
            let accumulatedResponse = ''

            try {
              // Send initial metadata including sources
              const metadata = {
                type: 'metadata',
                chatId: currentChat?.id,
                sources: result.sources,
                citations: result.sources.map((source, index) => ({
                  id: index + 1,
                  title: source.title,
                  url: source.url,
                })),
                metadata: {
                  requestId,
                  chunksFound: result.sources.length,
                  query,
                  topSimilarity: result.sources[0]?.similarity,
                  provider,
                  model,
                },
              }
              controller.enqueue(encoder.encode(`data: ${JSON.stringify(metadata)}\n\n`))

              while (true) {
                const { done, value } = await reader.read()
                if (done) break

                const chunk = new TextDecoder().decode(value)
                // Clean up any object serialization artifacts in streaming content
                const cleanedChunk = chunk.replace(/\[object Object\],?/g, '')
                accumulatedResponse += cleanedChunk

                const contentChunk = {
                  type: 'content',
                  content: cleanedChunk,
                }
                controller.enqueue(encoder.encode(`data: ${JSON.stringify(contentChunk)}\n\n`))
              }

              // Send completion marker first to unblock the user
              controller.enqueue(encoder.encode(`data: {"type":"done"}\n\n`))

              // Save conversation to database asynchronously (non-blocking)
              if (currentChat) {
                // Fire-and-forget database save to avoid blocking stream completion
                Promise.resolve()
                  .then(async () => {
                    try {
                      const userMessage: CopilotMessage = {
                        id: crypto.randomUUID(),
                        role: 'user',
                        content: query,
                        timestamp: new Date().toISOString(),
                      }

                      const assistantMessage: CopilotMessage = {
                        id: crypto.randomUUID(),
                        role: 'assistant',
                        content: accumulatedResponse,
                        timestamp: new Date().toISOString(),
                        citations: result.sources.map((source, index) => ({
                          id: index + 1,
                          title: source.title,
                          url: source.url,
                        })),
                      }

                      const updatedMessages = [
                        ...conversationHistory,
                        userMessage,
                        assistantMessage,
                      ]

                      // Generate title if this is the first message
                      let updatedTitle = currentChat.title ?? undefined
                      if (!updatedTitle && conversationHistory.length === 0) {
                        updatedTitle = await generateChatTitle(query)
                      }

                      // Update the chat in database
                      await updateChat(currentChat.id, session.user.id, {
                        title: updatedTitle,
                        messages: updatedMessages,
                      })

                      logger.info(
                        `[${requestId}] Updated chat ${currentChat.id} with new docs messages`
                      )
                    } catch (dbError) {
                      logger.error(`[${requestId}] Failed to save chat to database:`, dbError)
                      // Database errors don't affect the user's streaming experience
                    }
                  })
                  .catch((error) => {
                    logger.error(`[${requestId}] Unexpected error in async database save:`, error)
                  })
              }
            } catch (error) {
              logger.error(`[${requestId}] Docs streaming error:`, error)
              try {
                const errorChunk = {
                  type: 'error',
                  error: 'Streaming failed',
                }
                controller.enqueue(encoder.encode(`data: ${JSON.stringify(errorChunk)}\n\n`))
              } catch (enqueueError) {
                logger.error(`[${requestId}] Failed to enqueue error response:`, enqueueError)
              }
            } finally {
              controller.close()
            }
          },
        }),
        {
          headers: {
            'Content-Type': 'text/event-stream',
            'Cache-Control': 'no-cache',
            Connection: 'keep-alive',
          },
        }
      )
    }

    // Handle non-streaming response
    logger.info(`[${requestId}] Docs RAG response generated successfully`)

    // Save conversation to database if we have a chat
    if (currentChat) {
      const userMessage: CopilotMessage = {
        id: crypto.randomUUID(),
        role: 'user',
        content: query,
        timestamp: new Date().toISOString(),
      }

      const assistantMessage: CopilotMessage = {
        id: crypto.randomUUID(),
        role: 'assistant',
        content: typeof result.response === 'string' ? result.response : '[Streaming Response]',
        timestamp: new Date().toISOString(),
        citations: result.sources.map((source, index) => ({
          id: index + 1,
          title: source.title,
          url: source.url,
        })),
      }

      const updatedMessages = [...conversationHistory, userMessage, assistantMessage]

      // Generate title if this is the first message
      let updatedTitle = currentChat.title ?? undefined
      if (!updatedTitle && conversationHistory.length === 0) {
        updatedTitle = await generateChatTitle(query)
      }

      // Update the chat in database
      await updateChat(currentChat.id, session.user.id, {
        title: updatedTitle,
        messages: updatedMessages,
      })

      logger.info(`[${requestId}] Updated chat ${currentChat.id} with new docs messages`)
    }

    return NextResponse.json({
      success: true,
      response: result.response,
      sources: result.sources,
      chatId: currentChat?.id,
      metadata: {
        requestId,
        chunksFound: result.sources.length,
        query,
        topSimilarity: result.sources[0]?.similarity,
        provider,
        model,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Copilot docs error:`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
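An illustrative client for the SSE protocol this route emits. The event types ('metadata', 'content', 'done', 'error') come from the handler above; the line-based parsing below assumes chunks arrive on whole `data:` line boundaries, which a production client should buffer for rather than rely on.

// Hypothetical consumer sketch; not part of the commit
async function askDocs(query: string): Promise<string> {
  const res = await fetch('/api/copilot/docs', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ query, stream: true }),
  })
  const reader = res.body!.getReader()
  const decoder = new TextDecoder()
  let answer = ''
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    for (const line of decoder.decode(value).split('\n')) {
      if (!line.startsWith('data: ')) continue
      const event = JSON.parse(line.slice(6))
      if (event.type === 'content') answer += event.content // accumulate streamed text
      if (event.type === 'error') throw new Error(event.error)
    }
  }
  return answer
}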
@@ -1,214 +1,425 @@
-import { NextResponse } from 'next/server'
-import { OpenAI } from 'openai'
-import type { ChatCompletionMessageParam } from 'openai/resources/chat/completions'
+import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
+import { getSession } from '@/lib/auth'
+import {
+  createChat,
+  deleteChat,
+  generateChatTitle,
+  getChat,
+  listChats,
+  sendMessage,
+  updateChat,
+} from '@/lib/copilot/service'
 import { createLogger } from '@/lib/logs/console-logger'

 const logger = createLogger('CopilotAPI')

-const MessageSchema = z.object({
-  role: z.enum(['user', 'assistant', 'system']),
-  content: z.string(),
+// Interface for StreamingExecution response
+interface StreamingExecution {
+  stream: ReadableStream
+  execution: Promise<any>
+}
+
+// Schema for sending messages
+const SendMessageSchema = z.object({
+  message: z.string().min(1, 'Message is required'),
+  chatId: z.string().optional(),
+  workflowId: z.string().optional(),
+  createNewChat: z.boolean().optional().default(false),
+  stream: z.boolean().optional().default(false),
 })

-const RequestSchema = z.object({
-  messages: z.array(MessageSchema),
-  workflowState: z.object({
-    blocks: z.record(z.any()),
-    edges: z.array(z.any()),
-  }),
+// Schema for docs queries
+const DocsQuerySchema = z.object({
+  query: z.string().min(1, 'Query is required'),
+  topK: z.number().min(1).max(20).default(5),
+  provider: z.string().optional(),
+  model: z.string().optional(),
+  stream: z.boolean().optional().default(false),
+  chatId: z.string().optional(),
+  workflowId: z.string().optional(),
+  createNewChat: z.boolean().optional().default(false),
 })

-const workflowActions = {
-  addBlock: {
-    description: 'Add one new block to the workflow',
-    parameters: {
-      type: 'object',
-      required: ['type'],
-      properties: {
-        type: {
-          type: 'string',
-          enum: ['agent', 'api', 'condition', 'function', 'router'],
-          description: 'The type of block to add',
-        },
-        name: {
-          type: 'string',
-          description:
-            'Optional custom name for the block. Do not provide a name unless the user has specified it.',
-        },
-        position: {
-          type: 'object',
-          description:
-            'Optional position for the block. Do not provide a position unless the user has specified it.',
-          properties: {
-            x: { type: 'number' },
-            y: { type: 'number' },
-          },
-        },
-      },
-    },
-  },
-  addEdge: {
-    description: 'Create a connection (edge) between two blocks',
-    parameters: {
-      type: 'object',
-      required: ['sourceId', 'targetId'],
-      properties: {
-        sourceId: {
-          type: 'string',
-          description: 'ID of the source block',
-        },
-        targetId: {
-          type: 'string',
-          description: 'ID of the target block',
-        },
-        sourceHandle: {
-          type: 'string',
-          description: 'Optional handle identifier for the source connection point',
-        },
-        targetHandle: {
-          type: 'string',
-          description: 'Optional handle identifier for the target connection point',
-        },
-      },
-    },
-  },
-  removeBlock: {
-    description: 'Remove a block from the workflow',
-    parameters: {
-      type: 'object',
-      required: ['id'],
-      properties: {
-        id: { type: 'string', description: 'ID of the block to remove' },
-      },
-    },
-  },
-  removeEdge: {
-    description: 'Remove a connection (edge) between blocks',
-    parameters: {
-      type: 'object',
-      required: ['id'],
-      properties: {
-        id: { type: 'string', description: 'ID of the edge to remove' },
-      },
-    },
-  },
-}
+// Schema for creating chats
+const CreateChatSchema = z.object({
+  workflowId: z.string().min(1, 'Workflow ID is required'),
+  title: z.string().optional(),
+  initialMessage: z.string().optional(),
+})

-// System prompt that references workflow state
-const getSystemPrompt = (workflowState: any) => {
-  const blockCount = Object.keys(workflowState.blocks).length
-  const edgeCount = workflowState.edges.length
+// Schema for updating chats
+const UpdateChatSchema = z.object({
+  chatId: z.string().min(1, 'Chat ID is required'),
+  messages: z
+    .array(
+      z.object({
+        id: z.string(),
+        role: z.enum(['user', 'assistant', 'system']),
+        content: z.string(),
+        timestamp: z.string(),
+        citations: z
+          .array(
+            z.object({
+              id: z.number(),
+              title: z.string(),
+              url: z.string(),
+              similarity: z.number().optional(),
+            })
+          )
+          .optional(),
+      })
+    )
+    .optional(),
+  title: z.string().optional(),
+})

-  // Create a summary of existing blocks
-  const blockSummary = Object.values(workflowState.blocks)
-    .map((block: any) => `- ${block.type} block named "${block.name}" with id ${block.id}`)
-    .join('\n')
+// Schema for listing chats
+const ListChatsSchema = z.object({
+  workflowId: z.string().min(1, 'Workflow ID is required'),
+  limit: z.number().min(1).max(100).optional().default(50),
+  offset: z.number().min(0).optional().default(0),
+})

-  // Create a summary of existing edges
-  const edgeSummary = workflowState.edges
-    .map((edge: any) => `- ${edge.source} -> ${edge.target} with id ${edge.id}`)
-    .join('\n')
-
-  return `You are a workflow assistant that helps users modify their workflow by adding/removing blocks and connections.
-
-Current Workflow State:
-${
-  blockCount === 0
-    ? 'The workflow is empty.'
-    : `${blockSummary}
-
-Connections:
-${edgeCount === 0 ? 'No connections between blocks.' : edgeSummary}`
-}
-
-When users request changes:
-- Consider existing blocks when suggesting connections
-- Provide clear feedback about what actions you've taken
-
-Use the following functions to modify the workflow:
-1. Use the addBlock function to create a new block
-2. Use the addEdge function to connect one block to another
-3. Use the removeBlock function to remove a block
-4. Use the removeEdge function to remove a connection
-
-Only use the provided functions and respond naturally to the user's requests.`
-}
-
-export async function POST(request: Request) {
-  const requestId = crypto.randomUUID().slice(0, 8)
+/**
+ * POST /api/copilot
+ * Send a message to the copilot
+ */
+export async function POST(req: NextRequest) {
+  const requestId = crypto.randomUUID()

   try {
-    // Validate API key
-    const apiKey = request.headers.get('X-OpenAI-Key')
-    if (!apiKey) {
-      return NextResponse.json({ error: 'OpenAI API key is required' }, { status: 401 })
+    const body = await req.json()
+    const { message, chatId, workflowId, createNewChat, stream } = SendMessageSchema.parse(body)
+
+    const session = await getSession()
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
     }

-    // Parse and validate request body
-    const body = await request.json()
-    const validatedData = RequestSchema.parse(body)
-    const { messages, workflowState } = validatedData
-
-    // Initialize OpenAI client
-    const openai = new OpenAI({ apiKey })
-
-    // Create message history with workflow context
-    const messageHistory = [
-      { role: 'system', content: getSystemPrompt(workflowState) },
-      ...messages,
-    ]
-
-    // Make OpenAI API call with workflow context
-    const completion = await openai.chat.completions.create({
-      model: 'gpt-4o',
-      messages: messageHistory as ChatCompletionMessageParam[],
-      tools: Object.entries(workflowActions).map(([name, config]) => ({
-        type: 'function',
-        function: {
-          name,
-          description: config.description,
-          parameters: config.parameters,
-        },
-      })),
-      tool_choice: 'auto',
+    logger.info(`[${requestId}] Copilot message: "${message}"`, {
+      chatId,
+      workflowId,
+      createNewChat,
+      stream,
+      userId: session.user.id,
     })

-    const message = completion.choices[0].message
+    // Send message using the service
+    const result = await sendMessage({
+      message,
+      chatId,
+      workflowId,
+      createNewChat,
+      stream,
+      userId: session.user.id,
+    })

-    // Process tool calls if present
-    if (message.tool_calls) {
-      logger.debug(`[${requestId}] Tool calls:`, {
-        toolCalls: message.tool_calls,
-      })
-      const actions = message.tool_calls.map((call) => ({
-        name: call.function.name,
-        parameters: JSON.parse(call.function.arguments),
-      }))
+    // Handle streaming response (ReadableStream or StreamingExecution)
+    let streamToRead: ReadableStream | null = null

-      return NextResponse.json({
-        message: message.content || "I've updated the workflow based on your request.",
-        actions,
-      })
-    }
+    // Debug logging to see what we actually got
+    logger.info(`[${requestId}] Response type analysis:`, {
+      responseType: typeof result.response,
+      isReadableStream: result.response instanceof ReadableStream,
+      hasStreamProperty:
+        typeof result.response === 'object' && result.response && 'stream' in result.response,
+      hasExecutionProperty:
+        typeof result.response === 'object' && result.response && 'execution' in result.response,
+      responseKeys:
+        typeof result.response === 'object' && result.response ? Object.keys(result.response) : [],
+    })
+
+    if (result.response instanceof ReadableStream) {
+      logger.info(`[${requestId}] Direct ReadableStream detected`)
+      streamToRead = result.response
+    } else if (
+      typeof result.response === 'object' &&
+      result.response &&
+      'stream' in result.response &&
+      'execution' in result.response
+    ) {
+      // Handle StreamingExecution (from providers with tool calls)
+      logger.info(`[${requestId}] StreamingExecution detected`)
+      const streamingExecution = result.response as StreamingExecution
+      streamToRead = streamingExecution.stream
+
+      // No need to extract citations - LLM generates direct markdown links
+    }

-    // Return response with no actions
+    if (streamToRead) {
+      logger.info(`[${requestId}] Returning streaming response`)
+
+      const encoder = new TextEncoder()
+
+      return new Response(
+        new ReadableStream({
+          async start(controller) {
+            const reader = streamToRead!.getReader()
+            let accumulatedResponse = ''
+
+            // Send initial metadata
+            const metadata = {
+              type: 'metadata',
+              chatId: result.chatId,
+              metadata: {
+                requestId,
+                message,
+              },
+            }
+            controller.enqueue(encoder.encode(`data: ${JSON.stringify(metadata)}\n\n`))
+
+            try {
+              while (true) {
+                const { done, value } = await reader.read()
+                if (done) break
+
+                const chunkText = new TextDecoder().decode(value)
+                accumulatedResponse += chunkText
+
+                const contentChunk = {
+                  type: 'content',
+                  content: chunkText,
+                }
+                controller.enqueue(encoder.encode(`data: ${JSON.stringify(contentChunk)}\n\n`))
+              }
+
+              // Send completion signal
+              const completion = {
+                type: 'complete',
+                finalContent: accumulatedResponse,
+              }
+              controller.enqueue(encoder.encode(`data: ${JSON.stringify(completion)}\n\n`))
+              controller.close()
+            } catch (error) {
+              logger.error(`[${requestId}] Streaming error:`, error)
+              const errorChunk = {
+                type: 'error',
+                error: 'Streaming failed',
+              }
+              controller.enqueue(encoder.encode(`data: ${JSON.stringify(errorChunk)}\n\n`))
+              controller.close()
+            }
+          },
+        }),
+        {
+          headers: {
+            'Content-Type': 'text/event-stream',
+            'Cache-Control': 'no-cache',
+            Connection: 'keep-alive',
+          },
+        }
+      )
+    }
+
+    // Handle non-streaming response
+    logger.info(`[${requestId}] Chat response generated successfully`)
+
     return NextResponse.json({
-      message:
-        message.content ||
-        "I'm not sure what changes to make to the workflow. Can you please provide more specific instructions?",
+      success: true,
+      response: result.response,
+      chatId: result.chatId,
+      metadata: {
+        requestId,
+        message,
+      },
     })
   } catch (error) {
-    logger.error(`[${requestId}] Copilot API error:`, { error })
-
-    // Handle specific error types
     if (error instanceof z.ZodError) {
       return NextResponse.json(
-        { error: 'Invalid request format', details: error.errors },
+        { error: 'Invalid request data', details: error.errors },
         { status: 400 }
       )
     }

-    return NextResponse.json({ error: 'Failed to process copilot message' }, { status: 500 })
+    logger.error(`[${requestId}] Copilot error:`, error)
+    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
   }
 }
+
+/**
+ * GET /api/copilot
+ * List chats or get a specific chat
+ */
+export async function GET(req: NextRequest) {
+  try {
+    const session = await getSession()
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const { searchParams } = new URL(req.url)
+    const chatId = searchParams.get('chatId')
+
+    // If chatId is provided, get specific chat
+    if (chatId) {
+      const chat = await getChat(chatId, session.user.id)
+      if (!chat) {
+        return NextResponse.json({ error: 'Chat not found' }, { status: 404 })
+      }
+
+      return NextResponse.json({
+        success: true,
+        chat,
+      })
+    }
+
+    // Otherwise, list chats
+    const workflowId = searchParams.get('workflowId')
+    const limit = Number.parseInt(searchParams.get('limit') || '50')
+    const offset = Number.parseInt(searchParams.get('offset') || '0')
+
+    if (!workflowId) {
+      return NextResponse.json(
+        { error: 'workflowId is required for listing chats' },
+        { status: 400 }
+      )
+    }
+
+    const chats = await listChats(session.user.id, workflowId, { limit, offset })
+
+    return NextResponse.json({
+      success: true,
+      chats,
+    })
+  } catch (error) {
+    logger.error('Failed to handle GET request:', error)
+    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
+  }
+}
+
+/**
+ * PUT /api/copilot
+ * Create a new chat
+ */
+export async function PUT(req: NextRequest) {
+  try {
+    const session = await getSession()
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const body = await req.json()
+    const { workflowId, title, initialMessage } = CreateChatSchema.parse(body)
+
+    logger.info(`Creating new chat for user ${session.user.id}, workflow ${workflowId}`)
+
+    const chat = await createChat(session.user.id, workflowId, {
+      title,
+      initialMessage,
+    })
+
+    logger.info(`Created chat ${chat.id} for user ${session.user.id}`)
+
+    return NextResponse.json({
+      success: true,
+      chat,
+    })
+  } catch (error) {
+    if (error instanceof z.ZodError) {
+      return NextResponse.json(
+        { error: 'Invalid request data', details: error.errors },
+        { status: 400 }
+      )
+    }
+
+    logger.error('Failed to create chat:', error)
+    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
+  }
+}
+
+/**
+ * PATCH /api/copilot
+ * Update a chat with new messages
+ */
+export async function PATCH(req: NextRequest) {
+  try {
+    const session = await getSession()
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const body = await req.json()
+    const { chatId, messages, title } = UpdateChatSchema.parse(body)
+
+    logger.info(`Updating chat ${chatId} for user ${session.user.id}`)
+
+    // Get the current chat to check if it has a title
+    const existingChat = await getChat(chatId, session.user.id)
+
+    let titleToUse = title
+
+    // Generate title if chat doesn't have one and we have messages
+    if (!titleToUse && existingChat && !existingChat.title && messages && messages.length > 0) {
+      const firstUserMessage = messages.find((msg) => msg.role === 'user')
+      if (firstUserMessage) {
+        logger.info('Generating LLM-based title for chat without title')
+        try {
+          titleToUse = await generateChatTitle(firstUserMessage.content)
+          logger.info(`Generated title: ${titleToUse}`)
+        } catch (error) {
+          logger.error('Failed to generate chat title:', error)
+          titleToUse = 'New Chat'
+        }
+      }
+    }
+
+    const chat = await updateChat(chatId, session.user.id, {
+      messages,
+      title: titleToUse,
+    })
+
+    if (!chat) {
+      return NextResponse.json({ error: 'Chat not found or access denied' }, { status: 404 })
+    }
+
+    return NextResponse.json({
+      success: true,
+      chat,
+    })
+  } catch (error) {
+    if (error instanceof z.ZodError) {
+      return NextResponse.json(
+        { error: 'Invalid request data', details: error.errors },
+        { status: 400 }
+      )
+    }
+
+    logger.error('Failed to update chat:', error)
+    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
+  }
+}
+
+/**
+ * DELETE /api/copilot
+ * Delete a chat
+ */
+export async function DELETE(req: NextRequest) {
+  try {
+    const session = await getSession()
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const { searchParams } = new URL(req.url)
+    const chatId = searchParams.get('chatId')
+
+    if (!chatId) {
+      return NextResponse.json({ error: 'chatId is required' }, { status: 400 })
+    }
+
+    const success = await deleteChat(chatId, session.user.id)
+
+    if (!success) {
+      return NextResponse.json({ error: 'Chat not found or access denied' }, { status: 404 })
+    }
+
+    return NextResponse.json({
+      success: true,
+      message: 'Chat deleted successfully',
+    })
+  } catch (error) {
+    logger.error('Failed to delete chat:', error)
+    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
+  }
+}
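A sketch of the chat lifecycle against the handlers above (PUT create, POST message, DELETE remove). Paths, fields, and response keys mirror the route; the workflow id is a placeholder.

// Hypothetical client sketch; not part of the commit
async function copilotLifecycle() {
  // Create a chat for a workflow
  const created = await (
    await fetch('/api/copilot', {
      method: 'PUT',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ workflowId: 'wf-123' }),
    })
  ).json()

  // Send a message into that chat
  await fetch('/api/copilot', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ message: 'Summarize my workflow', chatId: created.chat.id }),
  })

  // Delete the chat by id
  await fetch(`/api/copilot?chatId=${created.chat.id}`, { method: 'DELETE' })
}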
apps/sim/app/api/docs/search/route.ts (new file, 76 lines)
@@ -0,0 +1,76 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
import { searchDocumentation } from '@/lib/copilot/service'
import { createLogger } from '@/lib/logs/console-logger'

const logger = createLogger('DocsSearchAPI')

// Request and response type definitions
interface DocsSearchRequest {
  query: string
  topK?: number
}

interface DocsSearchResult {
  id: number
  title: string
  url: string
  content: string
  similarity: number
}

interface DocsSearchSuccessResponse {
  success: true
  results: DocsSearchResult[]
  query: string
  totalResults: number
  searchTime?: number
}

interface DocsSearchErrorResponse {
  success: false
  error: string
}

export async function POST(
  request: NextRequest
): Promise<NextResponse<DocsSearchSuccessResponse | DocsSearchErrorResponse>> {
  try {
    const requestBody: DocsSearchRequest = await request.json()
    const { query, topK = 5 } = requestBody

    if (!query) {
      const errorResponse: DocsSearchErrorResponse = {
        success: false,
        error: 'Query is required',
      }
      return NextResponse.json(errorResponse, { status: 400 })
    }

    logger.info('Executing documentation search', { query, topK })

    const startTime = Date.now()
    const results = await searchDocumentation(query, { topK })
    const searchTime = Date.now() - startTime

    logger.info(`Found ${results.length} documentation results`, { query })

    const successResponse: DocsSearchSuccessResponse = {
      success: true,
      results,
      query,
      totalResults: results.length,
      searchTime,
    }

    return NextResponse.json(successResponse)
  } catch (error) {
    logger.error('Documentation search API failed', error)

    const errorResponse: DocsSearchErrorResponse = {
      success: false,
      error: `Documentation search failed: ${error instanceof Error ? error.message : 'Unknown error'}`,
    }

    return NextResponse.json(errorResponse, { status: 500 })
  }
}
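As a rough illustration of calling this endpoint (not part of the diff; the query string is hypothetical):

// Sketch: querying the documentation search route.
const res = await fetch('/api/docs/search', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ query: 'presigned uploads', topK: 3 }),
})
const payload = await res.json()
if (payload.success) {
  for (const hit of payload.results) {
    console.log(`${hit.title} (${hit.similarity.toFixed(3)}): ${hit.url}`)
  }
}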
@@ -39,8 +39,9 @@ describe('/api/files/presigned', () => {
    const response = await POST(request)
    const data = await response.json()

    expect(response.status).toBe(400)
    expect(response.status).toBe(500) // Changed from 400 to 500 (StorageConfigError)
    expect(data.error).toBe('Direct uploads are only available when cloud storage is enabled')
    expect(data.code).toBe('STORAGE_CONFIG_ERROR')
    expect(data.directUploadSupported).toBe(false)
  })

@@ -64,7 +65,8 @@ describe('/api/files/presigned', () => {
    const data = await response.json()

    expect(response.status).toBe(400)
    expect(data.error).toBe('Missing fileName or contentType')
    expect(data.error).toBe('fileName is required and cannot be empty')
    expect(data.code).toBe('VALIDATION_ERROR')
  })

  it('should return error when contentType is missing', async () => {
@@ -87,7 +89,59 @@ describe('/api/files/presigned', () => {
    const data = await response.json()

    expect(response.status).toBe(400)
    expect(data.error).toBe('Missing fileName or contentType')
    expect(data.error).toBe('contentType is required and cannot be empty')
    expect(data.code).toBe('VALIDATION_ERROR')
  })

  it('should return error when fileSize is invalid', async () => {
    setupFileApiMocks({
      cloudEnabled: true,
      storageProvider: 's3',
    })

    const { POST } = await import('./route')

    const request = new NextRequest('http://localhost:3000/api/files/presigned', {
      method: 'POST',
      body: JSON.stringify({
        fileName: 'test.txt',
        contentType: 'text/plain',
        fileSize: 0,
      }),
    })

    const response = await POST(request)
    const data = await response.json()

    expect(response.status).toBe(400)
    expect(data.error).toBe('fileSize must be a positive number')
    expect(data.code).toBe('VALIDATION_ERROR')
  })

  it('should return error when file size exceeds limit', async () => {
    setupFileApiMocks({
      cloudEnabled: true,
      storageProvider: 's3',
    })

    const { POST } = await import('./route')

    const largeFileSize = 150 * 1024 * 1024 // 150MB (exceeds 100MB limit)
    const request = new NextRequest('http://localhost:3000/api/files/presigned', {
      method: 'POST',
      body: JSON.stringify({
        fileName: 'large-file.txt',
        contentType: 'text/plain',
        fileSize: largeFileSize,
      }),
    })

    const response = await POST(request)
    const data = await response.json()

    expect(response.status).toBe(400)
    expect(data.error).toContain('exceeds maximum allowed size')
    expect(data.code).toBe('VALIDATION_ERROR')
  })

  it('should generate S3 presigned URL successfully', async () => {
@@ -122,6 +176,34 @@ describe('/api/files/presigned', () => {
    expect(data.directUploadSupported).toBe(true)
  })

  it('should generate knowledge-base S3 presigned URL with kb prefix', async () => {
    setupFileApiMocks({
      cloudEnabled: true,
      storageProvider: 's3',
    })

    const { POST } = await import('./route')

    const request = new NextRequest(
      'http://localhost:3000/api/files/presigned?type=knowledge-base',
      {
        method: 'POST',
        body: JSON.stringify({
          fileName: 'knowledge-doc.pdf',
          contentType: 'application/pdf',
          fileSize: 2048,
        }),
      }
    )

    const response = await POST(request)
    const data = await response.json()

    expect(response.status).toBe(200)
    expect(data.fileInfo.key).toMatch(/^kb\/.*knowledge-doc\.pdf$/)
    expect(data.directUploadSupported).toBe(true)
  })

  it('should generate Azure Blob presigned URL successfully', async () => {
    setupFileApiMocks({
      cloudEnabled: true,
@@ -182,8 +264,9 @@ describe('/api/files/presigned', () => {
    const response = await POST(request)
    const data = await response.json()

    expect(response.status).toBe(400)
    expect(data.error).toBe('Unknown storage provider')
    expect(response.status).toBe(500) // Changed from 400 to 500 (StorageConfigError)
    expect(data.error).toBe('Unknown storage provider: unknown') // Updated error message
    expect(data.code).toBe('STORAGE_CONFIG_ERROR')
    expect(data.directUploadSupported).toBe(false)
  })

@@ -225,8 +308,10 @@ describe('/api/files/presigned', () => {
    const data = await response.json()

    expect(response.status).toBe(500)
    expect(data.error).toBe('Error')
    expect(data.message).toBe('S3 service unavailable')
    expect(data.error).toBe(
      'Failed to generate S3 presigned URL - check AWS credentials and permissions'
    ) // Updated error message
    expect(data.code).toBe('STORAGE_CONFIG_ERROR')
  })

  it('should handle Azure Blob errors gracefully', async () => {
@@ -269,8 +354,8 @@ describe('/api/files/presigned', () => {
    const data = await response.json()

    expect(response.status).toBe(500)
    expect(data.error).toBe('Error')
    expect(data.message).toBe('Azure service unavailable')
    expect(data.error).toBe('Failed to generate Azure Blob presigned URL') // Updated error message
    expect(data.code).toBe('STORAGE_CONFIG_ERROR')
  })

  it('should handle malformed JSON gracefully', async () => {
@@ -289,9 +374,9 @@ describe('/api/files/presigned', () => {
    const response = await POST(request)
    const data = await response.json()

    expect(response.status).toBe(500)
    expect(data.error).toBe('SyntaxError')
    expect(data.message).toContain('Unexpected token')
    expect(response.status).toBe(400) // Changed from 500 to 400 (ValidationError)
    expect(data.error).toBe('Invalid JSON in request body') // Updated error message
    expect(data.code).toBe('VALIDATION_ERROR')
  })
})
@@ -6,7 +6,7 @@ import { createLogger } from '@/lib/logs/console-logger'
import { getStorageProvider, isUsingCloudStorage } from '@/lib/uploads'
import { getBlobServiceClient } from '@/lib/uploads/blob/blob-client'
import { getS3Client, sanitizeFilenameForMetadata } from '@/lib/uploads/s3/s3-client'
import { BLOB_CONFIG, S3_CONFIG } from '@/lib/uploads/setup'
import { BLOB_CONFIG, BLOB_KB_CONFIG, S3_CONFIG, S3_KB_CONFIG } from '@/lib/uploads/setup'
import { createErrorResponse, createOptionsResponse } from '../utils'

const logger = createLogger('PresignedUploadAPI')

@@ -17,124 +17,148 @@ interface PresignedUrlRequest {
  fileSize: number
}

type UploadType = 'general' | 'knowledge-base'

class PresignedUrlError extends Error {
  constructor(
    message: string,
    public code: string,
    public statusCode = 400
  ) {
    super(message)
    this.name = 'PresignedUrlError'
  }
}

class StorageConfigError extends PresignedUrlError {
  constructor(message: string) {
    super(message, 'STORAGE_CONFIG_ERROR', 500)
  }
}

class ValidationError extends PresignedUrlError {
  constructor(message: string) {
    super(message, 'VALIDATION_ERROR', 400)
  }
}

export async function POST(request: NextRequest) {
  try {
    // Parse the request body
    const data: PresignedUrlRequest = await request.json()
    const { fileName, contentType, fileSize } = data

    if (!fileName || !contentType) {
      return NextResponse.json({ error: 'Missing fileName or contentType' }, { status: 400 })
    let data: PresignedUrlRequest
    try {
      data = await request.json()
    } catch {
      throw new ValidationError('Invalid JSON in request body')
    }

    // Only proceed if cloud storage is enabled
    const { fileName, contentType, fileSize } = data

    if (!fileName?.trim()) {
      throw new ValidationError('fileName is required and cannot be empty')
    }
    if (!contentType?.trim()) {
      throw new ValidationError('contentType is required and cannot be empty')
    }
    if (!fileSize || fileSize <= 0) {
      throw new ValidationError('fileSize must be a positive number')
    }

    const MAX_FILE_SIZE = 100 * 1024 * 1024
    if (fileSize > MAX_FILE_SIZE) {
      throw new ValidationError(
        `File size (${fileSize} bytes) exceeds maximum allowed size (${MAX_FILE_SIZE} bytes)`
      )
    }

    const uploadTypeParam = request.nextUrl.searchParams.get('type')
    const uploadType: UploadType =
      uploadTypeParam === 'knowledge-base' ? 'knowledge-base' : 'general'

    if (!isUsingCloudStorage()) {
      return NextResponse.json(
        {
          error: 'Direct uploads are only available when cloud storage is enabled',
          directUploadSupported: false,
        },
        { status: 400 }
      throw new StorageConfigError(
        'Direct uploads are only available when cloud storage is enabled'
      )
    }

    const storageProvider = getStorageProvider()
    logger.info(`Generating ${uploadType} presigned URL for ${fileName} using ${storageProvider}`)

    switch (storageProvider) {
      case 's3':
        return await handleS3PresignedUrl(fileName, contentType, fileSize)
        return await handleS3PresignedUrl(fileName, contentType, fileSize, uploadType)
      case 'blob':
        return await handleBlobPresignedUrl(fileName, contentType, fileSize)
        return await handleBlobPresignedUrl(fileName, contentType, fileSize, uploadType)
      default:
        return NextResponse.json(
          {
            error: 'Unknown storage provider',
            directUploadSupported: false,
          },
          { status: 400 }
        )
        throw new StorageConfigError(`Unknown storage provider: ${storageProvider}`)
    }
  } catch (error) {
    logger.error('Error generating presigned URL:', error)

    if (error instanceof PresignedUrlError) {
      return NextResponse.json(
        {
          error: error.message,
          code: error.code,
          directUploadSupported: false,
        },
        { status: error.statusCode }
      )
    }

    return createErrorResponse(
      error instanceof Error ? error : new Error('Failed to generate presigned URL')
    )
  }
}

async function handleS3PresignedUrl(fileName: string, contentType: string, fileSize: number) {
  // Create a unique key for the file
  const safeFileName = fileName.replace(/\s+/g, '-')
  const uniqueKey = `${Date.now()}-${uuidv4()}-${safeFileName}`

  // Sanitize the original filename for S3 metadata to prevent header errors
  const sanitizedOriginalName = sanitizeFilenameForMetadata(fileName)

  // Create the S3 command
  const command = new PutObjectCommand({
    Bucket: S3_CONFIG.bucket,
    Key: uniqueKey,
    ContentType: contentType,
    Metadata: {
      originalName: sanitizedOriginalName,
      uploadedAt: new Date().toISOString(),
    },
  })

  // Generate the presigned URL
  const presignedUrl = await getSignedUrl(getS3Client(), command, { expiresIn: 3600 })

  // Create a path for API to serve the file
  const servePath = `/api/files/serve/s3/${encodeURIComponent(uniqueKey)}`

  logger.info(`Generated presigned URL for ${fileName} (${uniqueKey})`)

  return NextResponse.json({
    presignedUrl,
    fileInfo: {
      path: servePath,
      key: uniqueKey,
      name: fileName,
      size: fileSize,
      type: contentType,
    },
    directUploadSupported: true,
  })
}

async function handleBlobPresignedUrl(fileName: string, contentType: string, fileSize: number) {
  // Create a unique key for the file
  const safeFileName = fileName.replace(/\s+/g, '-')
  const uniqueKey = `${Date.now()}-${uuidv4()}-${safeFileName}`

async function handleS3PresignedUrl(
  fileName: string,
  contentType: string,
  fileSize: number,
  uploadType: UploadType
) {
  try {
    const blobServiceClient = getBlobServiceClient()
    const containerClient = blobServiceClient.getContainerClient(BLOB_CONFIG.containerName)
    const blockBlobClient = containerClient.getBlockBlobClient(uniqueKey)
    const config = uploadType === 'knowledge-base' ? S3_KB_CONFIG : S3_CONFIG

    // Generate SAS token for upload (write permission)
    const { BlobSASPermissions, generateBlobSASQueryParameters, StorageSharedKeyCredential } =
      await import('@azure/storage-blob')

    const sasOptions = {
      containerName: BLOB_CONFIG.containerName,
      blobName: uniqueKey,
      permissions: BlobSASPermissions.parse('w'), // Write permission for upload
      startsOn: new Date(),
      expiresOn: new Date(Date.now() + 3600 * 1000), // 1 hour expiration
    if (!config.bucket || !config.region) {
      throw new StorageConfigError(`S3 configuration missing for ${uploadType} uploads`)
    }

    const sasToken = generateBlobSASQueryParameters(
      sasOptions,
      new StorageSharedKeyCredential(BLOB_CONFIG.accountName, BLOB_CONFIG.accountKey || '')
    ).toString()
    const safeFileName = fileName.replace(/\s+/g, '-').replace(/[^a-zA-Z0-9.-]/g, '_')
    const prefix = uploadType === 'knowledge-base' ? 'kb/' : ''
    const uniqueKey = `${prefix}${Date.now()}-${uuidv4()}-${safeFileName}`

    const presignedUrl = `${blockBlobClient.url}?${sasToken}`
    const sanitizedOriginalName = sanitizeFilenameForMetadata(fileName)

    // Create a path for API to serve the file
    const servePath = `/api/files/serve/blob/${encodeURIComponent(uniqueKey)}`
    const metadata: Record<string, string> = {
      originalName: sanitizedOriginalName,
      uploadedAt: new Date().toISOString(),
    }

    logger.info(`Generated presigned URL for ${fileName} (${uniqueKey})`)
    if (uploadType === 'knowledge-base') {
      metadata.purpose = 'knowledge-base'
    }

    const command = new PutObjectCommand({
      Bucket: config.bucket,
      Key: uniqueKey,
      ContentType: contentType,
      Metadata: metadata,
    })

    let presignedUrl: string
    try {
      presignedUrl = await getSignedUrl(getS3Client(), command, { expiresIn: 3600 })
    } catch (s3Error) {
      logger.error('Failed to generate S3 presigned URL:', s3Error)
      throw new StorageConfigError(
        'Failed to generate S3 presigned URL - check AWS credentials and permissions'
      )
    }

    const servePath = `/api/files/serve/s3/${encodeURIComponent(uniqueKey)}`

    logger.info(`Generated ${uploadType} S3 presigned URL for ${fileName} (${uniqueKey})`)

    return NextResponse.json({
      presignedUrl,
@@ -146,22 +170,103 @@ async function handleBlobPresignedUrl(fileName: string, contentType: string, fil
      type: contentType,
      },
      directUploadSupported: true,
      uploadHeaders: {
        'x-ms-blob-type': 'BlockBlob',
        'x-ms-blob-content-type': contentType,
        'x-ms-meta-originalname': encodeURIComponent(fileName),
        'x-ms-meta-uploadedat': new Date().toISOString(),
      },
    })
  } catch (error) {
    logger.error('Error generating Blob presigned URL:', error)
    return createErrorResponse(
      error instanceof Error ? error : new Error('Failed to generate Blob presigned URL')
    )
    if (error instanceof PresignedUrlError) {
      throw error
    }
    logger.error('Error in S3 presigned URL generation:', error)
    throw new StorageConfigError('Failed to generate S3 presigned URL')
  }
}

async function handleBlobPresignedUrl(
  fileName: string,
  contentType: string,
  fileSize: number,
  uploadType: UploadType
) {
  try {
    const config = uploadType === 'knowledge-base' ? BLOB_KB_CONFIG : BLOB_CONFIG

    if (
      !config.accountName ||
      !config.containerName ||
      (!config.accountKey && !config.connectionString)
    ) {
      throw new StorageConfigError(`Azure Blob configuration missing for ${uploadType} uploads`)
    }

    const safeFileName = fileName.replace(/\s+/g, '-').replace(/[^a-zA-Z0-9.-]/g, '_')
    const prefix = uploadType === 'knowledge-base' ? 'kb/' : ''
    const uniqueKey = `${prefix}${Date.now()}-${uuidv4()}-${safeFileName}`

    const blobServiceClient = getBlobServiceClient()
    const containerClient = blobServiceClient.getContainerClient(config.containerName)
    const blockBlobClient = containerClient.getBlockBlobClient(uniqueKey)

    const { BlobSASPermissions, generateBlobSASQueryParameters, StorageSharedKeyCredential } =
      await import('@azure/storage-blob')

    const sasOptions = {
      containerName: config.containerName,
      blobName: uniqueKey,
      permissions: BlobSASPermissions.parse('w'), // Write permission for upload
      startsOn: new Date(),
      expiresOn: new Date(Date.now() + 3600 * 1000), // 1 hour expiration
    }

    let sasToken: string
    try {
      sasToken = generateBlobSASQueryParameters(
        sasOptions,
        new StorageSharedKeyCredential(config.accountName, config.accountKey || '')
      ).toString()
    } catch (blobError) {
      logger.error('Failed to generate Azure Blob SAS token:', blobError)
      throw new StorageConfigError(
        'Failed to generate Azure Blob SAS token - check Azure credentials and permissions'
      )
    }

    const presignedUrl = `${blockBlobClient.url}?${sasToken}`

    const servePath = `/api/files/serve/blob/${encodeURIComponent(uniqueKey)}`

    logger.info(`Generated ${uploadType} Azure Blob presigned URL for ${fileName} (${uniqueKey})`)

    const uploadHeaders: Record<string, string> = {
      'x-ms-blob-type': 'BlockBlob',
      'x-ms-blob-content-type': contentType,
      'x-ms-meta-originalname': encodeURIComponent(fileName),
      'x-ms-meta-uploadedat': new Date().toISOString(),
    }

    if (uploadType === 'knowledge-base') {
      uploadHeaders['x-ms-meta-purpose'] = 'knowledge-base'
    }

    return NextResponse.json({
      presignedUrl,
      fileInfo: {
        path: servePath,
        key: uniqueKey,
        name: fileName,
        size: fileSize,
        type: contentType,
      },
      directUploadSupported: true,
      uploadHeaders,
    })
  } catch (error) {
    if (error instanceof PresignedUrlError) {
      throw error
    }
    logger.error('Error in Azure Blob presigned URL generation:', error)
    throw new StorageConfigError('Failed to generate Azure Blob presigned URL')
  }
}

// Handle preflight requests
export async function OPTIONS() {
  return createOptionsResponse()
}
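Putting the route together with a browser client, a direct upload could look roughly like the sketch below (not part of the diff; the same-origin URL and PUT flow are assumptions, and uploadHeaders is only present on the Azure Blob path as in the handlers above):

// Sketch: request a presigned URL, then PUT the bytes straight to cloud storage.
async function directUpload(file: File, type: 'general' | 'knowledge-base' = 'general') {
  const presignRes = await fetch(`/api/files/presigned?type=${type}`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ fileName: file.name, contentType: file.type, fileSize: file.size }),
  })
  if (!presignRes.ok) throw new Error((await presignRes.json()).error)

  const { presignedUrl, fileInfo, uploadHeaders } = await presignRes.json()
  const uploadRes = await fetch(presignedUrl, {
    method: 'PUT',
    headers: { 'Content-Type': file.type, ...(uploadHeaders ?? {}) },
    body: file,
  })
  if (!uploadRes.ok) throw new Error(`Upload failed with status ${uploadRes.status}`)

  return fileInfo // fileInfo.path later serves the file via /api/files/serve/...
}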
@@ -1,7 +1,8 @@
import { readFile } from 'fs/promises'
import type { NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console-logger'
import { downloadFile, isUsingCloudStorage } from '@/lib/uploads'
import { downloadFile, getStorageProvider, isUsingCloudStorage } from '@/lib/uploads'
import { BLOB_KB_CONFIG, S3_KB_CONFIG } from '@/lib/uploads/setup'
import '@/lib/uploads/setup.server'

import {
@@ -16,6 +17,19 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('FilesServeAPI')

async function streamToBuffer(readableStream: NodeJS.ReadableStream): Promise<Buffer> {
  return new Promise((resolve, reject) => {
    const chunks: Buffer[] = []
    readableStream.on('data', (data) => {
      chunks.push(data instanceof Buffer ? data : Buffer.from(data))
    })
    readableStream.on('end', () => {
      resolve(Buffer.concat(chunks))
    })
    readableStream.on('error', reject)
  })
}

/**
 * Main API route handler for serving files
 */
@@ -85,12 +99,65 @@ async function handleLocalFile(filename: string): Promise<NextResponse> {
  }
}

async function downloadKBFile(cloudKey: string): Promise<Buffer> {
  const storageProvider = getStorageProvider()

  if (storageProvider === 'blob') {
    logger.info(`Downloading KB file from Azure Blob Storage: ${cloudKey}`)
    // Use KB-specific blob configuration
    const { getBlobServiceClient } = await import('@/lib/uploads/blob/blob-client')
    const blobServiceClient = getBlobServiceClient()
    const containerClient = blobServiceClient.getContainerClient(BLOB_KB_CONFIG.containerName)
    const blockBlobClient = containerClient.getBlockBlobClient(cloudKey)

    const downloadBlockBlobResponse = await blockBlobClient.download()
    if (!downloadBlockBlobResponse.readableStreamBody) {
      throw new Error('Failed to get readable stream from blob download')
    }

    // Convert stream to buffer
    return await streamToBuffer(downloadBlockBlobResponse.readableStreamBody)
  }

  if (storageProvider === 's3') {
    logger.info(`Downloading KB file from S3: ${cloudKey}`)
    // Use KB-specific S3 configuration
    const { getS3Client } = await import('@/lib/uploads/s3/s3-client')
    const { GetObjectCommand } = await import('@aws-sdk/client-s3')

    const s3Client = getS3Client()
    const command = new GetObjectCommand({
      Bucket: S3_KB_CONFIG.bucket,
      Key: cloudKey,
    })

    const response = await s3Client.send(command)
    if (!response.Body) {
      throw new Error('No body in S3 response')
    }

    // Convert stream to buffer using the same method as the regular S3 client
    const stream = response.Body as any
    return new Promise<Buffer>((resolve, reject) => {
      const chunks: Buffer[] = []
      stream.on('data', (chunk: Buffer) => chunks.push(chunk))
      stream.on('end', () => resolve(Buffer.concat(chunks)))
      stream.on('error', reject)
    })
  }

  throw new Error(`Unsupported storage provider for KB files: ${storageProvider}`)
}

/**
 * Proxy cloud file through our server
 */
async function handleCloudProxy(cloudKey: string): Promise<NextResponse> {
  try {
    const fileBuffer = await downloadFile(cloudKey)
    // Check if this is a KB file (starts with 'kb/')
    const isKBFile = cloudKey.startsWith('kb/')

    const fileBuffer = isKBFile ? await downloadKBFile(cloudKey) : await downloadFile(cloudKey)

    // Extract the original filename from the key (last part after last /)
    const originalFilename = cloudKey.split('/').pop() || 'download'
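For orientation, a knowledge-base file is read back through the same serve proxy; a minimal sketch (not part of the diff; the key below is hypothetical):

// Sketch: kb/-prefixed keys are routed through downloadKBFile by handleCloudProxy.
const key = 'kb/1700000000000-uuid-knowledge-doc.pdf' // hypothetical key
const res = await fetch(`/api/files/serve/s3/${encodeURIComponent(key)}`)
const blob = await res.blob()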
@@ -40,6 +40,7 @@ describe('Individual Folder API Route', () => {
  }

  const { mockAuthenticatedUser, mockUnauthenticated } = mockAuth(TEST_USER)
  const mockGetUserEntityPermissions = vi.fn()

  function createFolderDbMock(options: FolderDbMockOptions = {}) {
    const {
@@ -109,6 +110,12 @@ describe('Individual Folder API Route', () => {
    vi.resetModules()
    vi.clearAllMocks()
    setupCommonApiMocks()

    mockGetUserEntityPermissions.mockResolvedValue('admin')

    vi.doMock('@/lib/permissions/utils', () => ({
      getUserEntityPermissions: mockGetUserEntityPermissions,
    }))
  })

  afterEach(() => {
@@ -181,6 +188,72 @@ describe('Individual Folder API Route', () => {
    expect(data).toHaveProperty('error', 'Unauthorized')
  })

  it('should return 403 when user has only read permissions', async () => {
    mockAuthenticatedUser()
    mockGetUserEntityPermissions.mockResolvedValue('read') // Read-only permissions

    const dbMock = createFolderDbMock()
    vi.doMock('@/db', () => dbMock)

    const req = createMockRequest('PUT', {
      name: 'Updated Folder',
    })
    const params = Promise.resolve({ id: 'folder-1' })

    const { PUT } = await import('./route')

    const response = await PUT(req, { params })

    expect(response.status).toBe(403)

    const data = await response.json()
    expect(data).toHaveProperty('error', 'Write access required to update folders')
  })

  it('should allow folder update for write permissions', async () => {
    mockAuthenticatedUser()
    mockGetUserEntityPermissions.mockResolvedValue('write') // Write permissions

    const dbMock = createFolderDbMock()
    vi.doMock('@/db', () => dbMock)

    const req = createMockRequest('PUT', {
      name: 'Updated Folder',
    })
    const params = Promise.resolve({ id: 'folder-1' })

    const { PUT } = await import('./route')

    const response = await PUT(req, { params })

    expect(response.status).toBe(200)

    const data = await response.json()
    expect(data).toHaveProperty('folder')
  })

  it('should allow folder update for admin permissions', async () => {
    mockAuthenticatedUser()
    mockGetUserEntityPermissions.mockResolvedValue('admin') // Admin permissions

    const dbMock = createFolderDbMock()
    vi.doMock('@/db', () => dbMock)

    const req = createMockRequest('PUT', {
      name: 'Updated Folder',
    })
    const params = Promise.resolve({ id: 'folder-1' })

    const { PUT } = await import('./route')

    const response = await PUT(req, { params })

    expect(response.status).toBe(200)

    const data = await response.json()
    expect(data).toHaveProperty('folder')
  })

  it('should return 400 when trying to set folder as its own parent', async () => {
    mockAuthenticatedUser()

@@ -387,6 +460,68 @@ describe('Individual Folder API Route', () => {
    expect(data).toHaveProperty('error', 'Unauthorized')
  })

  it('should return 403 when user has only read permissions for delete', async () => {
    mockAuthenticatedUser()
    mockGetUserEntityPermissions.mockResolvedValue('read') // Read-only permissions

    const dbMock = createFolderDbMock()
    vi.doMock('@/db', () => dbMock)

    const req = createMockRequest('DELETE')
    const params = Promise.resolve({ id: 'folder-1' })

    const { DELETE } = await import('./route')

    const response = await DELETE(req, { params })

    expect(response.status).toBe(403)

    const data = await response.json()
    expect(data).toHaveProperty('error', 'Admin access required to delete folders')
  })

  it('should return 403 when user has only write permissions for delete', async () => {
    mockAuthenticatedUser()
    mockGetUserEntityPermissions.mockResolvedValue('write') // Write permissions (not enough for delete)

    const dbMock = createFolderDbMock()
    vi.doMock('@/db', () => dbMock)

    const req = createMockRequest('DELETE')
    const params = Promise.resolve({ id: 'folder-1' })

    const { DELETE } = await import('./route')

    const response = await DELETE(req, { params })

    expect(response.status).toBe(403)

    const data = await response.json()
    expect(data).toHaveProperty('error', 'Admin access required to delete folders')
  })

  it('should allow folder deletion for admin permissions', async () => {
    mockAuthenticatedUser()
    mockGetUserEntityPermissions.mockResolvedValue('admin') // Admin permissions

    const dbMock = createFolderDbMock({
      folderLookupResult: mockFolder,
    })
    vi.doMock('@/db', () => dbMock)

    const req = createMockRequest('DELETE')
    const params = Promise.resolve({ id: 'folder-1' })

    const { DELETE } = await import('./route')

    const response = await DELETE(req, { params })

    expect(response.status).toBe(200)

    const data = await response.json()
    expect(data).toHaveProperty('success', true)
  })

  it('should handle database errors during deletion', async () => {
    mockAuthenticatedUser()

@@ -2,6 +2,7 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console-logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { db } from '@/db'
import { workflow, workflowFolder } from '@/db/schema'

@@ -19,17 +20,31 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
    const body = await request.json()
    const { name, color, isExpanded, parentId } = body

    // Verify the folder exists and belongs to the user
    // Verify the folder exists
    const existingFolder = await db
      .select()
      .from(workflowFolder)
      .where(and(eq(workflowFolder.id, id), eq(workflowFolder.userId, session.user.id)))
      .where(eq(workflowFolder.id, id))
      .then((rows) => rows[0])

    if (!existingFolder) {
      return NextResponse.json({ error: 'Folder not found' }, { status: 404 })
    }

    // Check if user has write permissions for the workspace
    const workspacePermission = await getUserEntityPermissions(
      session.user.id,
      'workspace',
      existingFolder.workspaceId
    )

    if (!workspacePermission || workspacePermission === 'read') {
      return NextResponse.json(
        { error: 'Write access required to update folders' },
        { status: 403 }
      )
    }

    // Prevent setting a folder as its own parent or creating circular references
    if (parentId && parentId === id) {
      return NextResponse.json({ error: 'Folder cannot be its own parent' }, { status: 400 })
@@ -81,19 +96,33 @@ export async function DELETE(

    const { id } = await params

    // Verify the folder exists and belongs to the user
    // Verify the folder exists
    const existingFolder = await db
      .select()
      .from(workflowFolder)
      .where(and(eq(workflowFolder.id, id), eq(workflowFolder.userId, session.user.id)))
      .where(eq(workflowFolder.id, id))
      .then((rows) => rows[0])

    if (!existingFolder) {
      return NextResponse.json({ error: 'Folder not found' }, { status: 404 })
    }

    // Check if user has admin permissions for the workspace (admin-only for deletions)
    const workspacePermission = await getUserEntityPermissions(
      session.user.id,
      'workspace',
      existingFolder.workspaceId
    )

    if (workspacePermission !== 'admin') {
      return NextResponse.json(
        { error: 'Admin access required to delete folders' },
        { status: 403 }
      )
    }

    // Recursively delete folder and all its contents
    const deletionStats = await deleteFolderRecursively(id, session.user.id)
    const deletionStats = await deleteFolderRecursively(id, existingFolder.workspaceId)

    logger.info('Deleted folder and all contents:', {
      id,
@@ -113,41 +142,40 @@ export async function DELETE(
// Helper function to recursively delete a folder and all its contents
async function deleteFolderRecursively(
  folderId: string,
  userId: string
  workspaceId: string
): Promise<{ folders: number; workflows: number }> {
  const stats = { folders: 0, workflows: 0 }

  // Get all child folders first
  // Get all child folders first (workspace-scoped, not user-scoped)
  const childFolders = await db
    .select({ id: workflowFolder.id })
    .from(workflowFolder)
    .where(and(eq(workflowFolder.parentId, folderId), eq(workflowFolder.userId, userId)))
    .where(and(eq(workflowFolder.parentId, folderId), eq(workflowFolder.workspaceId, workspaceId)))

  // Recursively delete child folders
  for (const childFolder of childFolders) {
    const childStats = await deleteFolderRecursively(childFolder.id, userId)
    const childStats = await deleteFolderRecursively(childFolder.id, workspaceId)
    stats.folders += childStats.folders
    stats.workflows += childStats.workflows
  }

  // Delete all workflows in this folder
  // Delete all workflows in this folder (workspace-scoped, not user-scoped)
  // The database cascade will handle deleting related workflow_blocks, workflow_edges, workflow_subflows
  const workflowsInFolder = await db
    .select({ id: workflow.id })
    .from(workflow)
    .where(and(eq(workflow.folderId, folderId), eq(workflow.userId, userId)))
    .where(and(eq(workflow.folderId, folderId), eq(workflow.workspaceId, workspaceId)))

  if (workflowsInFolder.length > 0) {
    await db
      .delete(workflow)
      .where(and(eq(workflow.folderId, folderId), eq(workflow.userId, userId)))
      .where(and(eq(workflow.folderId, folderId), eq(workflow.workspaceId, workspaceId)))

    stats.workflows += workflowsInFolder.length
  }

  // Delete this folder
  await db
    .delete(workflowFolder)
    .where(and(eq(workflowFolder.id, folderId), eq(workflowFolder.userId, userId)))
  await db.delete(workflowFolder).where(eq(workflowFolder.id, folderId))

  stats.folders += 1
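Under the new model, folder deletion succeeds only for workspace admins, regardless of who created the folder; a rough client-side sketch (not part of the diff; the folder id and route shape are assumptions based on the file path):

// Sketch: non-admins now get a 403 from the folder DELETE route.
const res = await fetch('/api/folders/folder-1', { method: 'DELETE' })
if (res.status === 403) {
  console.warn('Admin access required to delete folders')
}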
@@ -52,6 +52,7 @@ describe('Folders API Route', () => {
  const mockValues = vi.fn()
  const mockReturning = vi.fn()
  const mockTransaction = vi.fn()
  const mockGetUserEntityPermissions = vi.fn()

  beforeEach(() => {
    vi.resetModules()
@@ -72,6 +73,8 @@ describe('Folders API Route', () => {
    mockValues.mockReturnValue({ returning: mockReturning })
    mockReturning.mockReturnValue([mockFolders[0]])

    mockGetUserEntityPermissions.mockResolvedValue('admin')

    vi.doMock('@/db', () => ({
      db: {
        select: mockSelect,
@@ -79,6 +82,10 @@ describe('Folders API Route', () => {
        transaction: mockTransaction,
      },
    }))

    vi.doMock('@/lib/permissions/utils', () => ({
      getUserEntityPermissions: mockGetUserEntityPermissions,
    }))
  })

  afterEach(() => {
@@ -143,6 +150,42 @@ describe('Folders API Route', () => {
    expect(data).toHaveProperty('error', 'Workspace ID is required')
  })

  it('should return 403 when user has no workspace permissions', async () => {
    mockAuthenticatedUser()
    mockGetUserEntityPermissions.mockResolvedValue(null) // No permissions

    const mockRequest = createMockRequest('GET')
    Object.defineProperty(mockRequest, 'url', {
      value: 'http://localhost:3000/api/folders?workspaceId=workspace-123',
    })

    const { GET } = await import('./route')
    const response = await GET(mockRequest)

    expect(response.status).toBe(403)

    const data = await response.json()
    expect(data).toHaveProperty('error', 'Access denied to this workspace')
  })

  it('should return 403 when user has only read permissions', async () => {
    mockAuthenticatedUser()
    mockGetUserEntityPermissions.mockResolvedValue('read') // Read-only permissions

    const mockRequest = createMockRequest('GET')
    Object.defineProperty(mockRequest, 'url', {
      value: 'http://localhost:3000/api/folders?workspaceId=workspace-123',
    })

    const { GET } = await import('./route')
    const response = await GET(mockRequest)

    expect(response.status).toBe(200) // Should work for read permissions

    const data = await response.json()
    expect(data).toHaveProperty('folders')
  })

  it('should handle database errors gracefully', async () => {
    mockAuthenticatedUser()

@@ -295,6 +338,100 @@ describe('Folders API Route', () => {
    expect(data).toHaveProperty('error', 'Unauthorized')
  })

  it('should return 403 when user has only read permissions', async () => {
    mockAuthenticatedUser()
    mockGetUserEntityPermissions.mockResolvedValue('read') // Read-only permissions

    const req = createMockRequest('POST', {
      name: 'Test Folder',
      workspaceId: 'workspace-123',
    })

    const { POST } = await import('./route')
    const response = await POST(req)

    expect(response.status).toBe(403)

    const data = await response.json()
    expect(data).toHaveProperty('error', 'Write or Admin access required to create folders')
  })

  it('should allow folder creation for write permissions', async () => {
    mockAuthenticatedUser()
    mockGetUserEntityPermissions.mockResolvedValue('write') // Write permissions

    mockTransaction.mockImplementationOnce(async (callback: any) => {
      const tx = {
        select: vi.fn().mockReturnValue({
          from: vi.fn().mockReturnValue({
            where: vi.fn().mockReturnValue({
              orderBy: vi.fn().mockReturnValue({
                limit: vi.fn().mockReturnValue([]), // No existing folders
              }),
            }),
          }),
        }),
        insert: vi.fn().mockReturnValue({
          values: vi.fn().mockReturnValue({
            returning: vi.fn().mockReturnValue([mockFolders[0]]),
          }),
        }),
      }
      return await callback(tx)
    })

    const req = createMockRequest('POST', {
      name: 'Test Folder',
      workspaceId: 'workspace-123',
    })

    const { POST } = await import('./route')
    const response = await POST(req)

    expect(response.status).toBe(200)

    const data = await response.json()
    expect(data).toHaveProperty('folder')
  })

  it('should allow folder creation for admin permissions', async () => {
    mockAuthenticatedUser()
    mockGetUserEntityPermissions.mockResolvedValue('admin') // Admin permissions

    mockTransaction.mockImplementationOnce(async (callback: any) => {
      const tx = {
        select: vi.fn().mockReturnValue({
          from: vi.fn().mockReturnValue({
            where: vi.fn().mockReturnValue({
              orderBy: vi.fn().mockReturnValue({
                limit: vi.fn().mockReturnValue([]), // No existing folders
              }),
            }),
          }),
        }),
        insert: vi.fn().mockReturnValue({
          values: vi.fn().mockReturnValue({
            returning: vi.fn().mockReturnValue([mockFolders[0]]),
          }),
        }),
      }
      return await callback(tx)
    })

    const req = createMockRequest('POST', {
      name: 'Test Folder',
      workspaceId: 'workspace-123',
    })

    const { POST } = await import('./route')
    const response = await POST(req)

    expect(response.status).toBe(200)

    const data = await response.json()
    expect(data).toHaveProperty('folder')
  })

  it('should return 400 when required fields are missing', async () => {
    const testCases = [
      { name: '', workspaceId: 'workspace-123' }, // Missing name

@@ -2,6 +2,7 @@ import { and, asc, desc, eq, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console-logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { db } from '@/db'
import { workflowFolder } from '@/db/schema'

@@ -22,13 +23,23 @@ export async function GET(request: NextRequest) {
    return NextResponse.json({ error: 'Workspace ID is required' }, { status: 400 })
  }

  // Fetch all folders for the workspace, ordered by sortOrder and createdAt
  // Check if user has workspace permissions
  const workspacePermission = await getUserEntityPermissions(
    session.user.id,
    'workspace',
    workspaceId
  )

  if (!workspacePermission) {
    return NextResponse.json({ error: 'Access denied to this workspace' }, { status: 403 })
  }

  // If user has workspace permissions, fetch ALL folders in the workspace
  // This allows shared workspace members to see folders created by other users
  const folders = await db
    .select()
    .from(workflowFolder)
    .where(
      and(eq(workflowFolder.workspaceId, workspaceId), eq(workflowFolder.userId, session.user.id))
    )
    .where(eq(workflowFolder.workspaceId, workspaceId))
    .orderBy(asc(workflowFolder.sortOrder), asc(workflowFolder.createdAt))

  return NextResponse.json({ folders })
@@ -53,19 +64,33 @@ export async function POST(request: NextRequest) {
    return NextResponse.json({ error: 'Name and workspace ID are required' }, { status: 400 })
  }

  // Check if user has workspace permissions (at least 'write' access to create folders)
  const workspacePermission = await getUserEntityPermissions(
    session.user.id,
    'workspace',
    workspaceId
  )

  if (!workspacePermission || workspacePermission === 'read') {
    return NextResponse.json(
      { error: 'Write or Admin access required to create folders' },
      { status: 403 }
    )
  }

  // Generate a new ID
  const id = crypto.randomUUID()

  // Use transaction to ensure sortOrder consistency
  const newFolder = await db.transaction(async (tx) => {
    // Get the next sort order for the parent (or root level)
    // Consider all folders in the workspace, not just those created by current user
    const existingFolders = await tx
      .select({ sortOrder: workflowFolder.sortOrder })
      .from(workflowFolder)
      .where(
        and(
          eq(workflowFolder.workspaceId, workspaceId),
          eq(workflowFolder.userId, session.user.id),
          parentId ? eq(workflowFolder.parentId, parentId) : isNull(workflowFolder.parentId)
        )
      )
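A quick usage sketch for the updated folders API (not part of the diff; the workspaceId and folder name are hypothetical):

// Sketch: listing requires any workspace permission; creating requires write or admin.
const list = await fetch('/api/folders?workspaceId=workspace-123')
const created = await fetch('/api/folders', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ name: 'Reports', workspaceId: 'workspace-123' }),
})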
@@ -0,0 +1,413 @@
/**
 * Tests for knowledge document chunks API route
 *
 * @vitest-environment node
 */
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
  createMockRequest,
  mockAuth,
  mockConsoleLogger,
  mockDrizzleOrm,
  mockKnowledgeSchemas,
} from '@/app/api/__test-utils__/utils'
import type { DocumentAccessCheck } from '../../../../utils'

mockKnowledgeSchemas()
mockDrizzleOrm()
mockConsoleLogger()

vi.mock('@/lib/tokenization/estimators', () => ({
  estimateTokenCount: vi.fn().mockReturnValue({ count: 452 }),
}))

vi.mock('@/providers/utils', () => ({
  calculateCost: vi.fn().mockReturnValue({
    input: 0.00000904,
    output: 0,
    total: 0.00000904,
    pricing: {
      input: 0.02,
      output: 0,
      updatedAt: '2025-07-10',
    },
  }),
}))

vi.mock('../../../../utils', () => ({
  checkDocumentAccess: vi.fn(),
  generateEmbeddings: vi.fn().mockResolvedValue([[0.1, 0.2, 0.3, 0.4, 0.5]]),
}))

describe('Knowledge Document Chunks API Route', () => {
  const mockAuth$ = mockAuth()

  const mockDbChain = {
    select: vi.fn().mockReturnThis(),
    from: vi.fn().mockReturnThis(),
    where: vi.fn().mockReturnThis(),
    orderBy: vi.fn().mockReturnThis(),
    limit: vi.fn().mockReturnThis(),
    offset: vi.fn().mockReturnThis(),
    insert: vi.fn().mockReturnThis(),
    values: vi.fn().mockResolvedValue(undefined),
    update: vi.fn().mockReturnThis(),
    set: vi.fn().mockReturnThis(),
    returning: vi.fn().mockResolvedValue([]),
    delete: vi.fn().mockReturnThis(),
    transaction: vi.fn(),
  }

  const mockGetUserId = vi.fn()

  beforeEach(async () => {
    vi.clearAllMocks()

    vi.doMock('@/db', () => ({
      db: mockDbChain,
    }))

    vi.doMock('@/app/api/auth/oauth/utils', () => ({
      getUserId: mockGetUserId,
    }))

    Object.values(mockDbChain).forEach((fn) => {
      if (typeof fn === 'function' && fn !== mockDbChain.values && fn !== mockDbChain.returning) {
        fn.mockClear().mockReturnThis()
      }
    })

    vi.stubGlobal('crypto', {
      randomUUID: vi.fn().mockReturnValue('mock-chunk-uuid-1234'),
      createHash: vi.fn().mockReturnValue({
        update: vi.fn().mockReturnThis(),
        digest: vi.fn().mockReturnValue('mock-hash-123'),
      }),
    })
  })

  afterEach(() => {
    vi.clearAllMocks()
  })

  describe('POST /api/knowledge/[id]/documents/[documentId]/chunks', () => {
    const validChunkData = {
      content: 'This is test chunk content for uploading to the knowledge base document.',
      enabled: true,
    }

    const mockDocumentAccess = {
      hasAccess: true,
      notFound: false,
      reason: '',
      document: {
        id: 'doc-123',
        processingStatus: 'completed',
        tag1: 'tag1-value',
        tag2: 'tag2-value',
        tag3: null,
        tag4: null,
        tag5: null,
        tag6: null,
        tag7: null,
      },
    }

    const mockParams = Promise.resolve({ id: 'kb-123', documentId: 'doc-123' })

    it('should create chunk successfully with cost tracking', async () => {
      const { checkDocumentAccess } = await import('../../../../utils')
      const { estimateTokenCount } = await import('@/lib/tokenization/estimators')
      const { calculateCost } = await import('@/providers/utils')

      mockGetUserId.mockResolvedValue('user-123')
      vi.mocked(checkDocumentAccess).mockResolvedValue(mockDocumentAccess as DocumentAccessCheck)

      // Mock transaction
      const mockTx = {
        select: vi.fn().mockReturnThis(),
        from: vi.fn().mockReturnThis(),
        where: vi.fn().mockReturnThis(),
        orderBy: vi.fn().mockReturnThis(),
        limit: vi.fn().mockResolvedValue([{ chunkIndex: 0 }]),
        insert: vi.fn().mockReturnThis(),
        values: vi.fn().mockResolvedValue(undefined),
        update: vi.fn().mockReturnThis(),
        set: vi.fn().mockReturnThis(),
      }

      mockDbChain.transaction.mockImplementation(async (callback) => {
        return await callback(mockTx)
      })

      const req = createMockRequest('POST', validChunkData)
      const { POST } = await import('./route')
      const response = await POST(req, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.success).toBe(true)

      // Verify cost tracking
      expect(data.data.cost).toBeDefined()
      expect(data.data.cost.input).toBe(0.00000904)
      expect(data.data.cost.output).toBe(0)
      expect(data.data.cost.total).toBe(0.00000904)
      expect(data.data.cost.tokens).toEqual({
        prompt: 452,
        completion: 0,
        total: 452,
      })
      expect(data.data.cost.model).toBe('text-embedding-3-small')
      expect(data.data.cost.pricing).toEqual({
        input: 0.02,
        output: 0,
        updatedAt: '2025-07-10',
      })

      // Verify function calls
      expect(estimateTokenCount).toHaveBeenCalledWith(validChunkData.content, 'openai')
      expect(calculateCost).toHaveBeenCalledWith('text-embedding-3-small', 452, 0, false)
    })

    it('should handle workflow-based authentication', async () => {
      const { checkDocumentAccess } = await import('../../../../utils')

      const workflowData = {
        ...validChunkData,
        workflowId: 'workflow-123',
      }

      mockGetUserId.mockResolvedValue('user-123')
      vi.mocked(checkDocumentAccess).mockResolvedValue(mockDocumentAccess as DocumentAccessCheck)

      const mockTx = {
        select: vi.fn().mockReturnThis(),
        from: vi.fn().mockReturnThis(),
        where: vi.fn().mockReturnThis(),
        orderBy: vi.fn().mockReturnThis(),
        limit: vi.fn().mockResolvedValue([]),
        insert: vi.fn().mockReturnThis(),
        values: vi.fn().mockResolvedValue(undefined),
        update: vi.fn().mockReturnThis(),
        set: vi.fn().mockReturnThis(),
      }

      mockDbChain.transaction.mockImplementation(async (callback) => {
        return await callback(mockTx)
      })

      const req = createMockRequest('POST', workflowData)
      const { POST } = await import('./route')
      const response = await POST(req, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.success).toBe(true)
      expect(mockGetUserId).toHaveBeenCalledWith(expect.any(String), 'workflow-123')
    })

    it.concurrent('should return unauthorized for unauthenticated request', async () => {
      mockGetUserId.mockResolvedValue(null)

      const req = createMockRequest('POST', validChunkData)
      const { POST } = await import('./route')
      const response = await POST(req, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(401)
      expect(data.error).toBe('Unauthorized')
    })

    it('should return not found for workflow that does not exist', async () => {
      const workflowData = {
        ...validChunkData,
        workflowId: 'nonexistent-workflow',
      }

      mockGetUserId.mockResolvedValue(null)

      const req = createMockRequest('POST', workflowData)
      const { POST } = await import('./route')
      const response = await POST(req, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(404)
      expect(data.error).toBe('Workflow not found')
    })

    it.concurrent('should return not found for document access denied', async () => {
      const { checkDocumentAccess } = await import('../../../../utils')

      mockGetUserId.mockResolvedValue('user-123')
      vi.mocked(checkDocumentAccess).mockResolvedValue({
        hasAccess: false,
        notFound: true,
        reason: 'Document not found',
      })

      const req = createMockRequest('POST', validChunkData)
      const { POST } = await import('./route')
      const response = await POST(req, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(404)
      expect(data.error).toBe('Document not found')
    })

    it('should return unauthorized for unauthorized document access', async () => {
      const { checkDocumentAccess } = await import('../../../../utils')

      mockGetUserId.mockResolvedValue('user-123')
      vi.mocked(checkDocumentAccess).mockResolvedValue({
        hasAccess: false,
        notFound: false,
        reason: 'Unauthorized access',
      })

      const req = createMockRequest('POST', validChunkData)
      const { POST } = await import('./route')
      const response = await POST(req, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(401)
      expect(data.error).toBe('Unauthorized')
    })

    it('should reject chunks for failed documents', async () => {
      const { checkDocumentAccess } = await import('../../../../utils')

      mockGetUserId.mockResolvedValue('user-123')
      vi.mocked(checkDocumentAccess).mockResolvedValue({
        ...mockDocumentAccess,
        document: {
          ...mockDocumentAccess.document!,
          processingStatus: 'failed',
        },
      } as DocumentAccessCheck)

      const req = createMockRequest('POST', validChunkData)
      const { POST } = await import('./route')
      const response = await POST(req, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(400)
      expect(data.error).toBe('Cannot add chunks to failed document')
    })

    it.concurrent('should validate chunk data', async () => {
      const { checkDocumentAccess } = await import('../../../../utils')

      mockGetUserId.mockResolvedValue('user-123')
      vi.mocked(checkDocumentAccess).mockResolvedValue(mockDocumentAccess as DocumentAccessCheck)

      const invalidData = {
        content: '', // Empty content
        enabled: true,
      }

      const req = createMockRequest('POST', invalidData)
      const { POST } = await import('./route')
      const response = await POST(req, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(400)
      expect(data.error).toBe('Invalid request data')
      expect(data.details).toBeDefined()
    })

    it('should inherit tags from parent document', async () => {
      const { checkDocumentAccess } = await import('../../../../utils')

      mockGetUserId.mockResolvedValue('user-123')
      vi.mocked(checkDocumentAccess).mockResolvedValue(mockDocumentAccess as DocumentAccessCheck)

      const mockTx = {
        select: vi.fn().mockReturnThis(),
        from: vi.fn().mockReturnThis(),
        where: vi.fn().mockReturnThis(),
        orderBy: vi.fn().mockReturnThis(),
        limit: vi.fn().mockResolvedValue([]),
        insert: vi.fn().mockReturnThis(),
        values: vi.fn().mockImplementation((data) => {
          // Verify that tags are inherited from document
          expect(data.tag1).toBe('tag1-value')
          expect(data.tag2).toBe('tag2-value')
          expect(data.tag3).toBe(null)
          return Promise.resolve(undefined)
        }),
        update: vi.fn().mockReturnThis(),
        set: vi.fn().mockReturnThis(),
      }

      mockDbChain.transaction.mockImplementation(async (callback) => {
        return await callback(mockTx)
      })

      const req = createMockRequest('POST', validChunkData)
      const { POST } = await import('./route')
      await POST(req, { params: mockParams })

      expect(mockTx.values).toHaveBeenCalled()
    })

    it.concurrent('should handle cost calculation with different content lengths', async () => {
      const { estimateTokenCount } = await import('@/lib/tokenization/estimators')
      const { calculateCost } = await import('@/providers/utils')
      const { checkDocumentAccess } = await import('../../../../utils')

      // Mock larger content with more tokens
      vi.mocked(estimateTokenCount).mockReturnValue({
        count: 1000,
        confidence: 'high',
        provider: 'openai',
        method: 'precise',
      })
      vi.mocked(calculateCost).mockReturnValue({
        input: 0.00002,
        output: 0,
        total: 0.00002,
        pricing: {
          input: 0.02,
          output: 0,
          updatedAt: '2025-07-10',
        },
      })

      const largeChunkData = {
        content:
          'This is a much larger chunk of content that would result in significantly more tokens when processed through the OpenAI tokenization system for embedding generation. This content is designed to test the cost calculation accuracy with larger input sizes.',
        enabled: true,
      }

      mockGetUserId.mockResolvedValue('user-123')
      vi.mocked(checkDocumentAccess).mockResolvedValue(mockDocumentAccess as DocumentAccessCheck)

      const mockTx = {
        select: vi.fn().mockReturnThis(),
        from: vi.fn().mockReturnThis(),
        where: vi.fn().mockReturnThis(),
        orderBy: vi.fn().mockReturnThis(),
        limit: vi.fn().mockResolvedValue([]),
        insert: vi.fn().mockReturnThis(),
        values: vi.fn().mockResolvedValue(undefined),
        update: vi.fn().mockReturnThis(),
        set: vi.fn().mockReturnThis(),
      }

      mockDbChain.transaction.mockImplementation(async (callback) => {
        return await callback(mockTx)
      })

      const req = createMockRequest('POST', largeChunkData)
      const { POST } = await import('./route')
      const response = await POST(req, { params: mockParams })
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data.cost.input).toBe(0.00002)
      expect(data.data.cost.tokens.prompt).toBe(1000)
      expect(calculateCost).toHaveBeenCalledWith('text-embedding-3-small', 1000, 0, false)
    })
  })
})
@@ -4,9 +4,11 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console-logger'
import { estimateTokenCount } from '@/lib/tokenization/estimators'
import { getUserId } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { document, embedding } from '@/db/schema'
import { calculateCost } from '@/providers/utils'
import { checkDocumentAccess, generateEmbeddings } from '../../../../utils'

const logger = createLogger('DocumentChunksAPI')

@@ -118,7 +120,13 @@ export async function GET(
        enabled: embedding.enabled,
        startOffset: embedding.startOffset,
        endOffset: embedding.endOffset,
        metadata: embedding.metadata,
        tag1: embedding.tag1,
        tag2: embedding.tag2,
        tag3: embedding.tag3,
        tag4: embedding.tag4,
        tag5: embedding.tag5,
        tag6: embedding.tag6,
        tag7: embedding.tag7,
        createdAt: embedding.createdAt,
        updatedAt: embedding.updatedAt,
      })

@@ -211,6 +219,9 @@ export async function POST(
    logger.info(`[${requestId}] Generating embedding for manual chunk`)
    const embeddings = await generateEmbeddings([validatedData.content])

    // Calculate accurate token count for both database storage and cost calculation
    const tokenCount = estimateTokenCount(validatedData.content, 'openai')

    const chunkId = crypto.randomUUID()
    const now = new Date()

@@ -234,12 +245,19 @@ export async function POST(
      chunkHash: crypto.createHash('sha256').update(validatedData.content).digest('hex'),
      content: validatedData.content,
      contentLength: validatedData.content.length,
      tokenCount: Math.ceil(validatedData.content.length / 4), // Rough approximation
      tokenCount: tokenCount.count, // Use accurate token count
      embedding: embeddings[0],
      embeddingModel: 'text-embedding-3-small',
      startOffset: 0, // Manual chunks don't have document offsets
      endOffset: validatedData.content.length,
      metadata: { manual: true }, // Mark as manually created
      // Inherit tags from parent document
      tag1: doc.tag1,
      tag2: doc.tag2,
      tag3: doc.tag3,
      tag4: doc.tag4,
      tag5: doc.tag5,
      tag6: doc.tag6,
      tag7: doc.tag7,
      enabled: validatedData.enabled,
      createdAt: now,
      updatedAt: now,

@@ -263,9 +281,38 @@ export async function POST(
    logger.info(`[${requestId}] Manual chunk created: ${chunkId} in document ${documentId}`)

    // Calculate cost for the embedding (with fallback if calculation fails)
    let cost = null
    try {
      cost = calculateCost('text-embedding-3-small', tokenCount.count, 0, false)
    } catch (error) {
      logger.warn(`[${requestId}] Failed to calculate cost for chunk upload`, {
        error: error instanceof Error ? error.message : 'Unknown error',
      })
      // Continue without cost information rather than failing the upload
    }

    return NextResponse.json({
      success: true,
      data: newChunk,
      data: {
        ...newChunk,
        ...(cost
          ? {
              cost: {
                input: cost.input,
                output: cost.output,
                total: cost.total,
                tokens: {
                  prompt: tokenCount.count,
                  completion: 0,
                  total: tokenCount.count,
                },
                model: 'text-embedding-3-small',
                pricing: cost.pricing,
              },
            }
          : {}),
      },
    })
  } catch (validationError) {
    if (validationError instanceof z.ZodError) {
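Taken together, the POST handler now returns the inserted chunk record plus an optional cost summary. A minimal sketch of the payload shape a client can expect (the interface name is illustrative and not part of the codebase; chunk fields beyond those visible in the diff are omitted):

// Illustrative response shape for the chunk-upload POST. `cost` is
// present only when calculateCost succeeds; otherwise the spread adds nothing.
interface ChunkUploadResponse {
  success: boolean
  data: {
    id: string
    content: string
    tokenCount: number
    enabled: boolean
    cost?: {
      input: number
      output: number
      total: number
      tokens: { prompt: number; completion: number; total: number }
      model: 'text-embedding-3-small'
      pricing: { input: number; output: number; updatedAt: string }
    }
  }
}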
@@ -153,6 +153,14 @@ const CreateDocumentSchema = z.object({
  fileUrl: z.string().url('File URL must be valid'),
  fileSize: z.number().min(1, 'File size must be greater than 0'),
  mimeType: z.string().min(1, 'MIME type is required'),
  // Document tags for filtering
  tag1: z.string().optional(),
  tag2: z.string().optional(),
  tag3: z.string().optional(),
  tag4: z.string().optional(),
  tag5: z.string().optional(),
  tag6: z.string().optional(),
  tag7: z.string().optional(),
})

const BulkCreateDocumentsSchema = z.object({

@@ -229,6 +237,14 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
        processingError: document.processingError,
        enabled: document.enabled,
        uploadedAt: document.uploadedAt,
        // Include tags in response
        tag1: document.tag1,
        tag2: document.tag2,
        tag3: document.tag3,
        tag4: document.tag4,
        tag5: document.tag5,
        tag6: document.tag6,
        tag7: document.tag7,
      })
      .from(document)
      .where(and(...whereConditions))

@@ -298,6 +314,14 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      processingStatus: 'pending' as const,
      enabled: true,
      uploadedAt: now,
      // Include tags from upload
      tag1: docData.tag1 || null,
      tag2: docData.tag2 || null,
      tag3: docData.tag3 || null,
      tag4: docData.tag4 || null,
      tag5: docData.tag5 || null,
      tag6: docData.tag6 || null,
      tag7: docData.tag7 || null,
    }

    await tx.insert(document).values(newDocument)

@@ -372,6 +396,14 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      characterCount: 0,
      enabled: true,
      uploadedAt: now,
      // Include tags from upload
      tag1: validatedData.tag1 || null,
      tag2: validatedData.tag2 || null,
      tag3: validatedData.tag3 || null,
      tag4: validatedData.tag4 || null,
      tag5: validatedData.tag5 || null,
      tag6: validatedData.tag6 || null,
      tag7: validatedData.tag7 || null,
    }

    await db.insert(document).values(newDocument)

@@ -8,7 +8,6 @@ import { document, knowledgeBase } from '@/db/schema'

const logger = createLogger('KnowledgeBaseAPI')

// Schema for knowledge base creation
const CreateKnowledgeBaseSchema = z.object({
  name: z.string().min(1, 'Name is required'),
  description: z.string().optional(),
@@ -34,6 +34,23 @@ vi.mock('@/lib/documents/utils', () => ({
  retryWithExponentialBackoff: vi.fn().mockImplementation((fn) => fn()),
}))

vi.mock('@/lib/tokenization/estimators', () => ({
  estimateTokenCount: vi.fn().mockReturnValue({ count: 521 }),
}))

vi.mock('@/providers/utils', () => ({
  calculateCost: vi.fn().mockReturnValue({
    input: 0.00001042,
    output: 0,
    total: 0.00001042,
    pricing: {
      input: 0.02,
      output: 0,
      updatedAt: '2025-07-10',
    },
  }),
}))

mockConsoleLogger()

describe('Knowledge Search API Route', () => {

@@ -206,7 +223,7 @@ describe('Knowledge Search API Route', () => {
    expect(mockGetUserId).toHaveBeenCalledWith(expect.any(String), 'workflow-123')
  })

  it('should return unauthorized for unauthenticated request', async () => {
  it.concurrent('should return unauthorized for unauthenticated request', async () => {
    mockGetUserId.mockResolvedValue(null)

    const req = createMockRequest('POST', validSearchData)

@@ -218,7 +235,7 @@ describe('Knowledge Search API Route', () => {
    expect(data.error).toBe('Unauthorized')
  })

  it('should return not found for workflow that does not exist', async () => {
  it.concurrent('should return not found for workflow that does not exist', async () => {
    const workflowData = {
      ...validSearchData,
      workflowId: 'nonexistent-workflow',

@@ -268,7 +285,7 @@ describe('Knowledge Search API Route', () => {
    expect(data.error).toBe('Knowledge bases not found: kb-missing')
  })

  it('should validate search parameters', async () => {
  it.concurrent('should validate search parameters', async () => {
    const invalidData = {
      knowledgeBaseIds: '', // Empty string
      query: '', // Empty query

@@ -314,7 +331,7 @@ describe('Knowledge Search API Route', () => {
    expect(data.data.topK).toBe(10) // Default value
  })

  it('should handle OpenAI API errors', async () => {
  it.concurrent('should handle OpenAI API errors', async () => {
    mockGetUserId.mockResolvedValue('user-123')
    mockDbChain.limit.mockResolvedValueOnce(mockKnowledgeBases)

@@ -334,7 +351,7 @@ describe('Knowledge Search API Route', () => {
    expect(data.error).toBe('Failed to perform vector search')
  })

  it('should handle missing OpenAI API key', async () => {
  it.concurrent('should handle missing OpenAI API key', async () => {
    vi.doMock('@/lib/env', () => ({
      env: {
        OPENAI_API_KEY: undefined,

@@ -353,7 +370,7 @@ describe('Knowledge Search API Route', () => {
    expect(data.error).toBe('Failed to perform vector search')
  })

  it('should handle database errors during search', async () => {
  it.concurrent('should handle database errors during search', async () => {
    mockGetUserId.mockResolvedValue('user-123')
    mockDbChain.limit.mockResolvedValueOnce(mockKnowledgeBases)
    mockDbChain.limit.mockRejectedValueOnce(new Error('Database error'))

@@ -375,7 +392,7 @@ describe('Knowledge Search API Route', () => {
    expect(data.error).toBe('Failed to perform vector search')
  })

  it('should handle invalid OpenAI response format', async () => {
  it.concurrent('should handle invalid OpenAI response format', async () => {
    mockGetUserId.mockResolvedValue('user-123')
    mockDbChain.limit.mockResolvedValueOnce(mockKnowledgeBases)

@@ -395,5 +412,124 @@ describe('Knowledge Search API Route', () => {
    expect(response.status).toBe(500)
    expect(data.error).toBe('Failed to perform vector search')
  })

  describe('Cost tracking', () => {
    it.concurrent('should include cost information in successful search response', async () => {
      mockGetUserId.mockResolvedValue('user-123')
      mockDbChain.where.mockResolvedValueOnce(mockKnowledgeBases)
      mockDbChain.limit.mockResolvedValueOnce(mockSearchResults)

      mockFetch.mockResolvedValue({
        ok: true,
        json: () =>
          Promise.resolve({
            data: [{ embedding: mockEmbedding }],
          }),
      })

      const req = createMockRequest('POST', validSearchData)
      const { POST } = await import('./route')
      const response = await POST(req)
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.success).toBe(true)

      // Verify cost information is included
      expect(data.data.cost).toBeDefined()
      expect(data.data.cost.input).toBe(0.00001042)
      expect(data.data.cost.output).toBe(0)
      expect(data.data.cost.total).toBe(0.00001042)
      expect(data.data.cost.tokens).toEqual({
        prompt: 521,
        completion: 0,
        total: 521,
      })
      expect(data.data.cost.model).toBe('text-embedding-3-small')
      expect(data.data.cost.pricing).toEqual({
        input: 0.02,
        output: 0,
        updatedAt: '2025-07-10',
      })
    })

    it('should call cost calculation functions with correct parameters', async () => {
      const { estimateTokenCount } = await import('@/lib/tokenization/estimators')
      const { calculateCost } = await import('@/providers/utils')

      mockGetUserId.mockResolvedValue('user-123')
      mockDbChain.where.mockResolvedValueOnce(mockKnowledgeBases)
      mockDbChain.limit.mockResolvedValueOnce(mockSearchResults)

      mockFetch.mockResolvedValue({
        ok: true,
        json: () =>
          Promise.resolve({
            data: [{ embedding: mockEmbedding }],
          }),
      })

      const req = createMockRequest('POST', validSearchData)
      const { POST } = await import('./route')
      await POST(req)

      // Verify token estimation was called with correct parameters
      expect(estimateTokenCount).toHaveBeenCalledWith('test search query', 'openai')

      // Verify cost calculation was called with correct parameters
      expect(calculateCost).toHaveBeenCalledWith('text-embedding-3-small', 521, 0, false)
    })

    it('should handle cost calculation with different query lengths', async () => {
      const { estimateTokenCount } = await import('@/lib/tokenization/estimators')
      const { calculateCost } = await import('@/providers/utils')

      // Mock different token count for longer query
      vi.mocked(estimateTokenCount).mockReturnValue({
        count: 1042,
        confidence: 'high',
        provider: 'openai',
        method: 'precise',
      })
      vi.mocked(calculateCost).mockReturnValue({
        input: 0.00002084,
        output: 0,
        total: 0.00002084,
        pricing: {
          input: 0.02,
          output: 0,
          updatedAt: '2025-07-10',
        },
      })

      const longQueryData = {
        ...validSearchData,
        query:
          'This is a much longer search query with many more tokens to test cost calculation accuracy',
      }

      mockGetUserId.mockResolvedValue('user-123')
      mockDbChain.where.mockResolvedValueOnce(mockKnowledgeBases)
      mockDbChain.limit.mockResolvedValueOnce(mockSearchResults)

      mockFetch.mockResolvedValue({
        ok: true,
        json: () =>
          Promise.resolve({
            data: [{ embedding: mockEmbedding }],
          }),
      })

      const req = createMockRequest('POST', longQueryData)
      const { POST } = await import('./route')
      const response = await POST(req)
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data.cost.input).toBe(0.00002084)
      expect(data.data.cost.tokens.prompt).toBe(1042)
      expect(calculateCost).toHaveBeenCalledWith('text-embedding-3-small', 1042, 0, false)
    })
  })
})
})
@@ -4,12 +4,37 @@ import { z } from 'zod'
import { retryWithExponentialBackoff } from '@/lib/documents/utils'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'
import { estimateTokenCount } from '@/lib/tokenization/estimators'
import { getUserId } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { embedding, knowledgeBase } from '@/db/schema'
import { calculateCost } from '@/providers/utils'

const logger = createLogger('VectorSearchAPI')

function getTagFilters(filters: Record<string, string>, embedding: any) {
  return Object.entries(filters).map(([key, value]) => {
    switch (key) {
      case 'tag1':
        return sql`LOWER(${embedding.tag1}) = LOWER(${value})`
      case 'tag2':
        return sql`LOWER(${embedding.tag2}) = LOWER(${value})`
      case 'tag3':
        return sql`LOWER(${embedding.tag3}) = LOWER(${value})`
      case 'tag4':
        return sql`LOWER(${embedding.tag4}) = LOWER(${value})`
      case 'tag5':
        return sql`LOWER(${embedding.tag5}) = LOWER(${value})`
      case 'tag6':
        return sql`LOWER(${embedding.tag6}) = LOWER(${value})`
      case 'tag7':
        return sql`LOWER(${embedding.tag7}) = LOWER(${value})`
      default:
        return sql`1=1` // No-op for unknown keys
    }
  })
}
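For a concrete sense of what the helper produces: known keys map to case-insensitive equality conditions on the matching tag column, while unknown keys deliberately degrade to a harmless 1=1 condition instead of throwing, so stray filter keys cannot break the query. A sketch with placeholder values:

// Hypothetical call: two known tags plus one unrecognized key.
const conditions = getTagFilters({ tag1: 'Legal', tag2: '2024', region: 'EU' }, embedding)
// conditions.length === 3; the first two compare tag1/tag2 case-insensitively,
// and the third entry is the sql`1=1` no-op for 'region'.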
class APIError extends Error {
  public status: number

@@ -27,6 +52,17 @@ const VectorSearchSchema = z.object({
  ]),
  query: z.string().min(1, 'Search query is required'),
  topK: z.number().min(1).max(100).default(10),
  filters: z
    .object({
      tag1: z.string().optional(),
      tag2: z.string().optional(),
      tag3: z.string().optional(),
      tag4: z.string().optional(),
      tag5: z.string().optional(),
      tag6: z.string().optional(),
      tag7: z.string().optional(),
    })
    .optional(),
})
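A hypothetical request body exercising the new filters field (IDs and values are placeholders). Note that Zod's default object parsing strips keys other than tag1 through tag7 rather than rejecting them:

// Illustrative search request with tag filters.
const body = {
  knowledgeBaseIds: ['kb-123', 'kb-456'], // the schema union also accepts a single string
  query: 'termination clauses',
  topK: 10,
  filters: { tag1: 'legal', tag3: '2024' },
}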
async function generateSearchEmbedding(query: string): Promise<number[]> {

@@ -102,7 +138,8 @@ async function executeParallelQueries(
  knowledgeBaseIds: string[],
  queryVector: string,
  topK: number,
  distanceThreshold: number
  distanceThreshold: number,
  filters?: Record<string, string>
) {
  const parallelLimit = Math.ceil(topK / knowledgeBaseIds.length) + 5

@@ -113,7 +150,13 @@ async function executeParallelQueries(
        content: embedding.content,
        documentId: embedding.documentId,
        chunkIndex: embedding.chunkIndex,
        metadata: embedding.metadata,
        tag1: embedding.tag1,
        tag2: embedding.tag2,
        tag3: embedding.tag3,
        tag4: embedding.tag4,
        tag5: embedding.tag5,
        tag6: embedding.tag6,
        tag7: embedding.tag7,
        distance: sql<number>`${embedding.embedding} <=> ${queryVector}::vector`.as('distance'),
        knowledgeBaseId: embedding.knowledgeBaseId,
      })

@@ -122,7 +165,8 @@ async function executeParallelQueries(
        and(
          eq(embedding.knowledgeBaseId, kbId),
          eq(embedding.enabled, true),
          sql`${embedding.embedding} <=> ${queryVector}::vector < ${distanceThreshold}`
          sql`${embedding.embedding} <=> ${queryVector}::vector < ${distanceThreshold}`,
          ...(filters ? getTagFilters(filters, embedding) : [])
        )
      )
      .orderBy(sql`${embedding.embedding} <=> ${queryVector}::vector`)

@@ -139,7 +183,8 @@ async function executeSingleQuery(
  knowledgeBaseIds: string[],
  queryVector: string,
  topK: number,
  distanceThreshold: number
  distanceThreshold: number,
  filters?: Record<string, string>
) {
  return await db
    .select({

@@ -147,7 +192,13 @@ async function executeSingleQuery(
      content: embedding.content,
      documentId: embedding.documentId,
      chunkIndex: embedding.chunkIndex,
      metadata: embedding.metadata,
      tag1: embedding.tag1,
      tag2: embedding.tag2,
      tag3: embedding.tag3,
      tag4: embedding.tag4,
      tag5: embedding.tag5,
      tag6: embedding.tag6,
      tag7: embedding.tag7,
      distance: sql<number>`${embedding.embedding} <=> ${queryVector}::vector`.as('distance'),
    })
    .from(embedding)

@@ -155,7 +206,29 @@ async function executeSingleQuery(
      and(
        inArray(embedding.knowledgeBaseId, knowledgeBaseIds),
        eq(embedding.enabled, true),
        sql`${embedding.embedding} <=> ${queryVector}::vector < ${distanceThreshold}`
        sql`${embedding.embedding} <=> ${queryVector}::vector < ${distanceThreshold}`,
        ...(filters
          ? Object.entries(filters).map(([key, value]) => {
              switch (key) {
                case 'tag1':
                  return sql`LOWER(${embedding.tag1}) = LOWER(${value})`
                case 'tag2':
                  return sql`LOWER(${embedding.tag2}) = LOWER(${value})`
                case 'tag3':
                  return sql`LOWER(${embedding.tag3}) = LOWER(${value})`
                case 'tag4':
                  return sql`LOWER(${embedding.tag4}) = LOWER(${value})`
                case 'tag5':
                  return sql`LOWER(${embedding.tag5}) = LOWER(${value})`
                case 'tag6':
                  return sql`LOWER(${embedding.tag6}) = LOWER(${value})`
                case 'tag7':
                  return sql`LOWER(${embedding.tag7}) = LOWER(${value})`
                default:
                  return sql`1=1` // No-op for unknown keys
              }
            })
          : [])
      )
    )
    .orderBy(sql`${embedding.embedding} <=> ${queryVector}::vector`)
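executeSingleQuery inlines the same key-to-column switch that getTagFilters already encapsulates at the top of the file. A behavior-preserving simplification, not part of this commit, would reuse the helper at the call site:

// Sketch only: produces the same conditions as the inline switch above.
.where(
  and(
    inArray(embedding.knowledgeBaseId, knowledgeBaseIds),
    eq(embedding.enabled, true),
    sql`${embedding.embedding} <=> ${queryVector}::vector < ${distanceThreshold}`,
    ...(filters ? getTagFilters(filters, embedding) : [])
  )
)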
@@ -231,7 +304,8 @@ export async function POST(request: NextRequest) {
        foundKbIds,
        queryVector,
        validatedData.topK,
        strategy.distanceThreshold
        strategy.distanceThreshold,
        validatedData.filters
      )
      results = mergeAndRankResults(parallelResults, validatedData.topK)
    } else {

@@ -240,10 +314,24 @@ export async function POST(request: NextRequest) {
        foundKbIds,
        queryVector,
        validatedData.topK,
        strategy.distanceThreshold
        strategy.distanceThreshold,
        validatedData.filters
      )
    }

    // Calculate cost for the embedding (with fallback if calculation fails)
    let cost = null
    let tokenCount = null
    try {
      tokenCount = estimateTokenCount(validatedData.query, 'openai')
      cost = calculateCost('text-embedding-3-small', tokenCount.count, 0, false)
    } catch (error) {
      logger.warn(`[${requestId}] Failed to calculate cost for search query`, {
        error: error instanceof Error ? error.message : 'Unknown error',
      })
      // Continue without cost information rather than failing the search
    }

    return NextResponse.json({
      success: true,
      data: {

@@ -252,7 +340,13 @@ export async function POST(request: NextRequest) {
          content: result.content,
          documentId: result.documentId,
          chunkIndex: result.chunkIndex,
          metadata: result.metadata,
          tag1: result.tag1,
          tag2: result.tag2,
          tag3: result.tag3,
          tag4: result.tag4,
          tag5: result.tag5,
          tag6: result.tag6,
          tag7: result.tag7,
          similarity: 1 - result.distance,
        })),
        query: validatedData.query,

@@ -260,6 +354,22 @@ export async function POST(request: NextRequest) {
        knowledgeBaseId: foundKbIds[0],
        topK: validatedData.topK,
        totalResults: results.length,
        ...(cost && tokenCount
          ? {
              cost: {
                input: cost.input,
                output: cost.output,
                total: cost.total,
                tokens: {
                  prompt: tokenCount.count,
                  completion: 0,
                  total: tokenCount.count,
                },
                model: 'text-embedding-3-small',
                pricing: cost.pricing,
              },
            }
          : {}),
      },
    })
  } catch (validationError) {
@@ -73,6 +73,14 @@ export interface DocumentData {
  enabled: boolean
  deletedAt?: Date | null
  uploadedAt: Date
  // Document tags
  tag1?: string | null
  tag2?: string | null
  tag3?: string | null
  tag4?: string | null
  tag5?: string | null
  tag6?: string | null
  tag7?: string | null
}

export interface EmbeddingData {

@@ -88,7 +96,14 @@ export interface EmbeddingData {
  embeddingModel: string
  startOffset: number
  endOffset: number
  metadata: unknown
  // Tag fields for filtering
  tag1?: string | null
  tag2?: string | null
  tag3?: string | null
  tag4?: string | null
  tag5?: string | null
  tag6?: string | null
  tag7?: string | null
  enabled: boolean
  createdAt: Date
  updatedAt: Date

@@ -445,7 +460,26 @@ export async function processDocumentAsync(
    const chunkTexts = processed.chunks.map((chunk) => chunk.text)
    const embeddings = chunkTexts.length > 0 ? await generateEmbeddings(chunkTexts) : []

    logger.info(`[${documentId}] Embeddings generated, updating document record`)
    logger.info(`[${documentId}] Embeddings generated, fetching document tags`)

    // Fetch document to get tags
    const documentRecord = await db
      .select({
        tag1: document.tag1,
        tag2: document.tag2,
        tag3: document.tag3,
        tag4: document.tag4,
        tag5: document.tag5,
        tag6: document.tag6,
        tag7: document.tag7,
      })
      .from(document)
      .where(eq(document.id, documentId))
      .limit(1)

    const documentTags = documentRecord[0] || {}

    logger.info(`[${documentId}] Creating embedding records with tags`)

    const embeddingRecords = processed.chunks.map((chunk, chunkIndex) => ({
      id: crypto.randomUUID(),

@@ -460,7 +494,14 @@ export async function processDocumentAsync(
      embeddingModel: 'text-embedding-3-small',
      startOffset: chunk.metadata.startIndex,
      endOffset: chunk.metadata.endIndex,
      metadata: {},
      // Copy tags from document
      tag1: documentTags.tag1,
      tag2: documentTags.tag2,
      tag3: documentTags.tag3,
      tag4: documentTags.tag4,
      tag5: documentTags.tag5,
      tag6: documentTags.tag6,
      tag7: documentTags.tag7,
      createdAt: now,
      updatedAt: now,
    }))
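The seven tag columns are now projected field by field in several places (chunk creation, document creation, and the embedding records above). A small helper could centralize that projection; a sketch, with the helper name being illustrative rather than part of the commit:

// Hypothetical helper: project tag1 through tag7 from any record carrying them.
type TagFields = Pick<DocumentData, 'tag1' | 'tag2' | 'tag3' | 'tag4' | 'tag5' | 'tag6' | 'tag7'>

function pickTags(source: Partial<TagFields>): TagFields {
  return {
    tag1: source.tag1 ?? null,
    tag2: source.tag2 ?? null,
    tag3: source.tag3 ?? null,
    tag4: source.tag4 ?? null,
    tag5: source.tag5 ?? null,
    tag6: source.tag6 ?? null,
    tag7: source.tag7 ?? null,
  }
}

// e.g. inside the mapping above: ...pickTags(documentTags)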
apps/sim/app/api/logs/[executionId]/frozen-canvas/route.ts (new file, 76 lines)
@@ -0,0 +1,76 @@
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console-logger'
import { db } from '@/db'
import { workflowExecutionLogs, workflowExecutionSnapshots } from '@/db/schema'

const logger = createLogger('FrozenCanvasAPI')

export async function GET(
  _request: NextRequest,
  { params }: { params: Promise<{ executionId: string }> }
) {
  try {
    const { executionId } = await params

    logger.debug(`Fetching frozen canvas data for execution: ${executionId}`)

    // Get the workflow execution log to find the snapshot
    const [workflowLog] = await db
      .select()
      .from(workflowExecutionLogs)
      .where(eq(workflowExecutionLogs.executionId, executionId))
      .limit(1)

    if (!workflowLog) {
      return NextResponse.json({ error: 'Workflow execution not found' }, { status: 404 })
    }

    // Get the workflow state snapshot
    const [snapshot] = await db
      .select()
      .from(workflowExecutionSnapshots)
      .where(eq(workflowExecutionSnapshots.id, workflowLog.stateSnapshotId))
      .limit(1)

    if (!snapshot) {
      return NextResponse.json({ error: 'Workflow state snapshot not found' }, { status: 404 })
    }

    const response = {
      executionId,
      workflowId: workflowLog.workflowId,
      workflowState: snapshot.stateData,
      executionMetadata: {
        trigger: workflowLog.trigger,
        startedAt: workflowLog.startedAt.toISOString(),
        endedAt: workflowLog.endedAt?.toISOString(),
        totalDurationMs: workflowLog.totalDurationMs,
        blockStats: {
          total: workflowLog.blockCount,
          success: workflowLog.successCount,
          error: workflowLog.errorCount,
          skipped: workflowLog.skippedCount,
        },
        cost: {
          total: workflowLog.totalCost ? Number.parseFloat(workflowLog.totalCost) : null,
          input: workflowLog.totalInputCost ? Number.parseFloat(workflowLog.totalInputCost) : null,
          output: workflowLog.totalOutputCost
            ? Number.parseFloat(workflowLog.totalOutputCost)
            : null,
        },
        totalTokens: workflowLog.totalTokens,
      },
    }

    logger.debug(`Successfully fetched frozen canvas data for execution: ${executionId}`)
    logger.debug(
      `Workflow state contains ${Object.keys((snapshot.stateData as any)?.blocks || {}).length} blocks`
    )

    return NextResponse.json(response)
  } catch (error) {
    logger.error('Error fetching frozen canvas data:', error)
    return NextResponse.json({ error: 'Failed to fetch frozen canvas data' }, { status: 500 })
  }
}
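Following Next.js file routing, the new file serves GET /api/logs/[executionId]/frozen-canvas. A minimal client-side sketch of consuming it (error handling and the result typing are illustrative):

// Fetch the frozen canvas for one execution; the response shape follows
// the route above (workflowState plus executionMetadata).
async function loadFrozenCanvas(executionId: string) {
  const res = await fetch(`/api/logs/${executionId}/frozen-canvas`)
  if (!res.ok) throw new Error(`Frozen canvas fetch failed: ${res.status}`)
  return (await res.json()) as { workflowState: unknown; executionMetadata: unknown }
}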
@@ -1,11 +1,13 @@
import { PutObjectCommand } from '@aws-sdk/client-s3'
import { and, eq, inArray, lt, sql } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'
import { snapshotService } from '@/lib/logs/snapshot-service'
import { getS3Client } from '@/lib/uploads/s3/s3-client'
import { db } from '@/db'
import { subscription, user, workflow, workflowLogs } from '@/db/schema'
import { subscription, user, workflow, workflowExecutionLogs } from '@/db/schema'

export const dynamic = 'force-dynamic'

@@ -17,17 +19,11 @@ const S3_CONFIG = {
  region: env.AWS_REGION || '',
}

export async function GET(request: Request) {
export async function GET(request: NextRequest) {
  try {
    const authHeader = request.headers.get('authorization')

    if (!env.CRON_SECRET) {
      return new NextResponse('Configuration error: Cron secret is not set', { status: 500 })
    }

    if (!authHeader || authHeader !== `Bearer ${env.CRON_SECRET}`) {
      logger.warn('Unauthorized access attempt to logs cleanup endpoint')
      return new NextResponse('Unauthorized', { status: 401 })
    const authError = verifyCronAuth(request, 'logs cleanup')
    if (authError) {
      return authError
    }

    if (!S3_CONFIG.bucket || !S3_CONFIG.region) {

@@ -66,99 +62,143 @@ export async function GET(request: Request) {
    const workflowIds = workflowsQuery.map((w) => w.id)

    const results = {
      total: 0,
      archived: 0,
      archiveFailed: 0,
      deleted: 0,
      deleteFailed: 0,
      enhancedLogs: {
        total: 0,
        archived: 0,
        archiveFailed: 0,
        deleted: 0,
        deleteFailed: 0,
      },
      snapshots: {
        cleaned: 0,
        cleanupFailed: 0,
      },
    }

    const startTime = Date.now()
    const MAX_BATCHES = 10

    // Process enhanced logging cleanup
    let batchesProcessed = 0
    let hasMoreLogs = true

    logger.info(`Starting enhanced logs cleanup for ${workflowIds.length} workflows`)

    while (hasMoreLogs && batchesProcessed < MAX_BATCHES) {
      const oldLogs = await db
      // Query enhanced execution logs that need cleanup
      const oldEnhancedLogs = await db
        .select({
          id: workflowLogs.id,
          workflowId: workflowLogs.workflowId,
          executionId: workflowLogs.executionId,
          level: workflowLogs.level,
          message: workflowLogs.message,
          duration: workflowLogs.duration,
          trigger: workflowLogs.trigger,
          createdAt: workflowLogs.createdAt,
          metadata: workflowLogs.metadata,
          id: workflowExecutionLogs.id,
          workflowId: workflowExecutionLogs.workflowId,
          executionId: workflowExecutionLogs.executionId,
          stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
          level: workflowExecutionLogs.level,
          message: workflowExecutionLogs.message,
          trigger: workflowExecutionLogs.trigger,
          startedAt: workflowExecutionLogs.startedAt,
          endedAt: workflowExecutionLogs.endedAt,
          totalDurationMs: workflowExecutionLogs.totalDurationMs,
          blockCount: workflowExecutionLogs.blockCount,
          successCount: workflowExecutionLogs.successCount,
          errorCount: workflowExecutionLogs.errorCount,
          skippedCount: workflowExecutionLogs.skippedCount,
          totalCost: workflowExecutionLogs.totalCost,
          totalInputCost: workflowExecutionLogs.totalInputCost,
          totalOutputCost: workflowExecutionLogs.totalOutputCost,
          totalTokens: workflowExecutionLogs.totalTokens,
          metadata: workflowExecutionLogs.metadata,
          createdAt: workflowExecutionLogs.createdAt,
        })
        .from(workflowLogs)
        .from(workflowExecutionLogs)
        .where(
          and(
            inArray(workflowLogs.workflowId, workflowIds),
            lt(workflowLogs.createdAt, retentionDate)
            inArray(workflowExecutionLogs.workflowId, workflowIds),
            lt(workflowExecutionLogs.createdAt, retentionDate)
          )
        )
        .limit(BATCH_SIZE)

      results.total += oldLogs.length
      results.enhancedLogs.total += oldEnhancedLogs.length

      for (const log of oldLogs) {
      for (const log of oldEnhancedLogs) {
        const today = new Date().toISOString().split('T')[0]

        const logKey = `archived-logs/${today}/${log.id}.json`
        const logData = JSON.stringify(log)
        // Archive enhanced log with more detailed structure
        const enhancedLogKey = `archived-enhanced-logs/${today}/${log.id}.json`
        const enhancedLogData = JSON.stringify({
          ...log,
          archivedAt: new Date().toISOString(),
          logType: 'enhanced',
        })

        try {
          await getS3Client().send(
            new PutObjectCommand({
              Bucket: S3_CONFIG.bucket,
              Key: logKey,
              Body: logData,
              Key: enhancedLogKey,
              Body: enhancedLogData,
              ContentType: 'application/json',
              Metadata: {
                logId: String(log.id),
                workflowId: String(log.workflowId),
                executionId: String(log.executionId),
                logType: 'enhanced',
                archivedAt: new Date().toISOString(),
              },
            })
          )

          results.archived++
          results.enhancedLogs.archived++

          try {
            // Delete enhanced log (will cascade to workflowExecutionBlocks due to foreign key)
            const deleteResult = await db
              .delete(workflowLogs)
              .where(eq(workflowLogs.id, log.id))
              .returning({ id: workflowLogs.id })
              .delete(workflowExecutionLogs)
              .where(eq(workflowExecutionLogs.id, log.id))
              .returning({ id: workflowExecutionLogs.id })

            if (deleteResult.length > 0) {
              results.deleted++
              results.enhancedLogs.deleted++
            } else {
              results.deleteFailed++
              logger.warn(`Failed to delete log ${log.id} after archiving: No rows deleted`)
              results.enhancedLogs.deleteFailed++
              logger.warn(
                `Failed to delete enhanced log ${log.id} after archiving: No rows deleted`
              )
            }
          } catch (deleteError) {
            results.deleteFailed++
            logger.error(`Error deleting log ${log.id} after archiving:`, { deleteError })
            results.enhancedLogs.deleteFailed++
            logger.error(`Error deleting enhanced log ${log.id} after archiving:`, { deleteError })
          }
        } catch (archiveError) {
          results.archiveFailed++
          logger.error(`Failed to archive log ${log.id}:`, { archiveError })
          results.enhancedLogs.archiveFailed++
          logger.error(`Failed to archive enhanced log ${log.id}:`, { archiveError })
        }
      }

      batchesProcessed++
      hasMoreLogs = oldLogs.length === BATCH_SIZE
      hasMoreLogs = oldEnhancedLogs.length === BATCH_SIZE

      logger.info(`Processed batch ${batchesProcessed}: ${oldLogs.length} logs`)
      logger.info(
        `Processed enhanced logs batch ${batchesProcessed}: ${oldEnhancedLogs.length} logs`
      )
    }

    // Cleanup orphaned snapshots
    try {
      const snapshotRetentionDays = Number(env.FREE_PLAN_LOG_RETENTION_DAYS || '7') + 1 // Keep snapshots 1 day longer
      const cleanedSnapshots = await snapshotService.cleanupOrphanedSnapshots(snapshotRetentionDays)
      results.snapshots.cleaned = cleanedSnapshots
      logger.info(`Cleaned up ${cleanedSnapshots} orphaned snapshots`)
    } catch (snapshotError) {
      results.snapshots.cleanupFailed = 1
      logger.error('Error cleaning up orphaned snapshots:', { snapshotError })
    }

    const timeElapsed = (Date.now() - startTime) / 1000
    const reachedLimit = batchesProcessed >= MAX_BATCHES && hasMoreLogs

    return NextResponse.json({
      message: `Processed ${batchesProcessed} batches (${results.total} logs) in ${timeElapsed.toFixed(2)}s${reachedLimit ? ' (batch limit reached)' : ''}`,
      message: `Processed ${batchesProcessed} enhanced log batches (${results.enhancedLogs.total} logs) in ${timeElapsed.toFixed(2)}s${reachedLimit ? ' (batch limit reached)' : ''}`,
      results,
      complete: !hasMoreLogs,
      batchLimitReached: reachedLimit,
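The route remains a cron-authenticated GET. Assuming verifyCronAuth keeps the Bearer-token contract the removed inline check enforced, and with the route path inferred rather than shown in the diff, a manual trigger would look roughly like:

// Hypothetical manual trigger of the cleanup job.
const res = await fetch('/api/logs/cleanup', {
  headers: { Authorization: `Bearer ${process.env.CRON_SECRET}` },
})
console.log(await res.json()) // { message, results, complete, batchLimitReached }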
apps/sim/app/api/logs/enhanced/route.ts (new file, 499 lines)
@@ -0,0 +1,499 @@
import { and, desc, eq, gte, inArray, lte, or, type SQL, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console-logger'
import { db } from '@/db'
import { workflow, workflowExecutionBlocks, workflowExecutionLogs } from '@/db/schema'

const logger = createLogger('EnhancedLogsAPI')

// Helper function to extract block executions from trace spans
function extractBlockExecutionsFromTraceSpans(traceSpans: any[]): any[] {
  const blockExecutions: any[] = []

  function processSpan(span: any) {
    if (span.blockId) {
      blockExecutions.push({
        id: span.id,
        blockId: span.blockId,
        blockName: span.name || '',
        blockType: span.type,
        startedAt: span.startTime,
        endedAt: span.endTime,
        durationMs: span.duration || 0,
        status: span.status || 'success',
        errorMessage: span.output?.error || undefined,
        inputData: span.input || {},
        outputData: span.output || {},
        cost: span.cost || undefined,
        metadata: {},
      })
    }

    // Process children recursively
    if (span.children && Array.isArray(span.children)) {
      span.children.forEach(processSpan)
    }
  }

  traceSpans.forEach(processSpan)
  return blockExecutions
}

export const dynamic = 'force-dynamic'
export const revalidate = 0

const QueryParamsSchema = z.object({
  includeWorkflow: z.coerce.boolean().optional().default(false),
  includeBlocks: z.coerce.boolean().optional().default(false),
  limit: z.coerce.number().optional().default(100),
  offset: z.coerce.number().optional().default(0),
  level: z.string().optional(),
  workflowIds: z.string().optional(), // Comma-separated list of workflow IDs
  folderIds: z.string().optional(), // Comma-separated list of folder IDs
  triggers: z.string().optional(), // Comma-separated list of trigger types
  startDate: z.string().optional(),
  endDate: z.string().optional(),
  search: z.string().optional(),
})

export async function GET(request: NextRequest) {
  const requestId = crypto.randomUUID().slice(0, 8)

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      logger.warn(`[${requestId}] Unauthorized enhanced logs access attempt`)
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const userId = session.user.id

    try {
      const { searchParams } = new URL(request.url)
      const params = QueryParamsSchema.parse(Object.fromEntries(searchParams.entries()))

      // Get user's workflows
      const userWorkflows = await db
        .select({ id: workflow.id, folderId: workflow.folderId })
        .from(workflow)
        .where(eq(workflow.userId, userId))

      const userWorkflowIds = userWorkflows.map((w) => w.id)

      if (userWorkflowIds.length === 0) {
        return NextResponse.json({ data: [], total: 0 }, { status: 200 })
      }

      // Build conditions for enhanced logs
      let conditions: SQL | undefined = inArray(workflowExecutionLogs.workflowId, userWorkflowIds)

      // Filter by level
      if (params.level && params.level !== 'all') {
        conditions = and(conditions, eq(workflowExecutionLogs.level, params.level))
      }

      // Filter by specific workflow IDs
      if (params.workflowIds) {
        const workflowIds = params.workflowIds.split(',').filter(Boolean)
        const filteredWorkflowIds = workflowIds.filter((id) => userWorkflowIds.includes(id))
        if (filteredWorkflowIds.length > 0) {
          conditions = and(
            conditions,
            inArray(workflowExecutionLogs.workflowId, filteredWorkflowIds)
          )
        }
      }

      // Filter by folder IDs
      if (params.folderIds) {
        const folderIds = params.folderIds.split(',').filter(Boolean)
        const workflowsInFolders = userWorkflows
          .filter((w) => w.folderId && folderIds.includes(w.folderId))
          .map((w) => w.id)

        if (workflowsInFolders.length > 0) {
          conditions = and(
            conditions,
            inArray(workflowExecutionLogs.workflowId, workflowsInFolders)
          )
        }
      }

      // Filter by triggers
      if (params.triggers) {
        const triggers = params.triggers.split(',').filter(Boolean)
        if (triggers.length > 0 && !triggers.includes('all')) {
          conditions = and(conditions, inArray(workflowExecutionLogs.trigger, triggers))
        }
      }

      // Filter by date range
      if (params.startDate) {
        conditions = and(
          conditions,
          gte(workflowExecutionLogs.startedAt, new Date(params.startDate))
        )
      }
      if (params.endDate) {
        conditions = and(conditions, lte(workflowExecutionLogs.startedAt, new Date(params.endDate)))
      }

      // Filter by search query
      if (params.search) {
        const searchTerm = `%${params.search}%`
        conditions = and(
          conditions,
          or(
            sql`${workflowExecutionLogs.message} ILIKE ${searchTerm}`,
            sql`${workflowExecutionLogs.executionId} ILIKE ${searchTerm}`
          )
        )
      }

      // Execute the query
      const logs = await db
        .select()
        .from(workflowExecutionLogs)
        .where(conditions)
        .orderBy(desc(workflowExecutionLogs.startedAt))
        .limit(params.limit)
        .offset(params.offset)

      // Get total count for pagination
      const countResult = await db
        .select({ count: sql<number>`count(*)` })
        .from(workflowExecutionLogs)
        .where(conditions)

      const count = countResult[0]?.count || 0

      // Get block executions for all workflow executions
      const executionIds = logs.map((log) => log.executionId)
      let blockExecutionsByExecution: Record<string, any[]> = {}

      if (executionIds.length > 0) {
        const blockLogs = await db
          .select()
          .from(workflowExecutionBlocks)
          .where(inArray(workflowExecutionBlocks.executionId, executionIds))
          .orderBy(workflowExecutionBlocks.startedAt)

        // Group block logs by execution ID
        blockExecutionsByExecution = blockLogs.reduce(
          (acc, blockLog) => {
            if (!acc[blockLog.executionId]) {
              acc[blockLog.executionId] = []
            }
            acc[blockLog.executionId].push({
              id: blockLog.id,
              blockId: blockLog.blockId,
              blockName: blockLog.blockName || '',
              blockType: blockLog.blockType,
              startedAt: blockLog.startedAt.toISOString(),
              endedAt: blockLog.endedAt?.toISOString() || blockLog.startedAt.toISOString(),
              durationMs: blockLog.durationMs || 0,
              status: blockLog.status,
              errorMessage: blockLog.errorMessage || undefined,
              errorStackTrace: blockLog.errorStackTrace || undefined,
              inputData: blockLog.inputData,
              outputData: blockLog.outputData,
              cost: blockLog.costTotal
                ? {
                    input: Number(blockLog.costInput) || 0,
                    output: Number(blockLog.costOutput) || 0,
                    total: Number(blockLog.costTotal) || 0,
                    tokens: {
                      prompt: blockLog.tokensPrompt || 0,
                      completion: blockLog.tokensCompletion || 0,
                      total: blockLog.tokensTotal || 0,
                    },
                    model: blockLog.modelUsed || '',
                  }
                : undefined,
              metadata: blockLog.metadata || {},
            })
            return acc
          },
          {} as Record<string, any[]>
        )
      }

      // Create clean trace spans from block executions
      const createTraceSpans = (blockExecutions: any[]) => {
        return blockExecutions.map((block, index) => {
          // For error blocks, include error information in the output
          let output = block.outputData
          if (block.status === 'error' && block.errorMessage) {
            output = {
              ...output,
              error: block.errorMessage,
              stackTrace: block.errorStackTrace,
            }
          }

          return {
            id: block.id,
            name: `Block ${block.blockName || block.blockType} (${block.blockType})`,
            type: block.blockType,
            duration: block.durationMs,
            startTime: block.startedAt,
            endTime: block.endedAt,
            status: block.status === 'success' ? 'success' : 'error',
            blockId: block.blockId,
            input: block.inputData,
            output,
            tokens: block.cost?.tokens?.total || 0,
            relativeStartMs: index * 100,
            children: [],
            toolCalls: [],
          }
        })
      }

      // Extract cost information from block executions
      const extractCostSummary = (blockExecutions: any[]) => {
        let totalCost = 0
        let totalInputCost = 0
        let totalOutputCost = 0
        let totalTokens = 0
        let totalPromptTokens = 0
        let totalCompletionTokens = 0
        const models = new Map()

        blockExecutions.forEach((block) => {
          if (block.cost) {
            totalCost += Number(block.cost.total) || 0
            totalInputCost += Number(block.cost.input) || 0
            totalOutputCost += Number(block.cost.output) || 0
            totalTokens += block.cost.tokens?.total || 0
            totalPromptTokens += block.cost.tokens?.prompt || 0
            totalCompletionTokens += block.cost.tokens?.completion || 0

            // Track per-model costs
            if (block.cost.model) {
              if (!models.has(block.cost.model)) {
                models.set(block.cost.model, {
                  input: 0,
                  output: 0,
                  total: 0,
                  tokens: { prompt: 0, completion: 0, total: 0 },
                })
              }
              const modelCost = models.get(block.cost.model)
              modelCost.input += Number(block.cost.input) || 0
              modelCost.output += Number(block.cost.output) || 0
              modelCost.total += Number(block.cost.total) || 0
              modelCost.tokens.prompt += block.cost.tokens?.prompt || 0
              modelCost.tokens.completion += block.cost.tokens?.completion || 0
              modelCost.tokens.total += block.cost.tokens?.total || 0
            }
          }
        })

        return {
          total: totalCost,
          input: totalInputCost,
          output: totalOutputCost,
          tokens: {
            total: totalTokens,
            prompt: totalPromptTokens,
            completion: totalCompletionTokens,
          },
          models: Object.fromEntries(models), // Convert Map to object for JSON serialization
        }
      }

      // Transform to clean enhanced log format
      const enhancedLogs = logs.map((log) => {
        const blockExecutions = blockExecutionsByExecution[log.executionId] || []

        // Use stored trace spans from metadata if available, otherwise create from block executions
        const storedTraceSpans = (log.metadata as any)?.traceSpans
        const traceSpans =
          storedTraceSpans && Array.isArray(storedTraceSpans) && storedTraceSpans.length > 0
            ? storedTraceSpans
            : createTraceSpans(blockExecutions)

        // Use extracted cost summary if available, otherwise use stored values
        const costSummary =
          blockExecutions.length > 0
            ? extractCostSummary(blockExecutions)
            : {
                input: Number(log.totalInputCost) || 0,
                output: Number(log.totalOutputCost) || 0,
                total: Number(log.totalCost) || 0,
                tokens: {
                  total: log.totalTokens || 0,
                  prompt: (log.metadata as any)?.tokenBreakdown?.prompt || 0,
                  completion: (log.metadata as any)?.tokenBreakdown?.completion || 0,
                },
                models: (log.metadata as any)?.models || {},
              }

        return {
          id: log.id,
          workflowId: log.workflowId,
          executionId: log.executionId,
          level: log.level,
          message: log.message,
          duration: log.totalDurationMs ? `${log.totalDurationMs}ms` : null,
          trigger: log.trigger,
          createdAt: log.startedAt.toISOString(),
          metadata: {
            totalDuration: log.totalDurationMs,
            cost: costSummary,
            blockStats: {
              total: log.blockCount,
              success: log.successCount,
              error: log.errorCount,
              skipped: log.skippedCount,
            },
            traceSpans,
            blockExecutions,
            enhanced: true,
          },
        }
      })

      if (params.includeWorkflow) {
        const workflowIds = [...new Set(logs.map((log) => log.workflowId))]
        const workflowConditions = inArray(workflow.id, workflowIds)

        const workflowData = await db.select().from(workflow).where(workflowConditions)
        const workflowMap = new Map(workflowData.map((w) => [w.id, w]))

        const logsWithWorkflow = enhancedLogs.map((log) => ({
          ...log,
          workflow: workflowMap.get(log.workflowId) || null,
        }))

        return NextResponse.json(
          {
            data: logsWithWorkflow,
            total: Number(count),
            page: Math.floor(params.offset / params.limit) + 1,
            pageSize: params.limit,
            totalPages: Math.ceil(Number(count) / params.limit),
          },
          { status: 200 }
        )
      }

      // Include block execution data if requested
      if (params.includeBlocks) {
        const executionIds = logs.map((log) => log.executionId)

        if (executionIds.length > 0) {
          const blockLogs = await db
            .select()
            .from(workflowExecutionBlocks)
            .where(inArray(workflowExecutionBlocks.executionId, executionIds))
            .orderBy(workflowExecutionBlocks.startedAt)

          // Group block logs by execution ID
          const blockLogsByExecution = blockLogs.reduce(
            (acc, blockLog) => {
              if (!acc[blockLog.executionId]) {
                acc[blockLog.executionId] = []
              }
              acc[blockLog.executionId].push({
                id: blockLog.id,
                blockId: blockLog.blockId,
                blockName: blockLog.blockName || '',
                blockType: blockLog.blockType,
                startedAt: blockLog.startedAt.toISOString(),
                endedAt: blockLog.endedAt?.toISOString() || blockLog.startedAt.toISOString(),
                durationMs: blockLog.durationMs || 0,
                status: blockLog.status,
                errorMessage: blockLog.errorMessage || undefined,
                inputData: blockLog.inputData,
                outputData: blockLog.outputData,
                cost: blockLog.costTotal
                  ? {
                      input: Number(blockLog.costInput) || 0,
                      output: Number(blockLog.costOutput) || 0,
                      total: Number(blockLog.costTotal) || 0,
                      tokens: {
                        prompt: blockLog.tokensPrompt || 0,
                        completion: blockLog.tokensCompletion || 0,
                        total: blockLog.tokensTotal || 0,
                      },
                      model: blockLog.modelUsed || '',
                    }
                  : undefined,
              })
              return acc
            },
            {} as Record<string, any[]>
          )

          // For executions with no block logs in the database,
          // extract block executions from stored trace spans in metadata
          logs.forEach((log) => {
            if (
              !blockLogsByExecution[log.executionId] ||
              blockLogsByExecution[log.executionId].length === 0
            ) {
              const storedTraceSpans = (log.metadata as any)?.traceSpans
              if (storedTraceSpans && Array.isArray(storedTraceSpans)) {
                blockLogsByExecution[log.executionId] =
                  extractBlockExecutionsFromTraceSpans(storedTraceSpans)
              }
            }
          })

          // Add block logs to metadata
          const logsWithBlocks = enhancedLogs.map((log) => ({
            ...log,
            metadata: {
              ...log.metadata,
              blockExecutions: blockLogsByExecution[log.executionId] || [],
            },
          }))

          return NextResponse.json(
            {
              data: logsWithBlocks,
              total: Number(count),
              page: Math.floor(params.offset / params.limit) + 1,
              pageSize: params.limit,
              totalPages: Math.ceil(Number(count) / params.limit),
            },
            { status: 200 }
          )
        }
      }

      // Return basic logs
      return NextResponse.json(
        {
          data: enhancedLogs,
          total: Number(count),
          page: Math.floor(params.offset / params.limit) + 1,
          pageSize: params.limit,
          totalPages: Math.ceil(Number(count) / params.limit),
        },
        { status: 200 }
      )
    } catch (validationError) {
      if (validationError instanceof z.ZodError) {
        logger.warn(`[${requestId}] Invalid enhanced logs request parameters`, {
          errors: validationError.errors,
        })
        return NextResponse.json(
          {
            error: 'Invalid request parameters',
            details: validationError.errors,
          },
          { status: 400 }
        )
      }
      throw validationError
    }
  } catch (error: any) {
    logger.error(`[${requestId}] Enhanced logs fetch error`, error)
    return NextResponse.json({ error: error.message }, { status: 500 })
  }
}
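A sketch of querying the endpoint, whose path follows from the file location apps/sim/app/api/logs/enhanced/route.ts; parameter names come from QueryParamsSchema above, and the values are placeholders:

// Illustrative client query against the enhanced logs API.
const params = new URLSearchParams({
  includeBlocks: 'true',
  limit: '20',
  triggers: 'api,webhook',
  search: 'timeout',
})
const res = await fetch(`/api/logs/enhanced?${params}`)
const { data, total, totalPages } = await res.json()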
@@ -4,7 +4,7 @@ import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
import { db } from '@/db'
|
||||
import * as schema from '@/db/schema'
|
||||
import { marketplace } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('MarketplaceInfoAPI')
|
||||
|
||||
@@ -24,8 +24,8 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
// Fetch marketplace data for the workflow
|
||||
const marketplaceEntry = await db
|
||||
.select()
|
||||
.from(schema.marketplace)
|
||||
.where(eq(schema.marketplace.workflowId, id))
|
||||
.from(marketplace)
|
||||
.where(eq(marketplace.workflowId, id))
|
||||
.limit(1)
|
||||
.then((rows) => rows[0])
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ import { getSession } from '@/lib/auth'
 import { createLogger } from '@/lib/logs/console-logger'
 import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
 import { db } from '@/db'
-import * as schema from '@/db/schema'
+import { marketplace, workflow } from '@/db/schema'

 const logger = createLogger('MarketplaceUnpublishAPI')

@@ -34,13 +34,13 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
   // Get the marketplace entry using the marketplace ID
   const marketplaceEntry = await db
     .select({
-      id: schema.marketplace.id,
-      workflowId: schema.marketplace.workflowId,
-      authorId: schema.marketplace.authorId,
-      name: schema.marketplace.name,
+      id: marketplace.id,
+      workflowId: marketplace.workflowId,
+      authorId: marketplace.authorId,
+      name: marketplace.name,
     })
-    .from(schema.marketplace)
-    .where(eq(schema.marketplace.id, id))
+    .from(marketplace)
+    .where(eq(marketplace.id, id))
     .limit(1)
     .then((rows) => rows[0])

@@ -60,36 +60,33 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
   const workflowId = marketplaceEntry.workflowId

   // Verify the workflow exists and belongs to the user
-  const workflow = await db
+  const workflowEntry = await db
     .select({
-      id: schema.workflow.id,
-      userId: schema.workflow.userId,
+      id: workflow.id,
+      userId: workflow.userId,
     })
-    .from(schema.workflow)
-    .where(eq(schema.workflow.id, workflowId))
+    .from(workflow)
+    .where(eq(workflow.id, workflowId))
     .limit(1)
     .then((rows) => rows[0])

-  if (!workflow) {
+  if (!workflowEntry) {
     logger.warn(`[${requestId}] Associated workflow not found: ${workflowId}`)
     // We'll still delete the marketplace entry even if the workflow is missing
-  } else if (workflow.userId !== userId) {
+  } else if (workflowEntry.userId !== userId) {
     logger.warn(
-      `[${requestId}] Workflow ${workflowId} belongs to user ${workflow.userId}, not current user ${userId}`
+      `[${requestId}] Workflow ${workflowId} belongs to user ${workflowEntry.userId}, not current user ${userId}`
     )
     return createErrorResponse('You do not have permission to unpublish this workflow', 403)
   }

   try {
     // Delete the marketplace entry - this is the primary action
-    await db.delete(schema.marketplace).where(eq(schema.marketplace.id, id))
+    await db.delete(marketplace).where(eq(marketplace.id, id))

     // Update the workflow to mark it as unpublished if it exists
-    if (workflow) {
-      await db
-        .update(schema.workflow)
-        .set({ isPublished: false })
-        .where(eq(schema.workflow.id, workflowId))
+    if (workflowEntry) {
+      await db.update(workflow).set({ isPublished: false }).where(eq(workflow.id, workflowId))
     }

     logger.info(
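
Note that the unpublish flow runs the marketplace delete and the workflow update as two separate statements, so a failure between them would leave `isPublished` stale. A hedged sketch of the same pair of writes inside drizzle-orm's `db.transaction`, reusing the bindings from the snippet above (whether this project's db instance is configured for transactions is an assumption):

// Sketch only: both writes commit together or not at all.
await db.transaction(async (tx) => {
  await tx.delete(marketplace).where(eq(marketplace.id, id))
  if (workflowEntry) {
    await tx.update(workflow).set({ isPublished: false }).where(eq(workflow.id, workflowId))
  }
})
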
@@ -3,7 +3,7 @@ import type { NextRequest } from 'next/server'
 import { createLogger } from '@/lib/logs/console-logger'
 import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
 import { db } from '@/db'
-import * as schema from '@/db/schema'
+import { marketplace } from '@/db/schema'

 const logger = createLogger('MarketplaceViewAPI')

@@ -22,10 +22,10 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
   // Find the marketplace entry for this marketplace ID
   const marketplaceEntry = await db
     .select({
-      id: schema.marketplace.id,
+      id: marketplace.id,
     })
-    .from(schema.marketplace)
-    .where(eq(schema.marketplace.id, id))
+    .from(marketplace)
+    .where(eq(marketplace.id, id))
     .limit(1)
     .then((rows) => rows[0])

@@ -36,11 +36,11 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{

   // Increment the view count for this workflow
   await db
-    .update(schema.marketplace)
+    .update(marketplace)
     .set({
-      views: sql`${schema.marketplace.views} + 1`,
+      views: sql`${marketplace.views} + 1`,
     })
-    .where(eq(schema.marketplace.id, id))
+    .where(eq(marketplace.id, id))

   logger.info(`[${requestId}] Incremented view count for marketplace entry: ${id}`)
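
The sql template in the increment above keeps the arithmetic inside the database, so concurrent view events cannot overwrite each other the way a read-then-write would. Roughly what it executes, as a drop-in for the snippet above (names as in the diff):

// Compiles to approximately: UPDATE marketplace SET views = views + 1 WHERE id = $1
await db
  .update(marketplace)
  .set({ views: sql`${marketplace.views} + 1` })
  .where(eq(marketplace.id, id))

// The racy alternative it avoids: read views, add 1 in JS, write it back.
// Two overlapping requests would both write n + 1 and drop a view.
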
@@ -4,7 +4,7 @@ import { createLogger } from '@/lib/logs/console-logger'
 import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
 import { CATEGORIES } from '@/app/workspace/[workspaceId]/marketplace/constants/categories'
 import { db } from '@/db'
-import * as schema from '@/db/schema'
+import { marketplace } from '@/db/schema'

 const logger = createLogger('MarketplaceWorkflowsAPI')

@@ -50,39 +50,39 @@ export async function GET(request: NextRequest) {
       // Query with state included
       marketplaceEntry = await db
         .select({
-          id: schema.marketplace.id,
-          workflowId: schema.marketplace.workflowId,
-          name: schema.marketplace.name,
-          description: schema.marketplace.description,
-          authorId: schema.marketplace.authorId,
-          authorName: schema.marketplace.authorName,
-          state: schema.marketplace.state,
-          views: schema.marketplace.views,
-          category: schema.marketplace.category,
-          createdAt: schema.marketplace.createdAt,
-          updatedAt: schema.marketplace.updatedAt,
+          id: marketplace.id,
+          workflowId: marketplace.workflowId,
+          name: marketplace.name,
+          description: marketplace.description,
+          authorId: marketplace.authorId,
+          authorName: marketplace.authorName,
+          state: marketplace.state,
+          views: marketplace.views,
+          category: marketplace.category,
+          createdAt: marketplace.createdAt,
+          updatedAt: marketplace.updatedAt,
         })
-        .from(schema.marketplace)
-        .where(eq(schema.marketplace.workflowId, workflowId))
+        .from(marketplace)
+        .where(eq(marketplace.workflowId, workflowId))
         .limit(1)
         .then((rows) => rows[0])
     } else {
       // Query without state
       marketplaceEntry = await db
         .select({
-          id: schema.marketplace.id,
-          workflowId: schema.marketplace.workflowId,
-          name: schema.marketplace.name,
-          description: schema.marketplace.description,
-          authorId: schema.marketplace.authorId,
-          authorName: schema.marketplace.authorName,
-          views: schema.marketplace.views,
-          category: schema.marketplace.category,
-          createdAt: schema.marketplace.createdAt,
-          updatedAt: schema.marketplace.updatedAt,
+          id: marketplace.id,
+          workflowId: marketplace.workflowId,
+          name: marketplace.name,
+          description: marketplace.description,
+          authorId: marketplace.authorId,
+          authorName: marketplace.authorName,
+          views: marketplace.views,
+          category: marketplace.category,
+          createdAt: marketplace.createdAt,
+          updatedAt: marketplace.updatedAt,
         })
-        .from(schema.marketplace)
-        .where(eq(schema.marketplace.workflowId, workflowId))
+        .from(marketplace)
+        .where(eq(marketplace.workflowId, workflowId))
         .limit(1)
         .then((rows) => rows[0])
     }
@@ -114,39 +114,39 @@ export async function GET(request: NextRequest) {
       // Query with state included
       marketplaceEntry = await db
         .select({
-          id: schema.marketplace.id,
-          workflowId: schema.marketplace.workflowId,
-          name: schema.marketplace.name,
-          description: schema.marketplace.description,
-          authorId: schema.marketplace.authorId,
-          authorName: schema.marketplace.authorName,
-          state: schema.marketplace.state,
-          views: schema.marketplace.views,
-          category: schema.marketplace.category,
-          createdAt: schema.marketplace.createdAt,
-          updatedAt: schema.marketplace.updatedAt,
+          id: marketplace.id,
+          workflowId: marketplace.workflowId,
+          name: marketplace.name,
+          description: marketplace.description,
+          authorId: marketplace.authorId,
+          authorName: marketplace.authorName,
+          state: marketplace.state,
+          views: marketplace.views,
+          category: marketplace.category,
+          createdAt: marketplace.createdAt,
+          updatedAt: marketplace.updatedAt,
         })
-        .from(schema.marketplace)
-        .where(eq(schema.marketplace.id, marketplaceId))
+        .from(marketplace)
+        .where(eq(marketplace.id, marketplaceId))
         .limit(1)
         .then((rows) => rows[0])
     } else {
       // Query without state
       marketplaceEntry = await db
         .select({
-          id: schema.marketplace.id,
-          workflowId: schema.marketplace.workflowId,
-          name: schema.marketplace.name,
-          description: schema.marketplace.description,
-          authorId: schema.marketplace.authorId,
-          authorName: schema.marketplace.authorName,
-          views: schema.marketplace.views,
-          category: schema.marketplace.category,
-          createdAt: schema.marketplace.createdAt,
-          updatedAt: schema.marketplace.updatedAt,
+          id: marketplace.id,
+          workflowId: marketplace.workflowId,
+          name: marketplace.name,
+          description: marketplace.description,
+          authorId: marketplace.authorId,
+          authorName: marketplace.authorName,
+          views: marketplace.views,
+          category: marketplace.category,
+          createdAt: marketplace.createdAt,
+          updatedAt: marketplace.updatedAt,
         })
-        .from(schema.marketplace)
-        .where(eq(schema.marketplace.id, marketplaceId))
+        .from(marketplace)
+        .where(eq(marketplace.id, marketplaceId))
         .limit(1)
         .then((rows) => rows[0])
     }
@@ -183,21 +183,19 @@ export async function GET(request: NextRequest) {

   // Define common fields to select
   const baseFields = {
-    id: schema.marketplace.id,
-    workflowId: schema.marketplace.workflowId,
-    name: schema.marketplace.name,
-    description: schema.marketplace.description,
-    authorName: schema.marketplace.authorName,
-    views: schema.marketplace.views,
-    category: schema.marketplace.category,
-    createdAt: schema.marketplace.createdAt,
-    updatedAt: schema.marketplace.updatedAt,
+    id: marketplace.id,
+    workflowId: marketplace.workflowId,
+    name: marketplace.name,
+    description: marketplace.description,
+    authorName: marketplace.authorName,
+    views: marketplace.views,
+    category: marketplace.category,
+    createdAt: marketplace.createdAt,
+    updatedAt: marketplace.updatedAt,
   }

   // Add state if requested
-  const selectFields = includeState
-    ? { ...baseFields, state: schema.marketplace.state }
-    : baseFields
+  const selectFields = includeState ? { ...baseFields, state: marketplace.state } : baseFields

   // Determine which sections to fetch
   const sections = sectionParam ? sectionParam.split(',') : ['popular', 'recent', 'byCategory']
@@ -206,8 +204,8 @@ export async function GET(request: NextRequest) {
   if (sections.includes('popular')) {
     result.popular = await db
       .select(selectFields)
-      .from(schema.marketplace)
-      .orderBy(desc(schema.marketplace.views))
+      .from(marketplace)
+      .orderBy(desc(marketplace.views))
       .limit(limit)
   }

@@ -215,8 +213,8 @@ export async function GET(request: NextRequest) {
   if (sections.includes('recent')) {
     result.recent = await db
       .select(selectFields)
-      .from(schema.marketplace)
-      .orderBy(desc(schema.marketplace.createdAt))
+      .from(marketplace)
+      .orderBy(desc(marketplace.createdAt))
       .limit(limit)
   }

@@ -255,9 +253,9 @@ export async function GET(request: NextRequest) {
       categoriesToFetch.map(async (categoryValue) => {
         const categoryItems = await db
           .select(selectFields)
-          .from(schema.marketplace)
-          .where(eq(schema.marketplace.category, categoryValue))
-          .orderBy(desc(schema.marketplace.views))
+          .from(marketplace)
+          .where(eq(marketplace.category, categoryValue))
+          .orderBy(desc(marketplace.views))
           .limit(limit)

         // Always add the category to the result, even if empty
@@ -328,10 +326,10 @@ export async function POST(request: NextRequest) {
   // Find the marketplace entry
   const marketplaceEntry = await db
     .select({
-      id: schema.marketplace.id,
+      id: marketplace.id,
     })
-    .from(schema.marketplace)
-    .where(eq(schema.marketplace.id, id))
+    .from(marketplace)
+    .where(eq(marketplace.id, id))
     .limit(1)
     .then((rows) => rows[0])

@@ -342,11 +340,11 @@ export async function POST(request: NextRequest) {

   // Increment the view count
   await db
-    .update(schema.marketplace)
+    .update(marketplace)
     .set({
-      views: sql`${schema.marketplace.views} + 1`,
+      views: sql`${marketplace.views} + 1`,
     })
-    .where(eq(schema.marketplace.id, id))
+    .where(eq(marketplace.id, id))

   logger.info(`[${requestId}] Incremented view count for marketplace entry: ${id}`)
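
The collapsed `selectFields` line above is the core trick in this file: build the projection object once and spread in optional columns. A standalone sketch under the diff's names (`includeState` stands in for the parsed query flag):

import { db } from '@/db'
import { marketplace } from '@/db/schema'

const includeState = true // stand-in for the parsed query flag

const baseFields = {
  id: marketplace.id,
  name: marketplace.name,
  views: marketplace.views,
}

// Drizzle infers the row type from the object shape, so rows carry a
// `state` property only when the flag is set.
const selectFields = includeState ? { ...baseFields, state: marketplace.state } : baseFields
const rows = await db.select(selectFields).from(marketplace)
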
@@ -1,4 +1,4 @@
-import { and, eq, isNull } from 'drizzle-orm'
+import { and, eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { createLogger } from '@/lib/logs/console-logger'
 import { db } from '@/db'
@@ -40,7 +40,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
   const memories = await db
     .select()
     .from(memory)
-    .where(and(eq(memory.key, id), eq(memory.workflowId, workflowId), isNull(memory.deletedAt)))
+    .where(and(eq(memory.key, id), eq(memory.workflowId, workflowId)))
     .orderBy(memory.createdAt)
     .limit(1)

@@ -112,7 +112,7 @@ export async function DELETE(
   const existingMemory = await db
     .select({ id: memory.id })
     .from(memory)
-    .where(and(eq(memory.key, id), eq(memory.workflowId, workflowId), isNull(memory.deletedAt)))
+    .where(and(eq(memory.key, id), eq(memory.workflowId, workflowId)))
     .limit(1)

   if (existingMemory.length === 0) {
@@ -128,14 +128,8 @@ export async function DELETE(
     )
   }

-  // Soft delete by setting deletedAt timestamp
-  await db
-    .update(memory)
-    .set({
-      deletedAt: new Date(),
-      updatedAt: new Date(),
-    })
-    .where(and(eq(memory.key, id), eq(memory.workflowId, workflowId)))
+  // Hard delete the memory
+  await db.delete(memory).where(and(eq(memory.key, id), eq(memory.workflowId, workflowId)))

   logger.info(`[${requestId}] Memory deleted successfully: ${id} for workflow: ${workflowId}`)
   return NextResponse.json(
@@ -202,7 +196,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
   const existingMemories = await db
     .select()
     .from(memory)
-    .where(and(eq(memory.key, id), eq(memory.workflowId, workflowId), isNull(memory.deletedAt)))
+    .where(and(eq(memory.key, id), eq(memory.workflowId, workflowId)))
     .limit(1)

   if (existingMemories.length === 0) {
@@ -250,13 +244,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
   }

   // Update the memory with new data
-  await db
-    .update(memory)
-    .set({
-      data,
-      updatedAt: new Date(),
-    })
-    .where(and(eq(memory.key, id), eq(memory.workflowId, workflowId)))
+  await db.delete(memory).where(and(eq(memory.key, id), eq(memory.workflowId, workflowId)))

   // Fetch the updated memory
   const updatedMemories = await db
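
These memory hunks unwind an earlier soft-delete scheme: the `isNull(memory.deletedAt)` guard disappears from every read, and the DELETE/PUT handlers now remove rows outright. The two idioms side by side (sketch; `deletedAt` is the column the old code used):

import { and, eq, isNull } from 'drizzle-orm'
import { db } from '@/db'
import { memory } from '@/db/schema'

declare const id: string // the memory key, as in the handlers above

// Soft delete: the row survives, and every read must remember the filter.
await db
  .update(memory)
  .set({ deletedAt: new Date(), updatedAt: new Date() })
  .where(eq(memory.key, id))
const live = await db.select().from(memory).where(and(eq(memory.key, id), isNull(memory.deletedAt)))

// Hard delete: the row is gone; reads need no extra predicate,
// but there is no recovery window.
await db.delete(memory).where(eq(memory.key, id))
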

apps/sim/app/api/organizations/[id]/invitations/route.ts (new file, 506 lines)
@@ -0,0 +1,506 @@
import { randomUUID } from 'crypto'
import { and, eq, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import {
  getEmailSubject,
  renderBatchInvitationEmail,
  renderInvitationEmail,
} from '@/components/emails/render-email'
import { getSession } from '@/lib/auth'
import {
  validateBulkInvitations,
  validateSeatAvailability,
} from '@/lib/billing/validation/seat-management'
import { sendEmail } from '@/lib/email/mailer'
import { validateAndNormalizeEmail } from '@/lib/email/utils'
import { createLogger } from '@/lib/logs/console-logger'
import { hasWorkspaceAdminAccess } from '@/lib/permissions/utils'
import { db } from '@/db'
import { invitation, member, organization, user, workspace, workspaceInvitation } from '@/db/schema'

const logger = createLogger('OrganizationInvitationsAPI')

interface WorkspaceInvitation {
  workspaceId: string
  permission: 'admin' | 'write' | 'read'
}

/**
 * GET /api/organizations/[id]/invitations
 * Get all pending invitations for an organization
 */
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  try {
    const session = await getSession()

    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { id: organizationId } = await params

    // Verify user has access to this organization
    const memberEntry = await db
      .select()
      .from(member)
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
      .limit(1)

    if (memberEntry.length === 0) {
      return NextResponse.json(
        { error: 'Forbidden - Not a member of this organization' },
        { status: 403 }
      )
    }

    const userRole = memberEntry[0].role
    const hasAdminAccess = ['owner', 'admin'].includes(userRole)

    if (!hasAdminAccess) {
      return NextResponse.json({ error: 'Forbidden - Admin access required' }, { status: 403 })
    }

    // Get all pending invitations for the organization
    const invitations = await db
      .select({
        id: invitation.id,
        email: invitation.email,
        role: invitation.role,
        status: invitation.status,
        expiresAt: invitation.expiresAt,
        createdAt: invitation.createdAt,
        inviterName: user.name,
        inviterEmail: user.email,
      })
      .from(invitation)
      .leftJoin(user, eq(invitation.inviterId, user.id))
      .where(eq(invitation.organizationId, organizationId))
      .orderBy(invitation.createdAt)

    return NextResponse.json({
      success: true,
      data: {
        invitations,
        userRole,
      },
    })
  } catch (error) {
    logger.error('Failed to get organization invitations', {
      organizationId: (await params).id,
      error,
    })

    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * POST /api/organizations/[id]/invitations
 * Create organization invitations with optional validation and batch workspace invitations
 * Query parameters:
 * - ?validate=true - Only validate, don't send invitations
 * - ?batch=true - Include workspace invitations
 */
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  try {
    const session = await getSession()

    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { id: organizationId } = await params
    const url = new URL(request.url)
    const validateOnly = url.searchParams.get('validate') === 'true'
    const isBatch = url.searchParams.get('batch') === 'true'

    const body = await request.json()
    const { email, emails, role = 'member', workspaceInvitations } = body

    // Handle single invitation vs batch
    const invitationEmails = email ? [email] : emails

    // Validate input
    if (!invitationEmails || !Array.isArray(invitationEmails) || invitationEmails.length === 0) {
      return NextResponse.json({ error: 'Email or emails array is required' }, { status: 400 })
    }

    if (!['member', 'admin'].includes(role)) {
      return NextResponse.json({ error: 'Invalid role' }, { status: 400 })
    }

    // Verify user has admin access
    const memberEntry = await db
      .select()
      .from(member)
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
      .limit(1)

    if (memberEntry.length === 0) {
      return NextResponse.json(
        { error: 'Forbidden - Not a member of this organization' },
        { status: 403 }
      )
    }

    if (!['owner', 'admin'].includes(memberEntry[0].role)) {
      return NextResponse.json({ error: 'Forbidden - Admin access required' }, { status: 403 })
    }

    // Handle validation-only requests
    if (validateOnly) {
      const validationResult = await validateBulkInvitations(organizationId, invitationEmails)

      logger.info('Invitation validation completed', {
        organizationId,
        userId: session.user.id,
        emailCount: invitationEmails.length,
        result: validationResult,
      })

      return NextResponse.json({
        success: true,
        data: validationResult,
        validatedBy: session.user.id,
        validatedAt: new Date().toISOString(),
      })
    }

    // Validate seat availability
    const seatValidation = await validateSeatAvailability(organizationId, invitationEmails.length)

    if (!seatValidation.canInvite) {
      return NextResponse.json(
        {
          error: seatValidation.reason,
          seatInfo: {
            currentSeats: seatValidation.currentSeats,
            maxSeats: seatValidation.maxSeats,
            availableSeats: seatValidation.availableSeats,
            seatsRequested: invitationEmails.length,
          },
        },
        { status: 400 }
      )
    }

    // Get organization details
    const organizationEntry = await db
      .select({ name: organization.name })
      .from(organization)
      .where(eq(organization.id, organizationId))
      .limit(1)

    if (organizationEntry.length === 0) {
      return NextResponse.json({ error: 'Organization not found' }, { status: 404 })
    }

    // Validate and normalize emails
    const processedEmails = invitationEmails
      .map((email: string) => {
        const result = validateAndNormalizeEmail(email)
        return result.isValid ? result.normalized : null
      })
      .filter(Boolean) as string[]

    if (processedEmails.length === 0) {
      return NextResponse.json({ error: 'No valid emails provided' }, { status: 400 })
    }

    // Handle batch workspace invitations if provided
    const validWorkspaceInvitations: WorkspaceInvitation[] = []
    if (isBatch && workspaceInvitations && workspaceInvitations.length > 0) {
      for (const wsInvitation of workspaceInvitations) {
        // Check if user has admin permission on this workspace
        const canInvite = await hasWorkspaceAdminAccess(session.user.id, wsInvitation.workspaceId)

        if (!canInvite) {
          return NextResponse.json(
            {
              error: `You don't have permission to invite users to workspace ${wsInvitation.workspaceId}`,
            },
            { status: 403 }
          )
        }

        validWorkspaceInvitations.push(wsInvitation)
      }
    }

    // Check for existing members
    const existingMembers = await db
      .select({ userEmail: user.email })
      .from(member)
      .innerJoin(user, eq(member.userId, user.id))
      .where(eq(member.organizationId, organizationId))

    const existingEmails = existingMembers.map((m) => m.userEmail)
    const newEmails = processedEmails.filter((email: string) => !existingEmails.includes(email))

    // Check for existing pending invitations
    const existingInvitations = await db
      .select({ email: invitation.email })
      .from(invitation)
      .where(and(eq(invitation.organizationId, organizationId), eq(invitation.status, 'pending')))

    const pendingEmails = existingInvitations.map((i) => i.email)
    const emailsToInvite = newEmails.filter((email: string) => !pendingEmails.includes(email))

    if (emailsToInvite.length === 0) {
      return NextResponse.json(
        {
          error: 'All emails are already members or have pending invitations',
          details: {
            existingMembers: processedEmails.filter((email: string) =>
              existingEmails.includes(email)
            ),
            pendingInvitations: processedEmails.filter((email: string) =>
              pendingEmails.includes(email)
            ),
          },
        },
        { status: 400 }
      )
    }

    // Create invitations
    const expiresAt = new Date(Date.now() + 7 * 24 * 60 * 60 * 1000) // 7 days
    const invitationsToCreate = emailsToInvite.map((email: string) => ({
      id: randomUUID(),
      email,
      inviterId: session.user.id,
      organizationId,
      role,
      status: 'pending' as const,
      expiresAt,
      createdAt: new Date(),
    }))

    await db.insert(invitation).values(invitationsToCreate)

    // Create workspace invitations if batch mode
    const workspaceInvitationIds: string[] = []
    if (isBatch && validWorkspaceInvitations.length > 0) {
      for (const email of emailsToInvite) {
        for (const wsInvitation of validWorkspaceInvitations) {
          const wsInvitationId = randomUUID()
          const token = randomUUID()

          await db.insert(workspaceInvitation).values({
            id: wsInvitationId,
            workspaceId: wsInvitation.workspaceId,
            email,
            inviterId: session.user.id,
            role: 'member',
            status: 'pending',
            token,
            permissions: wsInvitation.permission,
            expiresAt,
            createdAt: new Date(),
            updatedAt: new Date(),
          })

          workspaceInvitationIds.push(wsInvitationId)
        }
      }
    }

    // Send invitation emails
    const inviter = await db
      .select({ name: user.name })
      .from(user)
      .where(eq(user.id, session.user.id))
      .limit(1)

    for (const email of emailsToInvite) {
      const orgInvitation = invitationsToCreate.find((inv) => inv.email === email)
      if (!orgInvitation) continue

      let emailResult
      if (isBatch && validWorkspaceInvitations.length > 0) {
        // Get workspace details for batch email
        const workspaceDetails = await db
          .select({
            id: workspace.id,
            name: workspace.name,
          })
          .from(workspace)
          .where(
            inArray(
              workspace.id,
              validWorkspaceInvitations.map((w) => w.workspaceId)
            )
          )

        const workspaceInvitationsWithNames = validWorkspaceInvitations.map((wsInv) => ({
          workspaceId: wsInv.workspaceId,
          workspaceName:
            workspaceDetails.find((w) => w.id === wsInv.workspaceId)?.name || 'Unknown Workspace',
          permission: wsInv.permission,
        }))

        const emailHtml = await renderBatchInvitationEmail(
          inviter[0]?.name || 'Someone',
          organizationEntry[0]?.name || 'organization',
          role,
          workspaceInvitationsWithNames,
          `${process.env.NEXT_PUBLIC_BASE_URL}/api/organizations/invitations/accept?id=${orgInvitation.id}`
        )

        emailResult = await sendEmail({
          to: email,
          subject: getEmailSubject('batch-invitation'),
          html: emailHtml,
          emailType: 'transactional',
        })
      } else {
        const emailHtml = await renderInvitationEmail(
          inviter[0]?.name || 'Someone',
          organizationEntry[0]?.name || 'organization',
          `${process.env.NEXT_PUBLIC_BASE_URL}/api/organizations/invitations/accept?id=${orgInvitation.id}`,
          email
        )

        emailResult = await sendEmail({
          to: email,
          subject: getEmailSubject('invitation'),
          html: emailHtml,
          emailType: 'transactional',
        })
      }

      if (!emailResult.success) {
        logger.error('Failed to send invitation email', {
          email,
          error: emailResult.message,
        })
      }
    }

    logger.info('Organization invitations created', {
      organizationId,
      invitedBy: session.user.id,
      invitationCount: invitationsToCreate.length,
      emails: emailsToInvite,
      role,
      isBatch,
      workspaceInvitationCount: workspaceInvitationIds.length,
    })

    return NextResponse.json({
      success: true,
      message: `${invitationsToCreate.length} invitation(s) sent successfully`,
      data: {
        invitationsSent: invitationsToCreate.length,
        invitedEmails: emailsToInvite,
        existingMembers: processedEmails.filter((email: string) => existingEmails.includes(email)),
        pendingInvitations: processedEmails.filter((email: string) =>
          pendingEmails.includes(email)
        ),
        invalidEmails: invitationEmails.filter(
          (email: string) => !validateAndNormalizeEmail(email)
        ),
        workspaceInvitations: isBatch ? validWorkspaceInvitations.length : 0,
        seatInfo: {
          seatsUsed: seatValidation.currentSeats + invitationsToCreate.length,
          maxSeats: seatValidation.maxSeats,
          availableSeats: seatValidation.availableSeats - invitationsToCreate.length,
        },
      },
    })
  } catch (error) {
    logger.error('Failed to create organization invitations', {
      organizationId: (await params).id,
      error,
    })

    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * DELETE /api/organizations/[id]/invitations?invitationId=...
 * Cancel a pending invitation
 */
export async function DELETE(
  request: NextRequest,
  { params }: { params: Promise<{ id: string }> }
) {
  try {
    const session = await getSession()

    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { id: organizationId } = await params
    const url = new URL(request.url)
    const invitationId = url.searchParams.get('invitationId')

    if (!invitationId) {
      return NextResponse.json(
        { error: 'Invitation ID is required as query parameter' },
        { status: 400 }
      )
    }

    // Verify user has admin access
    const memberEntry = await db
      .select()
      .from(member)
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
      .limit(1)

    if (memberEntry.length === 0) {
      return NextResponse.json(
        { error: 'Forbidden - Not a member of this organization' },
        { status: 403 }
      )
    }

    if (!['owner', 'admin'].includes(memberEntry[0].role)) {
      return NextResponse.json({ error: 'Forbidden - Admin access required' }, { status: 403 })
    }

    // Cancel the invitation
    const result = await db
      .update(invitation)
      .set({
        status: 'cancelled',
      })
      .where(
        and(
          eq(invitation.id, invitationId),
          eq(invitation.organizationId, organizationId),
          eq(invitation.status, 'pending')
        )
      )
      .returning()

    if (result.length === 0) {
      return NextResponse.json(
        { error: 'Invitation not found or already processed' },
        { status: 404 }
      )
    }

    logger.info('Organization invitation cancelled', {
      organizationId,
      invitationId,
      cancelledBy: session.user.id,
      email: result[0].email,
    })

    return NextResponse.json({
      success: true,
      message: 'Invitation cancelled successfully',
    })
  } catch (error) {
    logger.error('Failed to cancel organization invitation', {
      organizationId: (await params).id,
      error,
    })

    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
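
A hedged sketch of how a client might drive the two-phase flow this route exposes: a dry run with ?validate=true, then the real send with ?batch=true plus per-workspace permissions (request shapes follow the handler above; the ids are placeholders):

const orgId = 'org_123' // placeholder
const base = `/api/organizations/${orgId}/invitations`

// Phase 1: validation only - nothing is inserted or emailed.
const preview = await fetch(`${base}?validate=true`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ emails: ['a@example.com', 'b@example.com'], role: 'member' }),
})

// Phase 2: send for real, attaching workspace invitations.
const sent = await fetch(`${base}?batch=true`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    emails: ['a@example.com', 'b@example.com'],
    role: 'member',
    workspaceInvitations: [{ workspaceId: 'ws_456', permission: 'write' }],
  }),
})
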

apps/sim/app/api/organizations/[id]/members/[memberId]/route.ts (new file, 314 lines)
@@ -0,0 +1,314 @@
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console-logger'
import { db } from '@/db'
import { member, user, userStats } from '@/db/schema'

const logger = createLogger('OrganizationMemberAPI')

/**
 * GET /api/organizations/[id]/members/[memberId]
 * Get individual organization member details
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ id: string; memberId: string }> }
) {
  try {
    const session = await getSession()

    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { id: organizationId, memberId } = await params
    const url = new URL(request.url)
    const includeUsage = url.searchParams.get('include') === 'usage'

    // Verify user has access to this organization
    const userMember = await db
      .select()
      .from(member)
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
      .limit(1)

    if (userMember.length === 0) {
      return NextResponse.json(
        { error: 'Forbidden - Not a member of this organization' },
        { status: 403 }
      )
    }

    const userRole = userMember[0].role
    const hasAdminAccess = ['owner', 'admin'].includes(userRole)

    // Get target member details
    const memberQuery = db
      .select({
        id: member.id,
        userId: member.userId,
        organizationId: member.organizationId,
        role: member.role,
        createdAt: member.createdAt,
        userName: user.name,
        userEmail: user.email,
      })
      .from(member)
      .innerJoin(user, eq(member.userId, user.id))
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, memberId)))
      .limit(1)

    const memberEntry = await memberQuery

    if (memberEntry.length === 0) {
      return NextResponse.json({ error: 'Member not found' }, { status: 404 })
    }

    // Check if user can view this member's details
    const canViewDetails = hasAdminAccess || session.user.id === memberId

    if (!canViewDetails) {
      return NextResponse.json({ error: 'Forbidden - Insufficient permissions' }, { status: 403 })
    }

    let memberData = memberEntry[0]

    // Include usage data if requested and user has permission
    if (includeUsage && hasAdminAccess) {
      const usageData = await db
        .select({
          currentPeriodCost: userStats.currentPeriodCost,
          currentUsageLimit: userStats.currentUsageLimit,
          billingPeriodStart: userStats.billingPeriodStart,
          billingPeriodEnd: userStats.billingPeriodEnd,
          usageLimitSetBy: userStats.usageLimitSetBy,
          usageLimitUpdatedAt: userStats.usageLimitUpdatedAt,
          lastPeriodCost: userStats.lastPeriodCost,
        })
        .from(userStats)
        .where(eq(userStats.userId, memberId))
        .limit(1)

      if (usageData.length > 0) {
        memberData = {
          ...memberData,
          usage: usageData[0],
        } as typeof memberData & { usage: (typeof usageData)[0] }
      }
    }

    return NextResponse.json({
      success: true,
      data: memberData,
      userRole,
      hasAdminAccess,
    })
  } catch (error) {
    logger.error('Failed to get organization member', {
      organizationId: (await params).id,
      memberId: (await params).memberId,
      error,
    })

    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * PUT /api/organizations/[id]/members/[memberId]
 * Update organization member role
 */
export async function PUT(
  request: NextRequest,
  { params }: { params: Promise<{ id: string; memberId: string }> }
) {
  try {
    const session = await getSession()

    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { id: organizationId, memberId } = await params
    const { role } = await request.json()

    // Validate input
    if (!role || !['admin', 'member'].includes(role)) {
      return NextResponse.json({ error: 'Invalid role' }, { status: 400 })
    }

    // Verify user has admin access
    const userMember = await db
      .select()
      .from(member)
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
      .limit(1)

    if (userMember.length === 0) {
      return NextResponse.json(
        { error: 'Forbidden - Not a member of this organization' },
        { status: 403 }
      )
    }

    if (!['owner', 'admin'].includes(userMember[0].role)) {
      return NextResponse.json({ error: 'Forbidden - Admin access required' }, { status: 403 })
    }

    // Check if target member exists
    const targetMember = await db
      .select()
      .from(member)
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, memberId)))
      .limit(1)

    if (targetMember.length === 0) {
      return NextResponse.json({ error: 'Member not found' }, { status: 404 })
    }

    // Prevent changing owner role
    if (targetMember[0].role === 'owner') {
      return NextResponse.json({ error: 'Cannot change owner role' }, { status: 400 })
    }

    // Prevent non-owners from promoting to admin
    if (role === 'admin' && userMember[0].role !== 'owner') {
      return NextResponse.json(
        { error: 'Only owners can promote members to admin' },
        { status: 403 }
      )
    }

    // Update member role
    const updatedMember = await db
      .update(member)
      .set({ role })
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, memberId)))
      .returning()

    if (updatedMember.length === 0) {
      return NextResponse.json({ error: 'Failed to update member role' }, { status: 500 })
    }

    logger.info('Organization member role updated', {
      organizationId,
      memberId,
      newRole: role,
      updatedBy: session.user.id,
    })

    return NextResponse.json({
      success: true,
      message: 'Member role updated successfully',
      data: {
        id: updatedMember[0].id,
        userId: updatedMember[0].userId,
        role: updatedMember[0].role,
        updatedBy: session.user.id,
      },
    })
  } catch (error) {
    logger.error('Failed to update organization member role', {
      organizationId: (await params).id,
      memberId: (await params).memberId,
      error,
    })

    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * DELETE /api/organizations/[id]/members/[memberId]
 * Remove member from organization
 */
export async function DELETE(
  request: NextRequest,
  { params }: { params: Promise<{ id: string; memberId: string }> }
) {
  try {
    const session = await getSession()

    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { id: organizationId, memberId } = await params

    // Verify user has admin access
    const userMember = await db
      .select()
      .from(member)
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
      .limit(1)

    if (userMember.length === 0) {
      return NextResponse.json(
        { error: 'Forbidden - Not a member of this organization' },
        { status: 403 }
      )
    }

    const canRemoveMembers =
      ['owner', 'admin'].includes(userMember[0].role) || session.user.id === memberId

    if (!canRemoveMembers) {
      return NextResponse.json({ error: 'Forbidden - Insufficient permissions' }, { status: 403 })
    }

    // Check if target member exists
    const targetMember = await db
      .select()
      .from(member)
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, memberId)))
      .limit(1)

    if (targetMember.length === 0) {
      return NextResponse.json({ error: 'Member not found' }, { status: 404 })
    }

    // Prevent removing the owner
    if (targetMember[0].role === 'owner') {
      return NextResponse.json({ error: 'Cannot remove organization owner' }, { status: 400 })
    }

    // Remove member
    const removedMember = await db
      .delete(member)
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, memberId)))
      .returning()

    if (removedMember.length === 0) {
      return NextResponse.json({ error: 'Failed to remove member' }, { status: 500 })
    }

    logger.info('Organization member removed', {
      organizationId,
      removedMemberId: memberId,
      removedBy: session.user.id,
      wasSelfRemoval: session.user.id === memberId,
    })

    return NextResponse.json({
      success: true,
      message:
        session.user.id === memberId
          ? 'You have left the organization'
          : 'Member removed successfully',
      data: {
        removedMemberId: memberId,
        removedBy: session.user.id,
        removedAt: new Date().toISOString(),
      },
    })
  } catch (error) {
    logger.error('Failed to remove organization member', {
      organizationId: (await params).id,
      memberId: (await params).memberId,
      error,
    })

    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
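
The PUT and DELETE handlers above repeat the same role rules inline: the owner is immutable and unremovable, only owners may promote to admin, and a member may always remove themself. A sketch of the update rule factored into one predicate (the helper is hypothetical, not in the diff):

type OrgRole = 'owner' | 'admin' | 'member'

// Hypothetical helper mirroring the PUT checks above.
function canChangeRole(actor: OrgRole, target: OrgRole, newRole: OrgRole): boolean {
  if (target === 'owner') return false // owner role is immutable
  if (newRole === 'admin') return actor === 'owner' // only owners promote to admin
  return actor === 'owner' || actor === 'admin' // admins may assign 'member'
}

// canChangeRole('admin', 'member', 'admin') === false
// canChangeRole('owner', 'member', 'admin') === true
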

apps/sim/app/api/organizations/[id]/members/route.ts (new file, 293 lines)
@@ -0,0 +1,293 @@
import { randomUUID } from 'crypto'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getEmailSubject, renderInvitationEmail } from '@/components/emails/render-email'
import { getSession } from '@/lib/auth'
import { validateSeatAvailability } from '@/lib/billing/validation/seat-management'
import { sendEmail } from '@/lib/email/mailer'
import { validateAndNormalizeEmail } from '@/lib/email/utils'
import { createLogger } from '@/lib/logs/console-logger'
import { db } from '@/db'
import { invitation, member, organization, user, userStats } from '@/db/schema'

const logger = createLogger('OrganizationMembersAPI')

/**
 * GET /api/organizations/[id]/members
 * Get organization members with optional usage data
 */
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  try {
    const session = await getSession()

    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { id: organizationId } = await params
    const url = new URL(request.url)
    const includeUsage = url.searchParams.get('include') === 'usage'

    // Verify user has access to this organization
    const memberEntry = await db
      .select()
      .from(member)
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
      .limit(1)

    if (memberEntry.length === 0) {
      return NextResponse.json(
        { error: 'Forbidden - Not a member of this organization' },
        { status: 403 }
      )
    }

    const userRole = memberEntry[0].role
    const hasAdminAccess = ['owner', 'admin'].includes(userRole)

    // Get organization members
    const query = db
      .select({
        id: member.id,
        userId: member.userId,
        organizationId: member.organizationId,
        role: member.role,
        createdAt: member.createdAt,
        userName: user.name,
        userEmail: user.email,
      })
      .from(member)
      .innerJoin(user, eq(member.userId, user.id))
      .where(eq(member.organizationId, organizationId))

    // Include usage data if requested and user has admin access
    if (includeUsage && hasAdminAccess) {
      const membersWithUsage = await db
        .select({
          id: member.id,
          userId: member.userId,
          organizationId: member.organizationId,
          role: member.role,
          createdAt: member.createdAt,
          userName: user.name,
          userEmail: user.email,
          currentPeriodCost: userStats.currentPeriodCost,
          currentUsageLimit: userStats.currentUsageLimit,
          billingPeriodStart: userStats.billingPeriodStart,
          billingPeriodEnd: userStats.billingPeriodEnd,
          usageLimitSetBy: userStats.usageLimitSetBy,
          usageLimitUpdatedAt: userStats.usageLimitUpdatedAt,
        })
        .from(member)
        .innerJoin(user, eq(member.userId, user.id))
        .leftJoin(userStats, eq(user.id, userStats.userId))
        .where(eq(member.organizationId, organizationId))

      return NextResponse.json({
        success: true,
        data: membersWithUsage,
        total: membersWithUsage.length,
        userRole,
        hasAdminAccess,
      })
    }

    const members = await query

    return NextResponse.json({
      success: true,
      data: members,
      total: members.length,
      userRole,
      hasAdminAccess,
    })
  } catch (error) {
    logger.error('Failed to get organization members', {
      organizationId: (await params).id,
      error,
    })

    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * POST /api/organizations/[id]/members
 * Invite new member to organization
 */
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  try {
    const session = await getSession()

    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { id: organizationId } = await params
    const { email, role = 'member' } = await request.json()

    // Validate input
    if (!email) {
      return NextResponse.json({ error: 'Email is required' }, { status: 400 })
    }

    if (!['admin', 'member'].includes(role)) {
      return NextResponse.json({ error: 'Invalid role' }, { status: 400 })
    }

    // Validate and normalize email
    const { isValid, normalized: normalizedEmail } = validateAndNormalizeEmail(email)
    if (!isValid) {
      return NextResponse.json({ error: 'Invalid email format' }, { status: 400 })
    }

    // Verify user has admin access
    const memberEntry = await db
      .select()
      .from(member)
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
      .limit(1)

    if (memberEntry.length === 0) {
      return NextResponse.json(
        { error: 'Forbidden - Not a member of this organization' },
        { status: 403 }
      )
    }

    if (!['owner', 'admin'].includes(memberEntry[0].role)) {
      return NextResponse.json({ error: 'Forbidden - Admin access required' }, { status: 403 })
    }

    // Check seat availability
    const seatValidation = await validateSeatAvailability(organizationId, 1)
    if (!seatValidation.canInvite) {
      return NextResponse.json(
        {
          error: `Cannot invite member. Using ${seatValidation.currentSeats} of ${seatValidation.maxSeats} seats.`,
          details: seatValidation,
        },
        { status: 400 }
      )
    }

    // Check if user is already a member
    const existingUser = await db
      .select({ id: user.id })
      .from(user)
      .where(eq(user.email, normalizedEmail))
      .limit(1)

    if (existingUser.length > 0) {
      const existingMember = await db
        .select()
        .from(member)
        .where(
          and(eq(member.organizationId, organizationId), eq(member.userId, existingUser[0].id))
        )
        .limit(1)

      if (existingMember.length > 0) {
        return NextResponse.json(
          { error: 'User is already a member of this organization' },
          { status: 400 }
        )
      }
    }

    // Check for existing pending invitation
    const existingInvitation = await db
      .select()
      .from(invitation)
      .where(
        and(
          eq(invitation.organizationId, organizationId),
          eq(invitation.email, normalizedEmail),
          eq(invitation.status, 'pending')
        )
      )
      .limit(1)

    if (existingInvitation.length > 0) {
      return NextResponse.json(
        { error: 'Pending invitation already exists for this email' },
        { status: 400 }
      )
    }

    // Create invitation
    const invitationId = randomUUID()
    const expiresAt = new Date()
    expiresAt.setDate(expiresAt.getDate() + 7) // 7 days expiry

    await db.insert(invitation).values({
      id: invitationId,
      email: normalizedEmail,
      inviterId: session.user.id,
      organizationId,
      role,
      status: 'pending',
      expiresAt,
      createdAt: new Date(),
    })

    const organizationEntry = await db
      .select({ name: organization.name })
      .from(organization)
      .where(eq(organization.id, organizationId))
      .limit(1)

    const inviter = await db
      .select({ name: user.name })
      .from(user)
      .where(eq(user.id, session.user.id))
      .limit(1)

    const emailHtml = await renderInvitationEmail(
      inviter[0]?.name || 'Someone',
      organizationEntry[0]?.name || 'organization',
      `${process.env.NEXT_PUBLIC_BASE_URL}/api/organizations/invitations/accept?id=${invitationId}`,
      normalizedEmail
    )

    const emailResult = await sendEmail({
      to: normalizedEmail,
      subject: getEmailSubject('invitation'),
      html: emailHtml,
      emailType: 'transactional',
    })

    if (emailResult.success) {
      logger.info('Member invitation sent', {
        email: normalizedEmail,
        organizationId,
        invitationId,
        role,
      })
    } else {
      logger.error('Failed to send invitation email', {
        email: normalizedEmail,
        error: emailResult.message,
      })
      // Don't fail the request if email fails
    }

    return NextResponse.json({
      success: true,
      message: `Invitation sent to ${normalizedEmail}`,
      data: {
        invitationId,
        email: normalizedEmail,
        role,
        expiresAt,
      },
    })
  } catch (error) {
    logger.error('Failed to invite organization member', {
      organizationId: (await params).id,
      error,
    })

    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
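
One small inconsistency between the two invitation paths: this file builds the 7-day expiry with setDate, while the invitations route above adds a millisecond offset. The results are almost identical, but not quite (sketch):

// Style A (this file): advance the calendar date in local time.
const a = new Date()
a.setDate(a.getDate() + 7)

// Style B (invitations route): add exactly 168 hours.
const b = new Date(Date.now() + 7 * 24 * 60 * 60 * 1000)

// Across a daylight-saving transition, style A keeps the wall-clock time
// and style B shifts by an hour; otherwise they differ only by the
// instant between the two Date constructions.
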

apps/sim/app/api/organizations/[id]/route.ts (new file, 248 lines)
@@ -0,0 +1,248 @@
import { and, eq, ne } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import {
|
||||
getOrganizationSeatAnalytics,
|
||||
getOrganizationSeatInfo,
|
||||
updateOrganizationSeats,
|
||||
} from '@/lib/billing/validation/seat-management'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { db } from '@/db'
|
||||
import { member, organization } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('OrganizationAPI')
|
||||
|
||||
/**
|
||||
* GET /api/organizations/[id]
|
||||
* Get organization details including settings and seat information
|
||||
*/
|
||||
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id: organizationId } = await params
|
||||
const url = new URL(request.url)
|
||||
const includeSeats = url.searchParams.get('include') === 'seats'
|
||||
|
||||
// Verify user has access to this organization
|
||||
const memberEntry = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
|
||||
.limit(1)
|
||||
|
||||
if (memberEntry.length === 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Forbidden - Not a member of this organization' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
// Get organization data
|
||||
const organizationEntry = await db
|
||||
.select()
|
||||
.from(organization)
|
||||
.where(eq(organization.id, organizationId))
|
||||
.limit(1)
|
||||
|
||||
if (organizationEntry.length === 0) {
|
||||
return NextResponse.json({ error: 'Organization not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
const userRole = memberEntry[0].role
|
||||
const hasAdminAccess = ['owner', 'admin'].includes(userRole)
|
||||
|
||||
const response: any = {
|
||||
success: true,
|
||||
data: {
|
||||
id: organizationEntry[0].id,
|
||||
name: organizationEntry[0].name,
|
||||
slug: organizationEntry[0].slug,
|
||||
logo: organizationEntry[0].logo,
|
||||
metadata: organizationEntry[0].metadata,
|
||||
createdAt: organizationEntry[0].createdAt,
|
||||
updatedAt: organizationEntry[0].updatedAt,
|
||||
},
|
||||
userRole,
|
||||
hasAdminAccess,
|
||||
}
|
||||
|
||||
// Include seat information if requested
|
||||
if (includeSeats) {
|
||||
const seatInfo = await getOrganizationSeatInfo(organizationId)
|
||||
if (seatInfo) {
|
||||
response.data.seats = seatInfo
|
||||
}
|
||||
|
||||
// Include analytics for admins
|
||||
if (hasAdminAccess) {
|
||||
const analytics = await getOrganizationSeatAnalytics(organizationId)
|
||||
if (analytics) {
|
||||
response.data.seatAnalytics = analytics
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return NextResponse.json(response)
|
||||
} catch (error) {
|
||||
logger.error('Failed to get organization', {
|
||||
organizationId: (await params).id,
|
||||
error,
|
||||
})
|
||||
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||

/**
 * PUT /api/organizations/[id]
 * Update organization settings or seat count
 */
export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  try {
    const session = await getSession()

    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { id: organizationId } = await params
    const body = await request.json()
    const { name, slug, logo, seats } = body

    // Verify user has admin access
    const memberEntry = await db
      .select()
      .from(member)
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
      .limit(1)

    if (memberEntry.length === 0) {
      return NextResponse.json(
        { error: 'Forbidden - Not a member of this organization' },
        { status: 403 }
      )
    }

    if (!['owner', 'admin'].includes(memberEntry[0].role)) {
      return NextResponse.json({ error: 'Forbidden - Admin access required' }, { status: 403 })
    }

    // Handle seat count update
    if (seats !== undefined) {
      if (typeof seats !== 'number' || seats < 1) {
        return NextResponse.json({ error: 'Invalid seat count' }, { status: 400 })
      }

      const result = await updateOrganizationSeats(organizationId, seats, session.user.id)

      if (!result.success) {
        return NextResponse.json({ error: result.error }, { status: 400 })
      }

      logger.info('Organization seat count updated', {
        organizationId,
        newSeatCount: seats,
        updatedBy: session.user.id,
      })

      return NextResponse.json({
        success: true,
        message: 'Seat count updated successfully',
        data: {
          seats: seats,
          updatedBy: session.user.id,
          updatedAt: new Date().toISOString(),
        },
      })
    }
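
    // Note (descriptive, not in the original source): a seat update returns
    // immediately, so when `seats` is sent together with settings fields, only
    // the seat count is changed in that request.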

    // Handle settings update
    if (name !== undefined || slug !== undefined || logo !== undefined) {
      // Validate required fields
      if (name !== undefined && (!name || typeof name !== 'string' || name.trim().length === 0)) {
        return NextResponse.json({ error: 'Organization name is required' }, { status: 400 })
      }

      if (slug !== undefined && (!slug || typeof slug !== 'string' || slug.trim().length === 0)) {
        return NextResponse.json({ error: 'Organization slug is required' }, { status: 400 })
      }

      // Validate slug format
      if (slug !== undefined) {
        const slugRegex = /^[a-z0-9-_]+$/
        if (!slugRegex.test(slug)) {
          return NextResponse.json(
            {
              error: 'Slug can only contain lowercase letters, numbers, hyphens, and underscores',
            },
            { status: 400 }
          )
        }

        // Check if slug is already taken by another organization
        const existingSlug = await db
          .select()
          .from(organization)
          .where(and(eq(organization.slug, slug), ne(organization.id, organizationId)))
          .limit(1)

        if (existingSlug.length > 0) {
          return NextResponse.json({ error: 'This slug is already taken' }, { status: 400 })
        }
      }

      // Build update object with only provided fields
      const updateData: any = { updatedAt: new Date() }
      if (name !== undefined) updateData.name = name.trim()
      if (slug !== undefined) updateData.slug = slug.trim()
      if (logo !== undefined) updateData.logo = logo || null

      // Update organization
      const updatedOrg = await db
        .update(organization)
        .set(updateData)
        .where(eq(organization.id, organizationId))
        .returning()

      if (updatedOrg.length === 0) {
        return NextResponse.json({ error: 'Organization not found' }, { status: 404 })
      }

      logger.info('Organization settings updated', {
        organizationId,
        updatedBy: session.user.id,
        changes: { name, slug, logo },
      })

      return NextResponse.json({
        success: true,
        message: 'Organization updated successfully',
        data: {
          id: updatedOrg[0].id,
          name: updatedOrg[0].name,
          slug: updatedOrg[0].slug,
          logo: updatedOrg[0].logo,
          updatedAt: updatedOrg[0].updatedAt,
        },
      })
    }

    return NextResponse.json({ error: 'No valid fields provided for update' }, { status: 400 })
  } catch (error) {
    logger.error('Failed to update organization', {
      organizationId: (await params).id,
      error,
    })

    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
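
// Illustrative only: minimal client sketches of the two update paths the PUT
// handler above accepts. The organization id is a hypothetical placeholder;
// the request bodies mirror the handler's destructuring of `name`, `slug`,
// `logo`, and `seats`.
async function exampleUpdateSeats() {
  // Seat update path: body carries a positive numeric `seats`.
  return fetch('/api/organizations/org_123', {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ seats: 10 }),
  })
}

async function exampleUpdateSettings() {
  // Settings path: any subset of name/slug/logo; slug must match /^[a-z0-9-_]+$/.
  return fetch('/api/organizations/org_123', {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ name: 'Acme Inc', slug: 'acme-inc' }),
  })
}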

// DELETE method removed - organization deletion not implemented
// If deletion is needed in the future, it should be implemented with proper
// cleanup of subscriptions, members, workspaces, and billing data

apps/sim/app/api/organizations/[id]/workspaces/route.ts (new file, 209 lines)
@@ -0,0 +1,209 @@
import { and, eq, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console-logger'
import { db } from '@/db'
import { member, permissions, user, workspace, workspaceMember } from '@/db/schema'

const logger = createLogger('OrganizationWorkspacesAPI')

/**
 * GET /api/organizations/[id]/workspaces
 * Get workspaces related to the organization with optional filtering
 * Query parameters:
 * - ?available=true - Only workspaces where user can invite others (admin permissions)
 * - ?member=userId - Workspaces where specific member has access
 */
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  try {
    const session = await getSession()

    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { id: organizationId } = await params
    const url = new URL(request.url)
    const availableOnly = url.searchParams.get('available') === 'true'
    const memberId = url.searchParams.get('member')

    // Verify user is a member of this organization
    const memberEntry = await db
      .select()
      .from(member)
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
      .limit(1)

    if (memberEntry.length === 0) {
      return NextResponse.json(
        {
          error: 'Forbidden - Not a member of this organization',
        },
        { status: 403 }
      )
    }

    const userRole = memberEntry[0].role
    const hasAdminAccess = ['owner', 'admin'].includes(userRole)

    if (availableOnly) {
      // Get workspaces where user has admin permissions (can invite others)
      const availableWorkspaces = await db
        .select({
          id: workspace.id,
          name: workspace.name,
          ownerId: workspace.ownerId,
          createdAt: workspace.createdAt,
          isOwner: eq(workspace.ownerId, session.user.id),
          permissionType: permissions.permissionType,
        })
        .from(workspace)
        .leftJoin(
          permissions,
          and(
            eq(permissions.entityType, 'workspace'),
            eq(permissions.entityId, workspace.id),
            eq(permissions.userId, session.user.id)
          )
        )
        .where(
          or(
            // User owns the workspace
            eq(workspace.ownerId, session.user.id),
            // User has admin permission on the workspace
            and(
              eq(permissions.userId, session.user.id),
              eq(permissions.entityType, 'workspace'),
              eq(permissions.permissionType, 'admin')
            )
          )
        )

      // Filter and format the results
      const workspacesWithInvitePermission = availableWorkspaces
        .filter((workspace) => {
          // Include if user owns the workspace OR has admin permission
          return workspace.isOwner || workspace.permissionType === 'admin'
        })
        .map((workspace) => ({
          id: workspace.id,
          name: workspace.name,
          isOwner: workspace.isOwner,
          canInvite: true, // All returned workspaces have invite permission
          createdAt: workspace.createdAt,
        }))

      logger.info('Retrieved available workspaces for organization member', {
        organizationId,
        userId: session.user.id,
        workspaceCount: workspacesWithInvitePermission.length,
      })

      return NextResponse.json({
        success: true,
        data: {
          workspaces: workspacesWithInvitePermission,
          totalCount: workspacesWithInvitePermission.length,
          filter: 'available',
        },
      })
    }
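
    // Note (descriptive, not in the original source): the `available` filter
    // takes precedence — when both query params are supplied, the handler
    // returns above and `member` is never evaluated.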

    if (memberId && hasAdminAccess) {
      // Get workspaces where specific member has access (admin only)
      const memberWorkspaces = await db
        .select({
          id: workspace.id,
          name: workspace.name,
          ownerId: workspace.ownerId,
          createdAt: workspace.createdAt,
          isOwner: eq(workspace.ownerId, memberId),
          permissionType: permissions.permissionType,
          joinedAt: workspaceMember.joinedAt,
        })
        .from(workspace)
        .leftJoin(
          permissions,
          and(
            eq(permissions.entityType, 'workspace'),
            eq(permissions.entityId, workspace.id),
            eq(permissions.userId, memberId)
          )
        )
        .leftJoin(
          workspaceMember,
          and(eq(workspaceMember.workspaceId, workspace.id), eq(workspaceMember.userId, memberId))
        )
        .where(
          or(
            // Member owns the workspace
            eq(workspace.ownerId, memberId),
            // Member has permissions on the workspace
            and(eq(permissions.userId, memberId), eq(permissions.entityType, 'workspace'))
          )
        )

      const formattedWorkspaces = memberWorkspaces.map((workspace) => ({
        id: workspace.id,
        name: workspace.name,
        isOwner: workspace.isOwner,
        permission: workspace.permissionType,
        joinedAt: workspace.joinedAt,
        createdAt: workspace.createdAt,
      }))

      return NextResponse.json({
        success: true,
        data: {
          workspaces: formattedWorkspaces,
          totalCount: formattedWorkspaces.length,
          filter: 'member',
          memberId,
        },
      })
    }

    // Default: Get all workspaces (basic info only for regular members)
    if (!hasAdminAccess) {
      return NextResponse.json({
        success: true,
        data: {
          workspaces: [],
          totalCount: 0,
          message: 'Workspace access information is only available to organization admins',
        },
      })
    }

    // For admins: Get summary of all workspaces
    const allWorkspaces = await db
      .select({
        id: workspace.id,
        name: workspace.name,
        ownerId: workspace.ownerId,
        createdAt: workspace.createdAt,
        ownerName: user.name,
      })
      .from(workspace)
      .leftJoin(user, eq(workspace.ownerId, user.id))

    return NextResponse.json({
      success: true,
      data: {
        workspaces: allWorkspaces,
        totalCount: allWorkspaces.length,
        filter: 'all',
      },
      userRole,
      hasAdminAccess,
    })
  } catch (error) {
    logger.error('Failed to get organization workspaces', { error })
    return NextResponse.json(
      {
        error: 'Internal server error',
      },
      { status: 500 }
    )
  }
}
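
// Illustrative only: sketches of the three query modes the GET handler above
// supports. The organization id and member id values are hypothetical
// placeholders.
async function exampleListWorkspaces() {
  // Workspaces the caller can invite others into (owner or workspace admin)
  const available = await fetch('/api/organizations/org_123/workspaces?available=true')

  // Workspaces a specific member can access (requires org owner/admin role)
  const forMember = await fetch('/api/organizations/org_123/workspaces?member=user_456')

  // No filter: all workspaces with owner names for org admins, an empty list otherwise
  const all = await fetch('/api/organizations/org_123/workspaces')

  return Promise.all([available.json(), forMember.json(), all.json()])
}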

Some files were not shown because too many files have changed in this diff.