mirror of
https://github.com/simstudioai/sim.git
synced 2026-01-11 07:58:06 -05:00
Compare commits
27 Commits
v0.2.12
...
feat/aws-l
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f553667242 | ||
|
|
0c753c4394 | ||
|
|
2ac203e233 | ||
|
|
d29692ede4 | ||
|
|
f60232fa5b | ||
|
|
4ceec7ff9a | ||
|
|
0ff86a1413 | ||
|
|
4886e5aae8 | ||
|
|
6274bdcb18 | ||
|
|
7064f69520 | ||
|
|
154d8a674a | ||
|
|
a6e144ad93 | ||
|
|
d0514a39a8 | ||
|
|
ee66cd262b | ||
|
|
5aab24e1ed | ||
|
|
689d88fd7e | ||
|
|
b1047503b9 | ||
|
|
ec1eec4546 | ||
|
|
2b3989edd2 | ||
|
|
cb393c1638 | ||
|
|
c82e5ac3b3 | ||
|
|
67030d9576 | ||
|
|
8c157083bc | ||
|
|
6f07c2958e | ||
|
|
be100e4f86 | ||
|
|
46be9e3558 | ||
|
|
abf1ac06ce |
@@ -1 +1 @@
|
||||
bunx lint-staged
|
||||
bun lint
|
||||
@@ -88,8 +88,9 @@ For security and performance reasons, function execution has certain limitations
|
||||
|
||||
### Outputs
|
||||
|
||||
- **result**: The value returned by your function
|
||||
- **stdout**: Any console output from your function
|
||||
- **Result**: The value returned by your function
|
||||
- **Standard Output**: Any console output from your function
|
||||
- **Execution Time**: The time taken to execute your function (in milliseconds)
|
||||
|
||||
## Example Usage
|
||||
|
||||
|
||||
@@ -115,9 +115,14 @@ Headers are configured as key-value pairs:
|
||||
</Tab>
|
||||
<Tab>
|
||||
<ul className="list-disc space-y-2 pl-6">
|
||||
<li><strong>data</strong>: The response body data</li>
|
||||
<li><strong>status</strong>: HTTP status code</li>
|
||||
<li><strong>headers</strong>: Response headers</li>
|
||||
<li>
|
||||
<strong>response</strong>: Complete response object containing:
|
||||
<ul className="list-disc space-y-1 pl-6 mt-2">
|
||||
<li><strong>data</strong>: The response body data</li>
|
||||
<li><strong>status</strong>: HTTP status code</li>
|
||||
<li><strong>headers</strong>: Response headers</li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
@@ -182,9 +182,10 @@ Update multiple existing records in an Airtable table
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `records` | json | records output from the block |
|
||||
| `record` | json | record output from the block |
|
||||
| `metadata` | json | metadata output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `records` | json | records of the response |
|
||||
| ↳ `record` | json | record of the response |
|
||||
| ↳ `metadata` | json | metadata of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -174,10 +174,11 @@ Manage and render prompts using Autoblocks prompt management system
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `promptId` | string | promptId output from the block |
|
||||
| `version` | string | version output from the block |
|
||||
| `renderedPrompt` | string | renderedPrompt output from the block |
|
||||
| `templates` | json | templates output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `promptId` | string | promptId of the response |
|
||||
| ↳ `version` | string | version of the response |
|
||||
| ↳ `renderedPrompt` | string | renderedPrompt of the response |
|
||||
| ↳ `templates` | json | templates of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -102,10 +102,11 @@ Runs a browser automation task using BrowserUse
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `id` | string | id output from the block |
|
||||
| `success` | boolean | success output from the block |
|
||||
| `output` | any | output output from the block |
|
||||
| `steps` | json | steps output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `id` | string | id of the response |
|
||||
| ↳ `success` | boolean | success of the response |
|
||||
| ↳ `output` | any | output of the response |
|
||||
| ↳ `steps` | json | steps of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -238,7 +238,8 @@ Populate Clay with data from a JSON file. Enables direct communication and notif
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `data` | any | data output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `data` | any | data of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -113,11 +113,12 @@ Update a Confluence page using the Confluence API.
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `ts` | string | ts output from the block |
|
||||
| `pageId` | string | pageId output from the block |
|
||||
| `content` | string | content output from the block |
|
||||
| `title` | string | title output from the block |
|
||||
| `success` | boolean | success output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `ts` | string | ts of the response |
|
||||
| ↳ `pageId` | string | pageId of the response |
|
||||
| ↳ `content` | string | content of the response |
|
||||
| ↳ `title` | string | title of the response |
|
||||
| ↳ `success` | boolean | success of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -150,8 +150,9 @@ Retrieve information about a Discord user
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `message` | string | message output from the block |
|
||||
| `data` | any | data output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `message` | string | message of the response |
|
||||
| ↳ `data` | any | data of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -80,7 +80,8 @@ Convert TTS using ElevenLabs voices
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `audioUrl` | string | audioUrl output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `audioUrl` | string | audioUrl of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -158,10 +158,11 @@ Get an AI-generated answer to a question with citations from the web using Exa A
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `results` | json | results output from the block |
|
||||
| `similarLinks` | json | similarLinks output from the block |
|
||||
| `answer` | string | answer output from the block |
|
||||
| `citations` | json | citations output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `results` | json | results of the response |
|
||||
| ↳ `similarLinks` | json | similarLinks of the response |
|
||||
| ↳ `answer` | string | answer of the response |
|
||||
| ↳ `citations` | json | citations of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -87,8 +87,9 @@ This tool does not produce any outputs.
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `files` | json | files output from the block |
|
||||
| `combinedContent` | string | combinedContent output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `files` | json | files of the response |
|
||||
| ↳ `combinedContent` | string | combinedContent of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -111,11 +111,12 @@ Search for information on the web using Firecrawl
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `markdown` | string | markdown output from the block |
|
||||
| `html` | any | html output from the block |
|
||||
| `metadata` | json | metadata output from the block |
|
||||
| `data` | json | data output from the block |
|
||||
| `warning` | any | warning output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `markdown` | string | markdown of the response |
|
||||
| ↳ `html` | any | html of the response |
|
||||
| ↳ `metadata` | json | metadata of the response |
|
||||
| ↳ `data` | json | data of the response |
|
||||
| ↳ `warning` | any | warning of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -174,8 +174,9 @@ Retrieve the latest commit from a GitHub repository
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `content` | string | content output from the block |
|
||||
| `metadata` | json | metadata output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `content` | string | content of the response |
|
||||
| ↳ `metadata` | json | metadata of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -79,18 +79,19 @@ Send emails using Gmail
|
||||
| `threadId` | string |
|
||||
| `labelIds` | string |
|
||||
|
||||
### `gmail_draft`
|
||||
### `gmail_read`
|
||||
|
||||
Draft emails using Gmail
|
||||
Read emails from Gmail
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `accessToken` | string | Yes | Access token for Gmail API |
|
||||
| `to` | string | Yes | Recipient email address |
|
||||
| `subject` | string | Yes | Email subject |
|
||||
| `body` | string | Yes | Email body content |
|
||||
| `messageId` | string | No | ID of the message to read |
|
||||
| `folder` | string | No | Folder/label to read emails from |
|
||||
| `unreadOnly` | boolean | No | Only retrieve unread messages |
|
||||
| `maxResults` | number | No | Maximum number of messages to retrieve \(default: 1, max: 10\) |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -98,19 +99,30 @@ Draft emails using Gmail
|
||||
| --------- | ---- |
|
||||
| `content` | string |
|
||||
| `metadata` | string |
|
||||
| `message` | string |
|
||||
| `threadId` | string |
|
||||
| `labelIds` | string |
|
||||
|
||||
### `gmail_search`
|
||||
|
||||
Search emails in Gmail
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `accessToken` | string | Yes | Access token for Gmail API |
|
||||
| `query` | string | Yes | Search query for emails |
|
||||
| `maxResults` | number | No | Maximum number of results to return |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type |
|
||||
| --------- | ---- |
|
||||
| `content` | string |
|
||||
|
||||
|
||||
|
||||
## Block Configuration
|
||||
|
||||
### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `operation` | string | Yes | Operation (e.g., 'send', 'draft') |
|
||||
No configuration parameters required.
|
||||
|
||||
|
||||
|
||||
@@ -118,8 +130,9 @@ Draft emails using Gmail
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `content` | string | content output from the block |
|
||||
| `metadata` | json | metadata output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `content` | string | content of the response |
|
||||
| ↳ `metadata` | json | metadata of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -228,8 +228,9 @@ Invite attendees to an existing Google Calendar event
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `content` | string | content output from the block |
|
||||
| `metadata` | json | metadata output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `content` | string | content of the response |
|
||||
| ↳ `metadata` | json | metadata of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -159,9 +159,10 @@ Create a new Google Docs document
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `content` | string | content output from the block |
|
||||
| `metadata` | json | metadata output from the block |
|
||||
| `updatedContent` | boolean | updatedContent output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `content` | string | content of the response |
|
||||
| ↳ `metadata` | json | metadata of the response |
|
||||
| ↳ `updatedContent` | boolean | updatedContent of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -177,8 +177,9 @@ List files and folders in Google Drive
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `file` | json | file output from the block |
|
||||
| `files` | json | files output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `file` | json | file of the response |
|
||||
| ↳ `files` | json | files of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -101,11 +101,7 @@ Search the web with the Custom Search API
|
||||
|
||||
### Outputs
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `items` | json | items output from the block |
|
||||
| `searchInformation` | json | searchInformation output from the block |
|
||||
|
||||
This block does not produce any outputs.
|
||||
|
||||
## Notes
|
||||
|
||||
|
||||
@@ -212,13 +212,14 @@ Append data to the end of a Google Sheets spreadsheet
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `data` | json | data output from the block |
|
||||
| `metadata` | json | metadata output from the block |
|
||||
| `updatedRange` | string | updatedRange output from the block |
|
||||
| `updatedRows` | number | updatedRows output from the block |
|
||||
| `updatedColumns` | number | updatedColumns output from the block |
|
||||
| `updatedCells` | number | updatedCells output from the block |
|
||||
| `tableRange` | string | tableRange output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `data` | json | data of the response |
|
||||
| ↳ `metadata` | json | metadata of the response |
|
||||
| ↳ `updatedRange` | string | updatedRange of the response |
|
||||
| ↳ `updatedRows` | number | updatedRows of the response |
|
||||
| ↳ `updatedColumns` | number | updatedColumns of the response |
|
||||
| ↳ `updatedCells` | number | updatedCells of the response |
|
||||
| ↳ `tableRange` | string | tableRange of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -107,14 +107,15 @@ Search for guests in Guesty by phone number
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `id` | string | id output from the block |
|
||||
| `guest` | json | guest output from the block |
|
||||
| `checkIn` | string | checkIn output from the block |
|
||||
| `checkOut` | string | checkOut output from the block |
|
||||
| `status` | string | status output from the block |
|
||||
| `listing` | json | listing output from the block |
|
||||
| `money` | json | money output from the block |
|
||||
| `guests` | json | guests output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `id` | string | id of the response |
|
||||
| ↳ `guest` | json | guest of the response |
|
||||
| ↳ `checkIn` | string | checkIn of the response |
|
||||
| ↳ `checkOut` | string | checkOut of the response |
|
||||
| ↳ `status` | string | status of the response |
|
||||
| ↳ `listing` | json | listing of the response |
|
||||
| ↳ `money` | json | money of the response |
|
||||
| ↳ `guests` | json | guests of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -115,9 +115,10 @@ Generate completions using Hugging Face Inference API
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `content` | string | content output from the block |
|
||||
| `model` | string | model output from the block |
|
||||
| `usage` | json | usage output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `content` | string | content of the response |
|
||||
| ↳ `model` | string | model of the response |
|
||||
| ↳ `usage` | json | usage of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -93,9 +93,10 @@ Generate images using OpenAI
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `content` | string | content output from the block |
|
||||
| `image` | string | image output from the block |
|
||||
| `metadata` | json | metadata output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `content` | string | content of the response |
|
||||
| ↳ `image` | string | image of the response |
|
||||
| ↳ `metadata` | json | metadata of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -101,7 +101,8 @@ Extract and process web content into clean, LLM-friendly text using Jina AI Read
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `content` | string | content output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `content` | string | content of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -165,14 +165,15 @@ Retrieve multiple Jira issues in bulk
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `ts` | string | ts output from the block |
|
||||
| `issueKey` | string | issueKey output from the block |
|
||||
| `summary` | string | summary output from the block |
|
||||
| `description` | string | description output from the block |
|
||||
| `created` | string | created output from the block |
|
||||
| `updated` | string | updated output from the block |
|
||||
| `success` | boolean | success output from the block |
|
||||
| `url` | string | url output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `ts` | string | ts of the response |
|
||||
| ↳ `issueKey` | string | issueKey of the response |
|
||||
| ↳ `summary` | string | summary of the response |
|
||||
| ↳ `description` | string | description of the response |
|
||||
| ↳ `created` | string | created of the response |
|
||||
| ↳ `updated` | string | updated of the response |
|
||||
| ↳ `success` | boolean | success of the response |
|
||||
| ↳ `url` | string | url of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -66,13 +66,6 @@ Search for similar content in one or more knowledge bases using vector similarit
|
||||
| `knowledgeBaseIds` | string | Yes | ID of the knowledge base to search in, or comma-separated IDs for multiple knowledge bases |
|
||||
| `query` | string | Yes | Search query text |
|
||||
| `topK` | number | No | Number of most similar results to return \(1-100\) |
|
||||
| `tag1` | string | No | Filter by tag 1 value |
|
||||
| `tag2` | string | No | Filter by tag 2 value |
|
||||
| `tag3` | string | No | Filter by tag 3 value |
|
||||
| `tag4` | string | No | Filter by tag 4 value |
|
||||
| `tag5` | string | No | Filter by tag 5 value |
|
||||
| `tag6` | string | No | Filter by tag 6 value |
|
||||
| `tag7` | string | No | Filter by tag 7 value |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -118,13 +111,6 @@ Create a new document in a knowledge base
|
||||
| `knowledgeBaseId` | string | Yes | ID of the knowledge base containing the document |
|
||||
| `name` | string | Yes | Name of the document |
|
||||
| `content` | string | Yes | Content of the document |
|
||||
| `tag1` | string | No | Tag 1 value for the document |
|
||||
| `tag2` | string | No | Tag 2 value for the document |
|
||||
| `tag3` | string | No | Tag 3 value for the document |
|
||||
| `tag4` | string | No | Tag 4 value for the document |
|
||||
| `tag5` | string | No | Tag 5 value for the document |
|
||||
| `tag6` | string | No | Tag 6 value for the document |
|
||||
| `tag7` | string | No | Tag 7 value for the document |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -149,9 +135,10 @@ Create a new document in a knowledge base
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `results` | json | results output from the block |
|
||||
| `query` | string | query output from the block |
|
||||
| `totalResults` | number | totalResults output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `results` | json | results of the response |
|
||||
| ↳ `query` | string | query of the response |
|
||||
| ↳ `totalResults` | number | totalResults of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -105,8 +105,9 @@ Create a new issue in Linear
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `issues` | json | issues output from the block |
|
||||
| `issue` | json | issue output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `issues` | json | issues of the response |
|
||||
| ↳ `issue` | json | issue of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -92,8 +92,9 @@ Search the web for information using Linkup
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `answer` | string | answer output from the block |
|
||||
| `sources` | json | sources output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `answer` | string | answer of the response |
|
||||
| ↳ `sources` | json | sources of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -126,9 +126,10 @@ Retrieve memories from Mem0 by ID or filter criteria
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `ids` | any | ids output from the block |
|
||||
| `memories` | any | memories output from the block |
|
||||
| `searchResults` | any | searchResults output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `ids` | any | ids of the response |
|
||||
| ↳ `memories` | any | memories of the response |
|
||||
| ↳ `searchResults` | any | searchResults of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -124,8 +124,9 @@ Delete a specific memory by its ID
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `memories` | any | memories output from the block |
|
||||
| `id` | string | id output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `memories` | any | memories of the response |
|
||||
| ↳ `id` | string | id of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -180,14 +180,15 @@ Add new rows to a Microsoft Excel table
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `data` | json | data output from the block |
|
||||
| `metadata` | json | metadata output from the block |
|
||||
| `updatedRange` | string | updatedRange output from the block |
|
||||
| `updatedRows` | number | updatedRows output from the block |
|
||||
| `updatedColumns` | number | updatedColumns output from the block |
|
||||
| `updatedCells` | number | updatedCells output from the block |
|
||||
| `index` | number | index output from the block |
|
||||
| `values` | json | values output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `data` | json | data of the response |
|
||||
| ↳ `metadata` | json | metadata of the response |
|
||||
| ↳ `updatedRange` | string | updatedRange of the response |
|
||||
| ↳ `updatedRows` | number | updatedRows of the response |
|
||||
| ↳ `updatedColumns` | number | updatedColumns of the response |
|
||||
| ↳ `updatedCells` | number | updatedCells of the response |
|
||||
| ↳ `index` | number | index of the response |
|
||||
| ↳ `values` | json | values of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -205,9 +205,10 @@ Write or send a message to a Microsoft Teams channel
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `content` | string | content output from the block |
|
||||
| `metadata` | json | metadata output from the block |
|
||||
| `updatedContent` | boolean | updatedContent output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `content` | string | content of the response |
|
||||
| ↳ `metadata` | json | metadata of the response |
|
||||
| ↳ `updatedContent` | boolean | updatedContent of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -122,8 +122,9 @@ This tool does not produce any outputs.
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `content` | string | content output from the block |
|
||||
| `metadata` | json | metadata output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `content` | string | content of the response |
|
||||
| ↳ `metadata` | json | metadata of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -117,8 +117,9 @@ Create a new page in Notion
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `content` | string | content output from the block |
|
||||
| `metadata` | any | metadata output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `content` | string | content of the response |
|
||||
| ↳ `metadata` | any | metadata of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -88,9 +88,10 @@ Generate embeddings from text using OpenAI
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `embeddings` | json | embeddings output from the block |
|
||||
| `model` | string | model output from the block |
|
||||
| `usage` | json | usage output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `embeddings` | json | embeddings of the response |
|
||||
| ↳ `model` | string | model of the response |
|
||||
| ↳ `usage` | json | usage of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -225,8 +225,9 @@ Read emails from Outlook
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `message` | string | message output from the block |
|
||||
| `results` | json | results output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `message` | string | message of the response |
|
||||
| ↳ `results` | json | results of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -83,9 +83,10 @@ Generate completions using Perplexity AI chat models
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `content` | string | content output from the block |
|
||||
| `model` | string | model output from the block |
|
||||
| `usage` | json | usage output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `content` | string | content of the response |
|
||||
| ↳ `model` | string | model of the response |
|
||||
| ↳ `usage` | json | usage of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -181,12 +181,13 @@ Fetch vectors by ID from a Pinecone index
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `matches` | any | matches output from the block |
|
||||
| `upsertedCount` | any | upsertedCount output from the block |
|
||||
| `data` | any | data output from the block |
|
||||
| `model` | any | model output from the block |
|
||||
| `vector_type` | any | vector_type output from the block |
|
||||
| `usage` | any | usage output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `matches` | any | matches of the response |
|
||||
| ↳ `upsertedCount` | any | upsertedCount of the response |
|
||||
| ↳ `data` | any | data of the response |
|
||||
| ↳ `model` | any | model of the response |
|
||||
| ↳ `vector_type` | any | vector_type of the response |
|
||||
| ↳ `usage` | any | usage of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -129,10 +129,11 @@ Fetch comments from a specific Reddit post
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `subreddit` | string | subreddit output from the block |
|
||||
| `posts` | json | posts output from the block |
|
||||
| `post` | json | post output from the block |
|
||||
| `comments` | json | comments output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `subreddit` | string | subreddit of the response |
|
||||
| ↳ `posts` | json | posts of the response |
|
||||
| ↳ `post` | json | post of the response |
|
||||
| ↳ `comments` | json | comments of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -89,8 +89,9 @@ Retrieve an object from an AWS S3 bucket
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `url` | string | url output from the block |
|
||||
| `metadata` | json | metadata output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `url` | string | url of the response |
|
||||
| ↳ `metadata` | json | metadata of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -121,7 +121,8 @@ A powerful web search tool that provides access to Google search results through
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `searchResults` | json | searchResults output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `searchResults` | json | searchResults of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -98,8 +98,9 @@ Send messages to Slack channels or users through the Slack API. Supports Slack m
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `ts` | string | ts output from the block |
|
||||
| `channel` | string | channel output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `ts` | string | ts of the response |
|
||||
| ↳ `channel` | string | channel of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -232,7 +232,8 @@ Extract structured data from a webpage using Stagehand
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `data` | json | data output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `data` | json | data of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -240,8 +240,9 @@ Run an autonomous web agent to complete tasks and extract structured data
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `agentResult` | json | agentResult output from the block |
|
||||
| `structuredOutput` | any | structuredOutput output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `agentResult` | json | agentResult of the response |
|
||||
| ↳ `structuredOutput` | any | structuredOutput of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -127,8 +127,9 @@ Insert data into a Supabase table
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `message` | string | message output from the block |
|
||||
| `results` | json | results output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `message` | string | message of the response |
|
||||
| ↳ `results` | json | results of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -121,12 +121,13 @@ Extract raw content from multiple web pages simultaneously using Tavily
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `results` | json | results output from the block |
|
||||
| `answer` | any | answer output from the block |
|
||||
| `query` | string | query output from the block |
|
||||
| `content` | string | content output from the block |
|
||||
| `title` | string | title output from the block |
|
||||
| `url` | string | url output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `results` | json | results of the response |
|
||||
| ↳ `answer` | any | answer of the response |
|
||||
| ↳ `query` | string | query of the response |
|
||||
| ↳ `content` | string | content of the response |
|
||||
| ↳ `title` | string | title of the response |
|
||||
| ↳ `url` | string | url of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -121,8 +121,9 @@ Send messages to Telegram channels or users through the Telegram Bot API. Enable
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `ok` | boolean | ok output from the block |
|
||||
| `result` | json | result output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `ok` | boolean | ok of the response |
|
||||
| ↳ `result` | json | result of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -87,7 +87,8 @@ Processes a provided thought/instruction, making it available for subsequent ste
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `acknowledgedThought` | string | acknowledgedThought output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `acknowledgedThought` | string | acknowledgedThought of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -95,9 +95,10 @@ This tool does not produce any outputs.
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `content` | string | content output from the block |
|
||||
| `model` | string | model output from the block |
|
||||
| `tokens` | any | tokens output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `content` | string | content of the response |
|
||||
| ↳ `model` | string | model of the response |
|
||||
| ↳ `tokens` | any | tokens of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -78,10 +78,11 @@ Send text messages to single or multiple recipients using the Twilio API.
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `success` | boolean | success output from the block |
|
||||
| `messageId` | any | messageId output from the block |
|
||||
| `status` | any | status output from the block |
|
||||
| `error` | any | error output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `success` | boolean | success of the response |
|
||||
| ↳ `messageId` | any | messageId of the response |
|
||||
| ↳ `status` | any | status of the response |
|
||||
| ↳ `error` | any | error of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -126,9 +126,10 @@ This tool does not produce any outputs.
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `total_items` | number | total_items output from the block |
|
||||
| `page_count` | number | page_count output from the block |
|
||||
| `items` | json | items output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `total_items` | number | total_items of the response |
|
||||
| ↳ `page_count` | number | page_count of the response |
|
||||
| ↳ `items` | json | items of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -90,9 +90,10 @@ Process and analyze images using advanced vision models. Capable of understandin
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `content` | string | content output from the block |
|
||||
| `model` | any | model output from the block |
|
||||
| `tokens` | any | tokens output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `content` | string | content of the response |
|
||||
| ↳ `model` | any | model of the response |
|
||||
| ↳ `tokens` | any | tokens of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -79,9 +79,10 @@ Send WhatsApp messages
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `success` | boolean | success output from the block |
|
||||
| `messageId` | any | messageId output from the block |
|
||||
| `error` | any | error output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `success` | boolean | success of the response |
|
||||
| ↳ `messageId` | any | messageId of the response |
|
||||
| ↳ `error` | any | error of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -145,14 +145,15 @@ Get user profile information
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `tweet` | json | tweet output from the block |
|
||||
| `replies` | any | replies output from the block |
|
||||
| `context` | any | context output from the block |
|
||||
| `tweets` | json | tweets output from the block |
|
||||
| `includes` | any | includes output from the block |
|
||||
| `meta` | json | meta output from the block |
|
||||
| `user` | json | user output from the block |
|
||||
| `recentTweets` | any | recentTweets output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `tweet` | json | tweet of the response |
|
||||
| ↳ `replies` | any | replies of the response |
|
||||
| ↳ `context` | any | context of the response |
|
||||
| ↳ `tweets` | json | tweets of the response |
|
||||
| ↳ `includes` | any | includes of the response |
|
||||
| ↳ `meta` | json | meta of the response |
|
||||
| ↳ `user` | json | user of the response |
|
||||
| ↳ `recentTweets` | any | recentTweets of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -82,8 +82,9 @@ Search for videos on YouTube using the YouTube Data API.
|
||||
|
||||
| Output | Type | Description |
|
||||
| ------ | ---- | ----------- |
|
||||
| `items` | json | items output from the block |
|
||||
| `totalResults` | number | totalResults output from the block |
|
||||
| `response` | object | Output from response |
|
||||
| ↳ `items` | json | items of the response |
|
||||
| ↳ `totalResults` | number | totalResults of the response |
|
||||
|
||||
|
||||
## Notes
|
||||
|
||||
@@ -15,3 +15,5 @@ ENCRYPTION_KEY=your_encryption_key # Use `openssl rand -hex 32` to generate
|
||||
# RESEND_API_KEY= # Uncomment and add your key from https://resend.com to send actual emails
|
||||
# If left commented out, emails will be logged to console instead
|
||||
|
||||
# Freestyle API Key (Required for sandboxed code execution for functions/custom-tools)
|
||||
# FREESTYLE_API_KEY= # Uncomment and add your key from https://docs.freestyle.sh/Getting-Started/run
|
||||
|
||||
116
apps/sim/app/(landing)/components/waitlist-form.tsx
Normal file
116
apps/sim/app/(landing)/components/waitlist-form.tsx
Normal file
@@ -0,0 +1,116 @@
|
||||
'use client'
|
||||
|
||||
import { useState } from 'react'
|
||||
import { z } from 'zod'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Input } from '@/components/ui/input'
|
||||
|
||||
const emailSchema = z.string().email('Please enter a valid email')
|
||||
|
||||
export default function WaitlistForm() {
|
||||
const [email, setEmail] = useState('')
|
||||
const [isSubmitting, setIsSubmitting] = useState(false)
|
||||
const [status, setStatus] = useState<'idle' | 'success' | 'error' | 'exists' | 'ratelimited'>(
|
||||
'idle'
|
||||
)
|
||||
const [_errorMessage, setErrorMessage] = useState('')
|
||||
const [_retryAfter, setRetryAfter] = useState<number | null>(null)
|
||||
|
||||
const handleSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault()
|
||||
setStatus('idle')
|
||||
setErrorMessage('')
|
||||
setRetryAfter(null)
|
||||
|
||||
try {
|
||||
// Validate email
|
||||
emailSchema.parse(email)
|
||||
|
||||
setIsSubmitting(true)
|
||||
const response = await fetch('/api/waitlist', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({ email }),
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
// Check for rate limiting (429 status)
|
||||
if (response.status === 429) {
|
||||
setStatus('ratelimited')
|
||||
setErrorMessage(data.message || 'Too many attempts. Please try again later.')
|
||||
setRetryAfter(data.retryAfter || 60)
|
||||
}
|
||||
// Check if the error is because the email already exists
|
||||
else if (response.status === 400 && data.message?.includes('already exists')) {
|
||||
setStatus('exists')
|
||||
setErrorMessage('Already on the waitlist')
|
||||
} else {
|
||||
setStatus('error')
|
||||
setErrorMessage(data.message || 'Failed to join waitlist')
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
setStatus('success')
|
||||
setEmail('')
|
||||
} catch (_error) {
|
||||
setStatus('error')
|
||||
setErrorMessage('Please try again')
|
||||
} finally {
|
||||
setIsSubmitting(false)
|
||||
}
|
||||
}
|
||||
|
||||
const getButtonText = () => {
|
||||
if (isSubmitting) return 'Joining...'
|
||||
if (status === 'success') return 'Joined!'
|
||||
if (status === 'error') return 'Try again'
|
||||
if (status === 'exists') return 'Already joined'
|
||||
if (status === 'ratelimited') return 'Try again later'
|
||||
return 'Join waitlist'
|
||||
}
|
||||
|
||||
const getButtonStyle = () => {
|
||||
switch (status) {
|
||||
case 'success':
|
||||
return 'bg-green-500 hover:bg-green-600'
|
||||
case 'error':
|
||||
return 'bg-red-500 hover:bg-red-600'
|
||||
case 'exists':
|
||||
return 'bg-amber-500 hover:bg-amber-600'
|
||||
case 'ratelimited':
|
||||
return 'bg-gray-500 hover:bg-gray-600'
|
||||
default:
|
||||
return 'bg-white text-black hover:bg-gray-100'
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<form
|
||||
onSubmit={handleSubmit}
|
||||
className='mx-auto mt-8 flex max-w-lg flex-col items-center gap-3'
|
||||
>
|
||||
<div className='flex w-full gap-3'>
|
||||
<Input
|
||||
type='email'
|
||||
placeholder='you@example.com'
|
||||
className='h-[49px] flex-1 rounded-md border-white/20 bg-[#020817] text-sm focus:border-white/30 focus:ring-white/30 md:text-md lg:text-[16px]'
|
||||
value={email}
|
||||
onChange={(e) => setEmail(e.target.value)}
|
||||
disabled={isSubmitting || status === 'ratelimited'}
|
||||
/>
|
||||
<Button
|
||||
type='submit'
|
||||
className={`h-[48px] rounded-md px-8 text-sm md:text-md ${getButtonStyle()}`}
|
||||
disabled={isSubmitting || status === 'ratelimited'}
|
||||
>
|
||||
{getButtonText()}
|
||||
</Button>
|
||||
</div>
|
||||
</form>
|
||||
)
|
||||
}
|
||||
@@ -619,13 +619,6 @@ export function mockKnowledgeSchemas() {
|
||||
processingCompletedAt: 'processing_completed_at',
|
||||
processingError: 'processing_error',
|
||||
enabled: 'enabled',
|
||||
tag1: 'tag1',
|
||||
tag2: 'tag2',
|
||||
tag3: 'tag3',
|
||||
tag4: 'tag4',
|
||||
tag5: 'tag5',
|
||||
tag6: 'tag6',
|
||||
tag7: 'tag7',
|
||||
uploadedAt: 'uploaded_at',
|
||||
deletedAt: 'deleted_at',
|
||||
},
|
||||
@@ -638,13 +631,6 @@ export function mockKnowledgeSchemas() {
|
||||
embedding: 'embedding',
|
||||
tokenCount: 'token_count',
|
||||
characterCount: 'character_count',
|
||||
tag1: 'tag1',
|
||||
tag2: 'tag2',
|
||||
tag3: 'tag3',
|
||||
tag4: 'tag4',
|
||||
tag5: 'tag5',
|
||||
tag6: 'tag6',
|
||||
tag7: 'tag7',
|
||||
createdAt: 'created_at',
|
||||
},
|
||||
}))
|
||||
|
||||
@@ -1,109 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { verifyCronAuth } from '@/lib/auth/internal'
|
||||
import { processDailyBillingCheck } from '@/lib/billing/core/billing'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
|
||||
const logger = createLogger('DailyBillingCron')
|
||||
|
||||
/**
|
||||
* Daily billing CRON job endpoint that checks individual billing periods
|
||||
*/
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const authError = verifyCronAuth(request, 'daily billing check')
|
||||
if (authError) {
|
||||
return authError
|
||||
}
|
||||
|
||||
logger.info('Starting daily billing check cron job')
|
||||
|
||||
const startTime = Date.now()
|
||||
|
||||
// Process overage billing for users and organizations with periods ending today
|
||||
const result = await processDailyBillingCheck()
|
||||
|
||||
const duration = Date.now() - startTime
|
||||
|
||||
if (result.success) {
|
||||
logger.info('Daily billing check completed successfully', {
|
||||
processedUsers: result.processedUsers,
|
||||
processedOrganizations: result.processedOrganizations,
|
||||
totalChargedAmount: result.totalChargedAmount,
|
||||
duration: `${duration}ms`,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
summary: {
|
||||
processedUsers: result.processedUsers,
|
||||
processedOrganizations: result.processedOrganizations,
|
||||
totalChargedAmount: result.totalChargedAmount,
|
||||
duration: `${duration}ms`,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
logger.error('Daily billing check completed with errors', {
|
||||
processedUsers: result.processedUsers,
|
||||
processedOrganizations: result.processedOrganizations,
|
||||
totalChargedAmount: result.totalChargedAmount,
|
||||
errorCount: result.errors.length,
|
||||
errors: result.errors,
|
||||
duration: `${duration}ms`,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
summary: {
|
||||
processedUsers: result.processedUsers,
|
||||
processedOrganizations: result.processedOrganizations,
|
||||
totalChargedAmount: result.totalChargedAmount,
|
||||
errorCount: result.errors.length,
|
||||
duration: `${duration}ms`,
|
||||
},
|
||||
errors: result.errors,
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
} catch (error) {
|
||||
logger.error('Fatal error in monthly billing cron job', { error })
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Internal server error during daily billing check',
|
||||
details: error instanceof Error ? error.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* GET endpoint for manual testing and health checks
|
||||
*/
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const authError = verifyCronAuth(request, 'daily billing check health check')
|
||||
if (authError) {
|
||||
return authError
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
status: 'ready',
|
||||
message:
|
||||
'Daily billing check cron job is ready to process users and organizations with periods ending today',
|
||||
currentDate: new Date().toISOString().split('T')[0],
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error in billing health check', { error })
|
||||
return NextResponse.json(
|
||||
{
|
||||
status: 'error',
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,116 +0,0 @@
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getSimplifiedBillingSummary } from '@/lib/billing/core/billing'
|
||||
import { getOrganizationBillingData } from '@/lib/billing/core/organization-billing'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { db } from '@/db'
|
||||
import { member } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('UnifiedBillingAPI')
|
||||
|
||||
/**
|
||||
* Unified Billing Endpoint
|
||||
*/
|
||||
export async function GET(request: NextRequest) {
|
||||
const session = await getSession()
|
||||
|
||||
try {
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const context = searchParams.get('context') || 'user'
|
||||
const contextId = searchParams.get('id')
|
||||
|
||||
// Validate context parameter
|
||||
if (!['user', 'organization'].includes(context)) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid context. Must be "user" or "organization"' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// For organization context, require contextId
|
||||
if (context === 'organization' && !contextId) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Organization ID is required when context=organization' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
let billingData
|
||||
|
||||
if (context === 'user') {
|
||||
// Get user billing (may include organization if they're part of one)
|
||||
billingData = await getSimplifiedBillingSummary(session.user.id, contextId || undefined)
|
||||
} else {
|
||||
// Get user role in organization for permission checks first
|
||||
const memberRecord = await db
|
||||
.select({ role: member.role })
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, contextId!), eq(member.userId, session.user.id)))
|
||||
.limit(1)
|
||||
|
||||
if (memberRecord.length === 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Access denied - not a member of this organization' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
// Get organization-specific billing
|
||||
const rawBillingData = await getOrganizationBillingData(contextId!)
|
||||
|
||||
if (!rawBillingData) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Organization not found or access denied' },
|
||||
{ status: 404 }
|
||||
)
|
||||
}
|
||||
|
||||
// Transform data to match component expectations
|
||||
billingData = {
|
||||
organizationId: rawBillingData.organizationId,
|
||||
organizationName: rawBillingData.organizationName,
|
||||
subscriptionPlan: rawBillingData.subscriptionPlan,
|
||||
subscriptionStatus: rawBillingData.subscriptionStatus,
|
||||
totalSeats: rawBillingData.totalSeats,
|
||||
usedSeats: rawBillingData.usedSeats,
|
||||
totalCurrentUsage: rawBillingData.totalCurrentUsage,
|
||||
totalUsageLimit: rawBillingData.totalUsageLimit,
|
||||
averageUsagePerMember: rawBillingData.averageUsagePerMember,
|
||||
billingPeriodStart: rawBillingData.billingPeriodStart?.toISOString() || null,
|
||||
billingPeriodEnd: rawBillingData.billingPeriodEnd?.toISOString() || null,
|
||||
members: rawBillingData.members.map((member) => ({
|
||||
...member,
|
||||
joinedAt: member.joinedAt.toISOString(),
|
||||
lastActive: member.lastActive?.toISOString() || null,
|
||||
})),
|
||||
}
|
||||
|
||||
const userRole = memberRecord[0].role
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
context,
|
||||
data: billingData,
|
||||
userRole,
|
||||
})
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
context,
|
||||
data: billingData,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to get billing data', {
|
||||
userId: session?.user?.id,
|
||||
error,
|
||||
})
|
||||
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,116 +0,0 @@
|
||||
import { headers } from 'next/headers'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import type Stripe from 'stripe'
|
||||
import { requireStripeClient } from '@/lib/billing/stripe-client'
|
||||
import { handleInvoiceWebhook } from '@/lib/billing/webhooks/stripe-invoice-webhooks'
|
||||
import { env } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
|
||||
const logger = createLogger('StripeInvoiceWebhook')
|
||||
|
||||
/**
|
||||
* Stripe billing webhook endpoint for invoice-related events
|
||||
* Endpoint: /api/billing/webhooks/stripe
|
||||
* Handles: invoice.payment_succeeded, invoice.payment_failed, invoice.finalized
|
||||
*/
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const body = await request.text()
|
||||
const headersList = await headers()
|
||||
const signature = headersList.get('stripe-signature')
|
||||
|
||||
if (!signature) {
|
||||
logger.error('Missing Stripe signature header')
|
||||
return NextResponse.json({ error: 'Missing Stripe signature' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!env.STRIPE_BILLING_WEBHOOK_SECRET) {
|
||||
logger.error('Missing Stripe webhook secret configuration')
|
||||
return NextResponse.json({ error: 'Webhook secret not configured' }, { status: 500 })
|
||||
}
|
||||
|
||||
// Check if Stripe client is available
|
||||
let stripe
|
||||
try {
|
||||
stripe = requireStripeClient()
|
||||
} catch (stripeError) {
|
||||
logger.error('Stripe client not available for webhook processing', {
|
||||
error: stripeError,
|
||||
})
|
||||
return NextResponse.json({ error: 'Stripe client not configured' }, { status: 500 })
|
||||
}
|
||||
|
||||
// Verify webhook signature
|
||||
let event: Stripe.Event
|
||||
try {
|
||||
event = stripe.webhooks.constructEvent(body, signature, env.STRIPE_BILLING_WEBHOOK_SECRET)
|
||||
} catch (signatureError) {
|
||||
logger.error('Invalid Stripe webhook signature', {
|
||||
error: signatureError,
|
||||
signature,
|
||||
})
|
||||
return NextResponse.json({ error: 'Invalid signature' }, { status: 400 })
|
||||
}
|
||||
|
||||
logger.info('Received Stripe invoice webhook', {
|
||||
eventId: event.id,
|
||||
eventType: event.type,
|
||||
})
|
||||
|
||||
// Handle specific invoice events
|
||||
const supportedEvents = [
|
||||
'invoice.payment_succeeded',
|
||||
'invoice.payment_failed',
|
||||
'invoice.finalized',
|
||||
]
|
||||
|
||||
if (supportedEvents.includes(event.type)) {
|
||||
try {
|
||||
await handleInvoiceWebhook(event)
|
||||
|
||||
logger.info('Successfully processed invoice webhook', {
|
||||
eventId: event.id,
|
||||
eventType: event.type,
|
||||
})
|
||||
|
||||
return NextResponse.json({ received: true })
|
||||
} catch (processingError) {
|
||||
logger.error('Failed to process invoice webhook', {
|
||||
eventId: event.id,
|
||||
eventType: event.type,
|
||||
error: processingError,
|
||||
})
|
||||
|
||||
// Return 500 to tell Stripe to retry the webhook
|
||||
return NextResponse.json({ error: 'Failed to process webhook' }, { status: 500 })
|
||||
}
|
||||
} else {
|
||||
// Not a supported invoice event, ignore
|
||||
logger.info('Ignoring unsupported webhook event', {
|
||||
eventId: event.id,
|
||||
eventType: event.type,
|
||||
supportedEvents,
|
||||
})
|
||||
|
||||
return NextResponse.json({ received: true })
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Fatal error in invoice webhook handler', {
|
||||
error,
|
||||
url: request.url,
|
||||
})
|
||||
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* GET endpoint for webhook health checks
|
||||
*/
|
||||
export async function GET() {
|
||||
return NextResponse.json({
|
||||
status: 'healthy',
|
||||
webhook: 'stripe-invoices',
|
||||
events: ['invoice.payment_succeeded', 'invoice.payment_failed', 'invoice.finalized'],
|
||||
})
|
||||
}
|
||||
@@ -1,7 +1,8 @@
|
||||
import { render } from '@react-email/render'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { renderOTPEmail } from '@/components/emails/render-email'
|
||||
import OTPVerificationEmail from '@/components/emails/otp-verification-email'
|
||||
import { sendEmail } from '@/lib/email/mailer'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { getRedisClient, markMessageAsProcessed, releaseLock } from '@/lib/redis'
|
||||
@@ -157,6 +158,7 @@ export async function POST(
|
||||
? deployment.allowedEmails
|
||||
: []
|
||||
|
||||
// Check if the email is allowed
|
||||
const isEmailAllowed =
|
||||
allowedEmails.includes(email) ||
|
||||
allowedEmails.some((allowed: string) => {
|
||||
@@ -174,17 +176,24 @@ export async function POST(
|
||||
)
|
||||
}
|
||||
|
||||
// Generate OTP
|
||||
const otp = generateOTP()
|
||||
|
||||
// Store OTP in Redis - AWAIT THIS BEFORE RETURNING RESPONSE
|
||||
await storeOTP(email, deployment.id, otp)
|
||||
|
||||
const emailHtml = await renderOTPEmail(
|
||||
// Create the email
|
||||
const emailContent = OTPVerificationEmail({
|
||||
otp,
|
||||
email,
|
||||
'email-verification',
|
||||
deployment.title || 'Chat'
|
||||
)
|
||||
type: 'chat-access',
|
||||
chatTitle: deployment.title || 'Chat',
|
||||
})
|
||||
|
||||
// await the render function
|
||||
const emailHtml = await render(emailContent)
|
||||
|
||||
// MAKE SURE TO AWAIT THE EMAIL SENDING
|
||||
const emailResult = await sendEmail({
|
||||
to: email,
|
||||
subject: `Verification code for ${deployment.title || 'Chat'}`,
|
||||
|
||||
@@ -194,7 +194,6 @@ export async function GET(
|
||||
description: deployment.description,
|
||||
customizations: deployment.customizations,
|
||||
authType: deployment.authType,
|
||||
outputConfigs: deployment.outputConfigs,
|
||||
}),
|
||||
request
|
||||
)
|
||||
@@ -220,7 +219,6 @@ export async function GET(
|
||||
description: deployment.description,
|
||||
customizations: deployment.customizations,
|
||||
authType: deployment.authType,
|
||||
outputConfigs: deployment.outputConfigs,
|
||||
}),
|
||||
request
|
||||
)
|
||||
|
||||
@@ -263,26 +263,17 @@ export async function executeWorkflowForChat(
|
||||
let outputBlockIds: string[] = []
|
||||
|
||||
// Extract output configs from the new schema format
|
||||
let selectedOutputIds: string[] = []
|
||||
if (deployment.outputConfigs && Array.isArray(deployment.outputConfigs)) {
|
||||
// Extract output IDs in the format expected by the streaming processor
|
||||
// Extract block IDs and paths from the new outputConfigs array format
|
||||
logger.debug(
|
||||
`[${requestId}] Found ${deployment.outputConfigs.length} output configs in deployment`
|
||||
)
|
||||
|
||||
selectedOutputIds = deployment.outputConfigs.map((config) => {
|
||||
const outputId = config.path
|
||||
? `${config.blockId}_${config.path}`
|
||||
: `${config.blockId}.content`
|
||||
|
||||
deployment.outputConfigs.forEach((config) => {
|
||||
logger.debug(
|
||||
`[${requestId}] Processing output config: blockId=${config.blockId}, path=${config.path || 'content'} -> outputId=${outputId}`
|
||||
`[${requestId}] Processing output config: blockId=${config.blockId}, path=${config.path || 'none'}`
|
||||
)
|
||||
|
||||
return outputId
|
||||
})
|
||||
|
||||
// Also extract block IDs for legacy compatibility
|
||||
outputBlockIds = deployment.outputConfigs.map((config) => config.blockId)
|
||||
} else {
|
||||
// Use customizations as fallback
|
||||
@@ -300,9 +291,7 @@ export async function executeWorkflowForChat(
|
||||
outputBlockIds = customizations.outputBlockIds
|
||||
}
|
||||
|
||||
logger.debug(
|
||||
`[${requestId}] Using ${outputBlockIds.length} output blocks and ${selectedOutputIds.length} selected output IDs for extraction`
|
||||
)
|
||||
logger.debug(`[${requestId}] Using ${outputBlockIds.length} output blocks for extraction`)
|
||||
|
||||
// Find the workflow (deployedState is NOT deprecated - needed for chat execution)
|
||||
const workflowResult = await db
|
||||
@@ -468,7 +457,7 @@ export async function executeWorkflowForChat(
|
||||
workflowVariables,
|
||||
contextExtensions: {
|
||||
stream: true,
|
||||
selectedOutputIds: selectedOutputIds.length > 0 ? selectedOutputIds : outputBlockIds,
|
||||
selectedOutputIds: outputBlockIds,
|
||||
edges: edges.map((e: any) => ({
|
||||
source: e.source,
|
||||
target: e.target,
|
||||
|
||||
@@ -1,281 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import {
|
||||
type CopilotChat,
|
||||
type CopilotMessage,
|
||||
createChat,
|
||||
generateChatTitle,
|
||||
generateDocsResponse,
|
||||
getChat,
|
||||
updateChat,
|
||||
} from '@/lib/copilot/service'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
|
||||
const logger = createLogger('CopilotDocsAPI')
|
||||
|
||||
// Schema for docs queries
|
||||
const DocsQuerySchema = z.object({
|
||||
query: z.string().min(1, 'Query is required'),
|
||||
topK: z.number().min(1).max(20).default(5),
|
||||
provider: z.string().optional(),
|
||||
model: z.string().optional(),
|
||||
stream: z.boolean().optional().default(false),
|
||||
chatId: z.string().optional(),
|
||||
workflowId: z.string().optional(),
|
||||
createNewChat: z.boolean().optional().default(false),
|
||||
})
|
||||
|
||||
/**
|
||||
* POST /api/copilot/docs
|
||||
* Ask questions about documentation using RAG
|
||||
*/
|
||||
export async function POST(req: NextRequest) {
|
||||
const requestId = crypto.randomUUID()
|
||||
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
const { query, topK, provider, model, stream, chatId, workflowId, createNewChat } =
|
||||
DocsQuerySchema.parse(body)
|
||||
|
||||
logger.info(`[${requestId}] Docs RAG query: "${query}"`, {
|
||||
provider,
|
||||
model,
|
||||
topK,
|
||||
chatId,
|
||||
workflowId,
|
||||
createNewChat,
|
||||
userId: session.user.id,
|
||||
})
|
||||
|
||||
// Handle chat context
|
||||
let currentChat: CopilotChat | null = null
|
||||
let conversationHistory: CopilotMessage[] = []
|
||||
|
||||
if (chatId) {
|
||||
// Load existing chat
|
||||
currentChat = await getChat(chatId, session.user.id)
|
||||
if (currentChat) {
|
||||
conversationHistory = currentChat.messages
|
||||
}
|
||||
} else if (createNewChat && workflowId) {
|
||||
// Create new chat
|
||||
currentChat = await createChat(session.user.id, workflowId)
|
||||
}
|
||||
|
||||
// Generate docs response
|
||||
const result = await generateDocsResponse(query, conversationHistory, {
|
||||
topK,
|
||||
provider,
|
||||
model,
|
||||
stream,
|
||||
workflowId,
|
||||
requestId,
|
||||
})
|
||||
|
||||
if (stream && result.response instanceof ReadableStream) {
|
||||
// Handle streaming response with docs sources
|
||||
logger.info(`[${requestId}] Returning streaming docs response`)
|
||||
|
||||
const encoder = new TextEncoder()
|
||||
|
||||
return new Response(
|
||||
new ReadableStream({
|
||||
async start(controller) {
|
||||
const reader = (result.response as ReadableStream).getReader()
|
||||
let accumulatedResponse = ''
|
||||
|
||||
try {
|
||||
// Send initial metadata including sources
|
||||
const metadata = {
|
||||
type: 'metadata',
|
||||
chatId: currentChat?.id,
|
||||
sources: result.sources,
|
||||
citations: result.sources.map((source, index) => ({
|
||||
id: index + 1,
|
||||
title: source.title,
|
||||
url: source.url,
|
||||
})),
|
||||
metadata: {
|
||||
requestId,
|
||||
chunksFound: result.sources.length,
|
||||
query,
|
||||
topSimilarity: result.sources[0]?.similarity,
|
||||
provider,
|
||||
model,
|
||||
},
|
||||
}
|
||||
controller.enqueue(encoder.encode(`data: ${JSON.stringify(metadata)}\n\n`))
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
|
||||
const chunk = new TextDecoder().decode(value)
|
||||
// Clean up any object serialization artifacts in streaming content
|
||||
const cleanedChunk = chunk.replace(/\[object Object\],?/g, '')
|
||||
accumulatedResponse += cleanedChunk
|
||||
|
||||
const contentChunk = {
|
||||
type: 'content',
|
||||
content: cleanedChunk,
|
||||
}
|
||||
controller.enqueue(encoder.encode(`data: ${JSON.stringify(contentChunk)}\n\n`))
|
||||
}
|
||||
|
||||
// Send completion marker first to unblock the user
|
||||
controller.enqueue(encoder.encode(`data: {"type":"done"}\n\n`))
|
||||
|
||||
// Save conversation to database asynchronously (non-blocking)
|
||||
if (currentChat) {
|
||||
// Fire-and-forget database save to avoid blocking stream completion
|
||||
Promise.resolve()
|
||||
.then(async () => {
|
||||
try {
|
||||
const userMessage: CopilotMessage = {
|
||||
id: crypto.randomUUID(),
|
||||
role: 'user',
|
||||
content: query,
|
||||
timestamp: new Date().toISOString(),
|
||||
}
|
||||
|
||||
const assistantMessage: CopilotMessage = {
|
||||
id: crypto.randomUUID(),
|
||||
role: 'assistant',
|
||||
content: accumulatedResponse,
|
||||
timestamp: new Date().toISOString(),
|
||||
citations: result.sources.map((source, index) => ({
|
||||
id: index + 1,
|
||||
title: source.title,
|
||||
url: source.url,
|
||||
})),
|
||||
}
|
||||
|
||||
const updatedMessages = [
|
||||
...conversationHistory,
|
||||
userMessage,
|
||||
assistantMessage,
|
||||
]
|
||||
|
||||
// Generate title if this is the first message
|
||||
let updatedTitle = currentChat.title ?? undefined
|
||||
if (!updatedTitle && conversationHistory.length === 0) {
|
||||
updatedTitle = await generateChatTitle(query)
|
||||
}
|
||||
|
||||
// Update the chat in database
|
||||
await updateChat(currentChat.id, session.user.id, {
|
||||
title: updatedTitle,
|
||||
messages: updatedMessages,
|
||||
})
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Updated chat ${currentChat.id} with new docs messages`
|
||||
)
|
||||
} catch (dbError) {
|
||||
logger.error(`[${requestId}] Failed to save chat to database:`, dbError)
|
||||
// Database errors don't affect the user's streaming experience
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
logger.error(`[${requestId}] Unexpected error in async database save:`, error)
|
||||
})
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Docs streaming error:`, error)
|
||||
try {
|
||||
const errorChunk = {
|
||||
type: 'error',
|
||||
error: 'Streaming failed',
|
||||
}
|
||||
controller.enqueue(encoder.encode(`data: ${JSON.stringify(errorChunk)}\n\n`))
|
||||
} catch (enqueueError) {
|
||||
logger.error(`[${requestId}] Failed to enqueue error response:`, enqueueError)
|
||||
}
|
||||
} finally {
|
||||
controller.close()
|
||||
}
|
||||
},
|
||||
}),
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache',
|
||||
Connection: 'keep-alive',
|
||||
},
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
// Handle non-streaming response
|
||||
logger.info(`[${requestId}] Docs RAG response generated successfully`)
|
||||
|
||||
// Save conversation to database if we have a chat
|
||||
if (currentChat) {
|
||||
const userMessage: CopilotMessage = {
|
||||
id: crypto.randomUUID(),
|
||||
role: 'user',
|
||||
content: query,
|
||||
timestamp: new Date().toISOString(),
|
||||
}
|
||||
|
||||
const assistantMessage: CopilotMessage = {
|
||||
id: crypto.randomUUID(),
|
||||
role: 'assistant',
|
||||
content: typeof result.response === 'string' ? result.response : '[Streaming Response]',
|
||||
timestamp: new Date().toISOString(),
|
||||
citations: result.sources.map((source, index) => ({
|
||||
id: index + 1,
|
||||
title: source.title,
|
||||
url: source.url,
|
||||
})),
|
||||
}
|
||||
|
||||
const updatedMessages = [...conversationHistory, userMessage, assistantMessage]
|
||||
|
||||
// Generate title if this is the first message
|
||||
let updatedTitle = currentChat.title ?? undefined
|
||||
if (!updatedTitle && conversationHistory.length === 0) {
|
||||
updatedTitle = await generateChatTitle(query)
|
||||
}
|
||||
|
||||
// Update the chat in database
|
||||
await updateChat(currentChat.id, session.user.id, {
|
||||
title: updatedTitle,
|
||||
messages: updatedMessages,
|
||||
})
|
||||
|
||||
logger.info(`[${requestId}] Updated chat ${currentChat.id} with new docs messages`)
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
response: result.response,
|
||||
sources: result.sources,
|
||||
chatId: currentChat?.id,
|
||||
metadata: {
|
||||
requestId,
|
||||
chunksFound: result.sources.length,
|
||||
query,
|
||||
topSimilarity: result.sources[0]?.similarity,
|
||||
provider,
|
||||
model,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Copilot docs error:`, error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,425 +1,214 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { OpenAI } from 'openai'
|
||||
import type { ChatCompletionMessageParam } from 'openai/resources/chat/completions'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import {
|
||||
createChat,
|
||||
deleteChat,
|
||||
generateChatTitle,
|
||||
getChat,
|
||||
listChats,
|
||||
sendMessage,
|
||||
updateChat,
|
||||
} from '@/lib/copilot/service'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
|
||||
const logger = createLogger('CopilotAPI')
|
||||
|
||||
// Interface for StreamingExecution response
|
||||
interface StreamingExecution {
|
||||
stream: ReadableStream
|
||||
execution: Promise<any>
|
||||
}
|
||||
|
||||
// Schema for sending messages
|
||||
const SendMessageSchema = z.object({
|
||||
message: z.string().min(1, 'Message is required'),
|
||||
chatId: z.string().optional(),
|
||||
workflowId: z.string().optional(),
|
||||
createNewChat: z.boolean().optional().default(false),
|
||||
stream: z.boolean().optional().default(false),
|
||||
const MessageSchema = z.object({
|
||||
role: z.enum(['user', 'assistant', 'system']),
|
||||
content: z.string(),
|
||||
})
|
||||
|
||||
// Schema for docs queries
|
||||
const DocsQuerySchema = z.object({
|
||||
query: z.string().min(1, 'Query is required'),
|
||||
topK: z.number().min(1).max(20).default(5),
|
||||
provider: z.string().optional(),
|
||||
model: z.string().optional(),
|
||||
stream: z.boolean().optional().default(false),
|
||||
chatId: z.string().optional(),
|
||||
workflowId: z.string().optional(),
|
||||
createNewChat: z.boolean().optional().default(false),
|
||||
const RequestSchema = z.object({
|
||||
messages: z.array(MessageSchema),
|
||||
workflowState: z.object({
|
||||
blocks: z.record(z.any()),
|
||||
edges: z.array(z.any()),
|
||||
}),
|
||||
})
|
||||
|
||||
// Schema for creating chats
|
||||
const CreateChatSchema = z.object({
|
||||
workflowId: z.string().min(1, 'Workflow ID is required'),
|
||||
title: z.string().optional(),
|
||||
initialMessage: z.string().optional(),
|
||||
})
|
||||
|
||||
// Schema for updating chats
|
||||
const UpdateChatSchema = z.object({
|
||||
chatId: z.string().min(1, 'Chat ID is required'),
|
||||
messages: z
|
||||
.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
role: z.enum(['user', 'assistant', 'system']),
|
||||
content: z.string(),
|
||||
timestamp: z.string(),
|
||||
citations: z
|
||||
.array(
|
||||
z.object({
|
||||
id: z.number(),
|
||||
title: z.string(),
|
||||
url: z.string(),
|
||||
similarity: z.number().optional(),
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
title: z.string().optional(),
|
||||
})
|
||||
|
||||
// Schema for listing chats
|
||||
const ListChatsSchema = z.object({
|
||||
workflowId: z.string().min(1, 'Workflow ID is required'),
|
||||
limit: z.number().min(1).max(100).optional().default(50),
|
||||
offset: z.number().min(0).optional().default(0),
|
||||
})
|
||||
|
||||
/**
|
||||
* POST /api/copilot
|
||||
* Send a message to the copilot
|
||||
*/
|
||||
export async function POST(req: NextRequest) {
|
||||
const requestId = crypto.randomUUID()
|
||||
|
||||
try {
|
||||
const body = await req.json()
|
||||
const { message, chatId, workflowId, createNewChat, stream } = SendMessageSchema.parse(body)
|
||||
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Copilot message: "${message}"`, {
|
||||
chatId,
|
||||
workflowId,
|
||||
createNewChat,
|
||||
stream,
|
||||
userId: session.user.id,
|
||||
})
|
||||
|
||||
// Send message using the service
|
||||
const result = await sendMessage({
|
||||
message,
|
||||
chatId,
|
||||
workflowId,
|
||||
createNewChat,
|
||||
stream,
|
||||
userId: session.user.id,
|
||||
})
|
||||
|
||||
// Handle streaming response (ReadableStream or StreamingExecution)
|
||||
let streamToRead: ReadableStream | null = null
|
||||
|
||||
// Debug logging to see what we actually got
|
||||
logger.info(`[${requestId}] Response type analysis:`, {
|
||||
responseType: typeof result.response,
|
||||
isReadableStream: result.response instanceof ReadableStream,
|
||||
hasStreamProperty:
|
||||
typeof result.response === 'object' && result.response && 'stream' in result.response,
|
||||
hasExecutionProperty:
|
||||
typeof result.response === 'object' && result.response && 'execution' in result.response,
|
||||
responseKeys:
|
||||
typeof result.response === 'object' && result.response ? Object.keys(result.response) : [],
|
||||
})
|
||||
|
||||
if (result.response instanceof ReadableStream) {
|
||||
logger.info(`[${requestId}] Direct ReadableStream detected`)
|
||||
streamToRead = result.response
|
||||
} else if (
|
||||
typeof result.response === 'object' &&
|
||||
result.response &&
|
||||
'stream' in result.response &&
|
||||
'execution' in result.response
|
||||
) {
|
||||
// Handle StreamingExecution (from providers with tool calls)
|
||||
logger.info(`[${requestId}] StreamingExecution detected`)
|
||||
const streamingExecution = result.response as StreamingExecution
|
||||
streamToRead = streamingExecution.stream
|
||||
|
||||
// No need to extract citations - LLM generates direct markdown links
|
||||
}
|
||||
|
||||
if (streamToRead) {
|
||||
logger.info(`[${requestId}] Returning streaming response`)
|
||||
|
||||
const encoder = new TextEncoder()
|
||||
|
||||
return new Response(
|
||||
new ReadableStream({
|
||||
async start(controller) {
|
||||
const reader = streamToRead!.getReader()
|
||||
let accumulatedResponse = ''
|
||||
|
||||
// Send initial metadata
|
||||
const metadata = {
|
||||
type: 'metadata',
|
||||
chatId: result.chatId,
|
||||
metadata: {
|
||||
requestId,
|
||||
message,
|
||||
},
|
||||
}
|
||||
controller.enqueue(encoder.encode(`data: ${JSON.stringify(metadata)}\n\n`))
|
||||
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
|
||||
const chunkText = new TextDecoder().decode(value)
|
||||
accumulatedResponse += chunkText
|
||||
|
||||
const contentChunk = {
|
||||
type: 'content',
|
||||
content: chunkText,
|
||||
}
|
||||
controller.enqueue(encoder.encode(`data: ${JSON.stringify(contentChunk)}\n\n`))
|
||||
}
|
||||
|
||||
// Send completion signal
|
||||
const completion = {
|
||||
type: 'complete',
|
||||
finalContent: accumulatedResponse,
|
||||
}
|
||||
controller.enqueue(encoder.encode(`data: ${JSON.stringify(completion)}\n\n`))
|
||||
controller.close()
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Streaming error:`, error)
|
||||
const errorChunk = {
|
||||
type: 'error',
|
||||
error: 'Streaming failed',
|
||||
}
|
||||
controller.enqueue(encoder.encode(`data: ${JSON.stringify(errorChunk)}\n\n`))
|
||||
controller.close()
|
||||
}
|
||||
const workflowActions = {
|
||||
addBlock: {
|
||||
description: 'Add one new block to the workflow',
|
||||
parameters: {
|
||||
type: 'object',
|
||||
required: ['type'],
|
||||
properties: {
|
||||
type: {
|
||||
type: 'string',
|
||||
enum: ['agent', 'api', 'condition', 'function', 'router'],
|
||||
description: 'The type of block to add',
|
||||
},
|
||||
name: {
|
||||
type: 'string',
|
||||
description:
|
||||
'Optional custom name for the block. Do not provide a name unless the user has specified it.',
|
||||
},
|
||||
position: {
|
||||
type: 'object',
|
||||
description:
|
||||
'Optional position for the block. Do not provide a position unless the user has specified it.',
|
||||
properties: {
|
||||
x: { type: 'number' },
|
||||
y: { type: 'number' },
|
||||
},
|
||||
}),
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache',
|
||||
Connection: 'keep-alive',
|
||||
},
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
// Handle non-streaming response
|
||||
logger.info(`[${requestId}] Chat response generated successfully`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
response: result.response,
|
||||
chatId: result.chatId,
|
||||
metadata: {
|
||||
requestId,
|
||||
message,
|
||||
},
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Copilot error:`, error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
},
|
||||
},
|
||||
addEdge: {
|
||||
description: 'Create a connection (edge) between two blocks',
|
||||
parameters: {
|
||||
type: 'object',
|
||||
required: ['sourceId', 'targetId'],
|
||||
properties: {
|
||||
sourceId: {
|
||||
type: 'string',
|
||||
description: 'ID of the source block',
|
||||
},
|
||||
targetId: {
|
||||
type: 'string',
|
||||
description: 'ID of the target block',
|
||||
},
|
||||
sourceHandle: {
|
||||
type: 'string',
|
||||
description: 'Optional handle identifier for the source connection point',
|
||||
},
|
||||
targetHandle: {
|
||||
type: 'string',
|
||||
description: 'Optional handle identifier for the target connection point',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
removeBlock: {
|
||||
description: 'Remove a block from the workflow',
|
||||
parameters: {
|
||||
type: 'object',
|
||||
required: ['id'],
|
||||
properties: {
|
||||
id: { type: 'string', description: 'ID of the block to remove' },
|
||||
},
|
||||
},
|
||||
},
|
||||
removeEdge: {
|
||||
description: 'Remove a connection (edge) between blocks',
|
||||
parameters: {
|
||||
type: 'object',
|
||||
required: ['id'],
|
||||
properties: {
|
||||
id: { type: 'string', description: 'ID of the edge to remove' },
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
/**
|
||||
* GET /api/copilot
|
||||
* List chats or get a specific chat
|
||||
*/
|
||||
export async function GET(req: NextRequest) {
|
||||
// System prompt that references workflow state
|
||||
const getSystemPrompt = (workflowState: any) => {
|
||||
const blockCount = Object.keys(workflowState.blocks).length
|
||||
const edgeCount = workflowState.edges.length
|
||||
|
||||
// Create a summary of existing blocks
|
||||
const blockSummary = Object.values(workflowState.blocks)
|
||||
.map((block: any) => `- ${block.type} block named "${block.name}" with id ${block.id}`)
|
||||
.join('\n')
|
||||
|
||||
// Create a summary of existing edges
|
||||
const edgeSummary = workflowState.edges
|
||||
.map((edge: any) => `- ${edge.source} -> ${edge.target} with id ${edge.id}`)
|
||||
.join('\n')
|
||||
|
||||
return `You are a workflow assistant that helps users modify their workflow by adding/removing blocks and connections.
|
||||
|
||||
Current Workflow State:
|
||||
${
|
||||
blockCount === 0
|
||||
? 'The workflow is empty.'
|
||||
: `${blockSummary}
|
||||
|
||||
Connections:
|
||||
${edgeCount === 0 ? 'No connections between blocks.' : edgeSummary}`
|
||||
}
|
||||
|
||||
When users request changes:
|
||||
- Consider existing blocks when suggesting connections
|
||||
- Provide clear feedback about what actions you've taken
|
||||
|
||||
Use the following functions to modify the workflow:
|
||||
1. Use the addBlock function to create a new block
|
||||
2. Use the addEdge function to connect one block to another
|
||||
3. Use the removeBlock function to remove a block
|
||||
4. Use the removeEdge function to remove a connection
|
||||
|
||||
Only use the provided functions and respond naturally to the user's requests.`
|
||||
}
|
||||
|
||||
export async function POST(request: Request) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
// Validate API key
|
||||
const apiKey = request.headers.get('X-OpenAI-Key')
|
||||
if (!apiKey) {
|
||||
return NextResponse.json({ error: 'OpenAI API key is required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(req.url)
|
||||
const chatId = searchParams.get('chatId')
|
||||
// Parse and validate request body
|
||||
const body = await request.json()
|
||||
const validatedData = RequestSchema.parse(body)
|
||||
const { messages, workflowState } = validatedData
|
||||
|
||||
// If chatId is provided, get specific chat
|
||||
if (chatId) {
|
||||
const chat = await getChat(chatId, session.user.id)
|
||||
if (!chat) {
|
||||
return NextResponse.json({ error: 'Chat not found' }, { status: 404 })
|
||||
}
|
||||
// Initialize OpenAI client
|
||||
const openai = new OpenAI({ apiKey })
|
||||
|
||||
// Create message history with workflow context
|
||||
const messageHistory = [
|
||||
{ role: 'system', content: getSystemPrompt(workflowState) },
|
||||
...messages,
|
||||
]
|
||||
|
||||
// Make OpenAI API call with workflow context
|
||||
const completion = await openai.chat.completions.create({
|
||||
model: 'gpt-4o',
|
||||
messages: messageHistory as ChatCompletionMessageParam[],
|
||||
tools: Object.entries(workflowActions).map(([name, config]) => ({
|
||||
type: 'function',
|
||||
function: {
|
||||
name,
|
||||
description: config.description,
|
||||
parameters: config.parameters,
|
||||
},
|
||||
})),
|
||||
tool_choice: 'auto',
|
||||
})
|
||||
|
||||
const message = completion.choices[0].message
|
||||
|
||||
// Process tool calls if present
|
||||
if (message.tool_calls) {
|
||||
logger.debug(`[${requestId}] Tool calls:`, {
|
||||
toolCalls: message.tool_calls,
|
||||
})
|
||||
const actions = message.tool_calls.map((call) => ({
|
||||
name: call.function.name,
|
||||
parameters: JSON.parse(call.function.arguments),
|
||||
}))
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
chat,
|
||||
message: message.content || "I've updated the workflow based on your request.",
|
||||
actions,
|
||||
})
|
||||
}
|
||||
|
||||
// Otherwise, list chats
|
||||
const workflowId = searchParams.get('workflowId')
|
||||
const limit = Number.parseInt(searchParams.get('limit') || '50')
|
||||
const offset = Number.parseInt(searchParams.get('offset') || '0')
|
||||
|
||||
if (!workflowId) {
|
||||
return NextResponse.json(
|
||||
{ error: 'workflowId is required for listing chats' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const chats = await listChats(session.user.id, workflowId, { limit, offset })
|
||||
|
||||
// Return response with no actions
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
chats,
|
||||
message:
|
||||
message.content ||
|
||||
"I'm not sure what changes to make to the workflow. Can you please provide more specific instructions?",
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to handle GET request:', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
logger.error(`[${requestId}] Copilot API error:`, { error })
|
||||
|
||||
/**
|
||||
* PUT /api/copilot
|
||||
* Create a new chat
|
||||
*/
|
||||
export async function PUT(req: NextRequest) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
const { workflowId, title, initialMessage } = CreateChatSchema.parse(body)
|
||||
|
||||
logger.info(`Creating new chat for user ${session.user.id}, workflow ${workflowId}`)
|
||||
|
||||
const chat = await createChat(session.user.id, workflowId, {
|
||||
title,
|
||||
initialMessage,
|
||||
})
|
||||
|
||||
logger.info(`Created chat ${chat.id} for user ${session.user.id}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
chat,
|
||||
})
|
||||
} catch (error) {
|
||||
// Handle specific error types
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ error: 'Invalid request format', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error('Failed to create chat:', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* PATCH /api/copilot
|
||||
* Update a chat with new messages
|
||||
*/
|
||||
export async function PATCH(req: NextRequest) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
const { chatId, messages, title } = UpdateChatSchema.parse(body)
|
||||
|
||||
logger.info(`Updating chat ${chatId} for user ${session.user.id}`)
|
||||
|
||||
// Get the current chat to check if it has a title
|
||||
const existingChat = await getChat(chatId, session.user.id)
|
||||
|
||||
let titleToUse = title
|
||||
|
||||
// Generate title if chat doesn't have one and we have messages
|
||||
if (!titleToUse && existingChat && !existingChat.title && messages && messages.length > 0) {
|
||||
const firstUserMessage = messages.find((msg) => msg.role === 'user')
|
||||
if (firstUserMessage) {
|
||||
logger.info('Generating LLM-based title for chat without title')
|
||||
try {
|
||||
titleToUse = await generateChatTitle(firstUserMessage.content)
|
||||
logger.info(`Generated title: ${titleToUse}`)
|
||||
} catch (error) {
|
||||
logger.error('Failed to generate chat title:', error)
|
||||
titleToUse = 'New Chat'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const chat = await updateChat(chatId, session.user.id, {
|
||||
messages,
|
||||
title: titleToUse,
|
||||
})
|
||||
|
||||
if (!chat) {
|
||||
return NextResponse.json({ error: 'Chat not found or access denied' }, { status: 404 })
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
chat,
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error('Failed to update chat:', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* DELETE /api/copilot
|
||||
* Delete a chat
|
||||
*/
|
||||
export async function DELETE(req: NextRequest) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(req.url)
|
||||
const chatId = searchParams.get('chatId')
|
||||
|
||||
if (!chatId) {
|
||||
return NextResponse.json({ error: 'chatId is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const success = await deleteChat(chatId, session.user.id)
|
||||
|
||||
if (!success) {
|
||||
return NextResponse.json({ error: 'Chat not found or access denied' }, { status: 404 })
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: 'Chat deleted successfully',
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to delete chat:', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
return NextResponse.json({ error: 'Failed to process copilot message' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,76 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { searchDocumentation } from '@/lib/copilot/service'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
|
||||
const logger = createLogger('DocsSearchAPI')
|
||||
|
||||
// Request and response type definitions
|
||||
interface DocsSearchRequest {
|
||||
query: string
|
||||
topK?: number
|
||||
}
|
||||
|
||||
interface DocsSearchResult {
|
||||
id: number
|
||||
title: string
|
||||
url: string
|
||||
content: string
|
||||
similarity: number
|
||||
}
|
||||
|
||||
interface DocsSearchSuccessResponse {
|
||||
success: true
|
||||
results: DocsSearchResult[]
|
||||
query: string
|
||||
totalResults: number
|
||||
searchTime?: number
|
||||
}
|
||||
|
||||
interface DocsSearchErrorResponse {
|
||||
success: false
|
||||
error: string
|
||||
}
|
||||
|
||||
export async function POST(
|
||||
request: NextRequest
|
||||
): Promise<NextResponse<DocsSearchSuccessResponse | DocsSearchErrorResponse>> {
|
||||
try {
|
||||
const requestBody: DocsSearchRequest = await request.json()
|
||||
const { query, topK = 5 } = requestBody
|
||||
|
||||
if (!query) {
|
||||
const errorResponse: DocsSearchErrorResponse = {
|
||||
success: false,
|
||||
error: 'Query is required',
|
||||
}
|
||||
return NextResponse.json(errorResponse, { status: 400 })
|
||||
}
|
||||
|
||||
logger.info('Executing documentation search', { query, topK })
|
||||
|
||||
const startTime = Date.now()
|
||||
const results = await searchDocumentation(query, { topK })
|
||||
const searchTime = Date.now() - startTime
|
||||
|
||||
logger.info(`Found ${results.length} documentation results`, { query })
|
||||
|
||||
const successResponse: DocsSearchSuccessResponse = {
|
||||
success: true,
|
||||
results,
|
||||
query,
|
||||
totalResults: results.length,
|
||||
searchTime,
|
||||
}
|
||||
|
||||
return NextResponse.json(successResponse)
|
||||
} catch (error) {
|
||||
logger.error('Documentation search API failed', error)
|
||||
|
||||
const errorResponse: DocsSearchErrorResponse = {
|
||||
success: false,
|
||||
error: `Documentation search failed: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
||||
}
|
||||
|
||||
return NextResponse.json(errorResponse, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,413 +0,0 @@
|
||||
/**
|
||||
* Tests for knowledge document chunks API route
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import {
|
||||
createMockRequest,
|
||||
mockAuth,
|
||||
mockConsoleLogger,
|
||||
mockDrizzleOrm,
|
||||
mockKnowledgeSchemas,
|
||||
} from '@/app/api/__test-utils__/utils'
|
||||
import type { DocumentAccessCheck } from '../../../../utils'
|
||||
|
||||
mockKnowledgeSchemas()
|
||||
mockDrizzleOrm()
|
||||
mockConsoleLogger()
|
||||
|
||||
vi.mock('@/lib/tokenization/estimators', () => ({
|
||||
estimateTokenCount: vi.fn().mockReturnValue({ count: 452 }),
|
||||
}))
|
||||
|
||||
vi.mock('@/providers/utils', () => ({
|
||||
calculateCost: vi.fn().mockReturnValue({
|
||||
input: 0.00000904,
|
||||
output: 0,
|
||||
total: 0.00000904,
|
||||
pricing: {
|
||||
input: 0.02,
|
||||
output: 0,
|
||||
updatedAt: '2025-07-10',
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('../../../../utils', () => ({
|
||||
checkDocumentAccess: vi.fn(),
|
||||
generateEmbeddings: vi.fn().mockResolvedValue([[0.1, 0.2, 0.3, 0.4, 0.5]]),
|
||||
}))
|
||||
|
||||
describe('Knowledge Document Chunks API Route', () => {
|
||||
const mockAuth$ = mockAuth()
|
||||
|
||||
const mockDbChain = {
|
||||
select: vi.fn().mockReturnThis(),
|
||||
from: vi.fn().mockReturnThis(),
|
||||
where: vi.fn().mockReturnThis(),
|
||||
orderBy: vi.fn().mockReturnThis(),
|
||||
limit: vi.fn().mockReturnThis(),
|
||||
offset: vi.fn().mockReturnThis(),
|
||||
insert: vi.fn().mockReturnThis(),
|
||||
values: vi.fn().mockResolvedValue(undefined),
|
||||
update: vi.fn().mockReturnThis(),
|
||||
set: vi.fn().mockReturnThis(),
|
||||
returning: vi.fn().mockResolvedValue([]),
|
||||
delete: vi.fn().mockReturnThis(),
|
||||
transaction: vi.fn(),
|
||||
}
|
||||
|
||||
const mockGetUserId = vi.fn()
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks()
|
||||
|
||||
vi.doMock('@/db', () => ({
|
||||
db: mockDbChain,
|
||||
}))
|
||||
|
||||
vi.doMock('@/app/api/auth/oauth/utils', () => ({
|
||||
getUserId: mockGetUserId,
|
||||
}))
|
||||
|
||||
Object.values(mockDbChain).forEach((fn) => {
|
||||
if (typeof fn === 'function' && fn !== mockDbChain.values && fn !== mockDbChain.returning) {
|
||||
fn.mockClear().mockReturnThis()
|
||||
}
|
||||
})
|
||||
|
||||
vi.stubGlobal('crypto', {
|
||||
randomUUID: vi.fn().mockReturnValue('mock-chunk-uuid-1234'),
|
||||
createHash: vi.fn().mockReturnValue({
|
||||
update: vi.fn().mockReturnThis(),
|
||||
digest: vi.fn().mockReturnValue('mock-hash-123'),
|
||||
}),
|
||||
})
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('POST /api/knowledge/[id]/documents/[documentId]/chunks', () => {
|
||||
const validChunkData = {
|
||||
content: 'This is test chunk content for uploading to the knowledge base document.',
|
||||
enabled: true,
|
||||
}
|
||||
|
||||
const mockDocumentAccess = {
|
||||
hasAccess: true,
|
||||
notFound: false,
|
||||
reason: '',
|
||||
document: {
|
||||
id: 'doc-123',
|
||||
processingStatus: 'completed',
|
||||
tag1: 'tag1-value',
|
||||
tag2: 'tag2-value',
|
||||
tag3: null,
|
||||
tag4: null,
|
||||
tag5: null,
|
||||
tag6: null,
|
||||
tag7: null,
|
||||
},
|
||||
}
|
||||
|
||||
const mockParams = Promise.resolve({ id: 'kb-123', documentId: 'doc-123' })
|
||||
|
||||
it('should create chunk successfully with cost tracking', async () => {
|
||||
const { checkDocumentAccess } = await import('../../../../utils')
|
||||
const { estimateTokenCount } = await import('@/lib/tokenization/estimators')
|
||||
const { calculateCost } = await import('@/providers/utils')
|
||||
|
||||
mockGetUserId.mockResolvedValue('user-123')
|
||||
vi.mocked(checkDocumentAccess).mockResolvedValue(mockDocumentAccess as DocumentAccessCheck)
|
||||
|
||||
// Mock transaction
|
||||
const mockTx = {
|
||||
select: vi.fn().mockReturnThis(),
|
||||
from: vi.fn().mockReturnThis(),
|
||||
where: vi.fn().mockReturnThis(),
|
||||
orderBy: vi.fn().mockReturnThis(),
|
||||
limit: vi.fn().mockResolvedValue([{ chunkIndex: 0 }]),
|
||||
insert: vi.fn().mockReturnThis(),
|
||||
values: vi.fn().mockResolvedValue(undefined),
|
||||
update: vi.fn().mockReturnThis(),
|
||||
set: vi.fn().mockReturnThis(),
|
||||
}
|
||||
|
||||
mockDbChain.transaction.mockImplementation(async (callback) => {
|
||||
return await callback(mockTx)
|
||||
})
|
||||
|
||||
const req = createMockRequest('POST', validChunkData)
|
||||
const { POST } = await import('./route')
|
||||
const response = await POST(req, { params: mockParams })
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
expect(data.success).toBe(true)
|
||||
|
||||
// Verify cost tracking
|
||||
expect(data.data.cost).toBeDefined()
|
||||
expect(data.data.cost.input).toBe(0.00000904)
|
||||
expect(data.data.cost.output).toBe(0)
|
||||
expect(data.data.cost.total).toBe(0.00000904)
|
||||
expect(data.data.cost.tokens).toEqual({
|
||||
prompt: 452,
|
||||
completion: 0,
|
||||
total: 452,
|
||||
})
|
||||
expect(data.data.cost.model).toBe('text-embedding-3-small')
|
||||
expect(data.data.cost.pricing).toEqual({
|
||||
input: 0.02,
|
||||
output: 0,
|
||||
updatedAt: '2025-07-10',
|
||||
})
|
||||
|
||||
// Verify function calls
|
||||
expect(estimateTokenCount).toHaveBeenCalledWith(validChunkData.content, 'openai')
|
||||
expect(calculateCost).toHaveBeenCalledWith('text-embedding-3-small', 452, 0, false)
|
||||
})
|
||||
|
||||
it('should handle workflow-based authentication', async () => {
|
||||
const { checkDocumentAccess } = await import('../../../../utils')
|
||||
|
||||
const workflowData = {
|
||||
...validChunkData,
|
||||
workflowId: 'workflow-123',
|
||||
}
|
||||
|
||||
mockGetUserId.mockResolvedValue('user-123')
|
||||
vi.mocked(checkDocumentAccess).mockResolvedValue(mockDocumentAccess as DocumentAccessCheck)
|
||||
|
||||
const mockTx = {
|
||||
select: vi.fn().mockReturnThis(),
|
||||
from: vi.fn().mockReturnThis(),
|
||||
where: vi.fn().mockReturnThis(),
|
||||
orderBy: vi.fn().mockReturnThis(),
|
||||
limit: vi.fn().mockResolvedValue([]),
|
||||
insert: vi.fn().mockReturnThis(),
|
||||
values: vi.fn().mockResolvedValue(undefined),
|
||||
update: vi.fn().mockReturnThis(),
|
||||
set: vi.fn().mockReturnThis(),
|
||||
}
|
||||
|
||||
mockDbChain.transaction.mockImplementation(async (callback) => {
|
||||
return await callback(mockTx)
|
||||
})
|
||||
|
||||
const req = createMockRequest('POST', workflowData)
|
||||
const { POST } = await import('./route')
|
||||
const response = await POST(req, { params: mockParams })
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
expect(data.success).toBe(true)
|
||||
expect(mockGetUserId).toHaveBeenCalledWith(expect.any(String), 'workflow-123')
|
||||
})
|
||||
|
||||
it.concurrent('should return unauthorized for unauthenticated request', async () => {
|
||||
mockGetUserId.mockResolvedValue(null)
|
||||
|
||||
const req = createMockRequest('POST', validChunkData)
|
||||
const { POST } = await import('./route')
|
||||
const response = await POST(req, { params: mockParams })
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(401)
|
||||
expect(data.error).toBe('Unauthorized')
|
||||
})
|
||||
|
||||
it('should return not found for workflow that does not exist', async () => {
|
||||
const workflowData = {
|
||||
...validChunkData,
|
||||
workflowId: 'nonexistent-workflow',
|
||||
}
|
||||
|
||||
mockGetUserId.mockResolvedValue(null)
|
||||
|
||||
const req = createMockRequest('POST', workflowData)
|
||||
const { POST } = await import('./route')
|
||||
const response = await POST(req, { params: mockParams })
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(404)
|
||||
expect(data.error).toBe('Workflow not found')
|
||||
})
|
||||
|
||||
it.concurrent('should return not found for document access denied', async () => {
|
||||
const { checkDocumentAccess } = await import('../../../../utils')
|
||||
|
||||
mockGetUserId.mockResolvedValue('user-123')
|
||||
vi.mocked(checkDocumentAccess).mockResolvedValue({
|
||||
hasAccess: false,
|
||||
notFound: true,
|
||||
reason: 'Document not found',
|
||||
})
|
||||
|
||||
const req = createMockRequest('POST', validChunkData)
|
||||
const { POST } = await import('./route')
|
||||
const response = await POST(req, { params: mockParams })
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(404)
|
||||
expect(data.error).toBe('Document not found')
|
||||
})
|
||||
|
||||
it('should return unauthorized for unauthorized document access', async () => {
|
||||
const { checkDocumentAccess } = await import('../../../../utils')
|
||||
|
||||
mockGetUserId.mockResolvedValue('user-123')
|
||||
vi.mocked(checkDocumentAccess).mockResolvedValue({
|
||||
hasAccess: false,
|
||||
notFound: false,
|
||||
reason: 'Unauthorized access',
|
||||
})
|
||||
|
||||
const req = createMockRequest('POST', validChunkData)
|
||||
const { POST } = await import('./route')
|
||||
const response = await POST(req, { params: mockParams })
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(401)
|
||||
expect(data.error).toBe('Unauthorized')
|
||||
})
|
||||
|
||||
it('should reject chunks for failed documents', async () => {
|
||||
const { checkDocumentAccess } = await import('../../../../utils')
|
||||
|
||||
mockGetUserId.mockResolvedValue('user-123')
|
||||
vi.mocked(checkDocumentAccess).mockResolvedValue({
|
||||
...mockDocumentAccess,
|
||||
document: {
|
||||
...mockDocumentAccess.document!,
|
||||
processingStatus: 'failed',
|
||||
},
|
||||
} as DocumentAccessCheck)
|
||||
|
||||
const req = createMockRequest('POST', validChunkData)
|
||||
const { POST } = await import('./route')
|
||||
const response = await POST(req, { params: mockParams })
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
expect(data.error).toBe('Cannot add chunks to failed document')
|
||||
})
|
||||
|
||||
it.concurrent('should validate chunk data', async () => {
|
||||
const { checkDocumentAccess } = await import('../../../../utils')
|
||||
|
||||
mockGetUserId.mockResolvedValue('user-123')
|
||||
vi.mocked(checkDocumentAccess).mockResolvedValue(mockDocumentAccess as DocumentAccessCheck)
|
||||
|
||||
const invalidData = {
|
||||
content: '', // Empty content
|
||||
enabled: true,
|
||||
}
|
||||
|
||||
const req = createMockRequest('POST', invalidData)
|
||||
const { POST } = await import('./route')
|
||||
const response = await POST(req, { params: mockParams })
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
expect(data.error).toBe('Invalid request data')
|
||||
expect(data.details).toBeDefined()
|
||||
})
|
||||
|
||||
it('should inherit tags from parent document', async () => {
|
||||
const { checkDocumentAccess } = await import('../../../../utils')
|
||||
|
||||
mockGetUserId.mockResolvedValue('user-123')
|
||||
vi.mocked(checkDocumentAccess).mockResolvedValue(mockDocumentAccess as DocumentAccessCheck)
|
||||
|
||||
const mockTx = {
|
||||
select: vi.fn().mockReturnThis(),
|
||||
from: vi.fn().mockReturnThis(),
|
||||
where: vi.fn().mockReturnThis(),
|
||||
orderBy: vi.fn().mockReturnThis(),
|
||||
limit: vi.fn().mockResolvedValue([]),
|
||||
insert: vi.fn().mockReturnThis(),
|
||||
values: vi.fn().mockImplementation((data) => {
|
||||
// Verify that tags are inherited from document
|
||||
expect(data.tag1).toBe('tag1-value')
|
||||
expect(data.tag2).toBe('tag2-value')
|
||||
expect(data.tag3).toBe(null)
|
||||
return Promise.resolve(undefined)
|
||||
}),
|
||||
update: vi.fn().mockReturnThis(),
|
||||
set: vi.fn().mockReturnThis(),
|
||||
}
|
||||
|
||||
mockDbChain.transaction.mockImplementation(async (callback) => {
|
||||
return await callback(mockTx)
|
||||
})
|
||||
|
||||
const req = createMockRequest('POST', validChunkData)
|
||||
const { POST } = await import('./route')
|
||||
await POST(req, { params: mockParams })
|
||||
|
||||
expect(mockTx.values).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it.concurrent('should handle cost calculation with different content lengths', async () => {
|
||||
const { estimateTokenCount } = await import('@/lib/tokenization/estimators')
|
||||
const { calculateCost } = await import('@/providers/utils')
|
||||
const { checkDocumentAccess } = await import('../../../../utils')
|
||||
|
||||
// Mock larger content with more tokens
|
||||
vi.mocked(estimateTokenCount).mockReturnValue({
|
||||
count: 1000,
|
||||
confidence: 'high',
|
||||
provider: 'openai',
|
||||
method: 'precise',
|
||||
})
|
||||
vi.mocked(calculateCost).mockReturnValue({
|
||||
input: 0.00002,
|
||||
output: 0,
|
||||
total: 0.00002,
|
||||
pricing: {
|
||||
input: 0.02,
|
||||
output: 0,
|
||||
updatedAt: '2025-07-10',
|
||||
},
|
||||
})
|
||||
|
||||
const largeChunkData = {
|
||||
content:
|
||||
'This is a much larger chunk of content that would result in significantly more tokens when processed through the OpenAI tokenization system for embedding generation. This content is designed to test the cost calculation accuracy with larger input sizes.',
|
||||
enabled: true,
|
||||
}
|
||||
|
||||
mockGetUserId.mockResolvedValue('user-123')
|
||||
vi.mocked(checkDocumentAccess).mockResolvedValue(mockDocumentAccess as DocumentAccessCheck)
|
||||
|
||||
const mockTx = {
|
||||
select: vi.fn().mockReturnThis(),
|
||||
from: vi.fn().mockReturnThis(),
|
||||
where: vi.fn().mockReturnThis(),
|
||||
orderBy: vi.fn().mockReturnThis(),
|
||||
limit: vi.fn().mockResolvedValue([]),
|
||||
insert: vi.fn().mockReturnThis(),
|
||||
values: vi.fn().mockResolvedValue(undefined),
|
||||
update: vi.fn().mockReturnThis(),
|
||||
set: vi.fn().mockReturnThis(),
|
||||
}
|
||||
|
||||
mockDbChain.transaction.mockImplementation(async (callback) => {
|
||||
return await callback(mockTx)
|
||||
})
|
||||
|
||||
const req = createMockRequest('POST', largeChunkData)
|
||||
const { POST } = await import('./route')
|
||||
const response = await POST(req, { params: mockParams })
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
expect(data.data.cost.input).toBe(0.00002)
|
||||
expect(data.data.cost.tokens.prompt).toBe(1000)
|
||||
expect(calculateCost).toHaveBeenCalledWith('text-embedding-3-small', 1000, 0, false)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -4,11 +4,9 @@ import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { estimateTokenCount } from '@/lib/tokenization/estimators'
|
||||
import { getUserId } from '@/app/api/auth/oauth/utils'
|
||||
import { db } from '@/db'
|
||||
import { document, embedding } from '@/db/schema'
|
||||
import { calculateCost } from '@/providers/utils'
|
||||
import { checkDocumentAccess, generateEmbeddings } from '../../../../utils'
|
||||
|
||||
const logger = createLogger('DocumentChunksAPI')
|
||||
@@ -120,13 +118,7 @@ export async function GET(
|
||||
enabled: embedding.enabled,
|
||||
startOffset: embedding.startOffset,
|
||||
endOffset: embedding.endOffset,
|
||||
tag1: embedding.tag1,
|
||||
tag2: embedding.tag2,
|
||||
tag3: embedding.tag3,
|
||||
tag4: embedding.tag4,
|
||||
tag5: embedding.tag5,
|
||||
tag6: embedding.tag6,
|
||||
tag7: embedding.tag7,
|
||||
metadata: embedding.metadata,
|
||||
createdAt: embedding.createdAt,
|
||||
updatedAt: embedding.updatedAt,
|
||||
})
|
||||
@@ -219,9 +211,6 @@ export async function POST(
|
||||
logger.info(`[${requestId}] Generating embedding for manual chunk`)
|
||||
const embeddings = await generateEmbeddings([validatedData.content])
|
||||
|
||||
// Calculate accurate token count for both database storage and cost calculation
|
||||
const tokenCount = estimateTokenCount(validatedData.content, 'openai')
|
||||
|
||||
const chunkId = crypto.randomUUID()
|
||||
const now = new Date()
|
||||
|
||||
@@ -245,19 +234,12 @@ export async function POST(
|
||||
chunkHash: crypto.createHash('sha256').update(validatedData.content).digest('hex'),
|
||||
content: validatedData.content,
|
||||
contentLength: validatedData.content.length,
|
||||
tokenCount: tokenCount.count, // Use accurate token count
|
||||
tokenCount: Math.ceil(validatedData.content.length / 4), // Rough approximation
|
||||
embedding: embeddings[0],
|
||||
embeddingModel: 'text-embedding-3-small',
|
||||
startOffset: 0, // Manual chunks don't have document offsets
|
||||
endOffset: validatedData.content.length,
|
||||
// Inherit tags from parent document
|
||||
tag1: doc.tag1,
|
||||
tag2: doc.tag2,
|
||||
tag3: doc.tag3,
|
||||
tag4: doc.tag4,
|
||||
tag5: doc.tag5,
|
||||
tag6: doc.tag6,
|
||||
tag7: doc.tag7,
|
||||
metadata: { manual: true }, // Mark as manually created
|
||||
enabled: validatedData.enabled,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
@@ -281,38 +263,9 @@ export async function POST(
|
||||
|
||||
logger.info(`[${requestId}] Manual chunk created: ${chunkId} in document ${documentId}`)
|
||||
|
||||
// Calculate cost for the embedding (with fallback if calculation fails)
|
||||
let cost = null
|
||||
try {
|
||||
cost = calculateCost('text-embedding-3-small', tokenCount.count, 0, false)
|
||||
} catch (error) {
|
||||
logger.warn(`[${requestId}] Failed to calculate cost for chunk upload`, {
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
})
|
||||
// Continue without cost information rather than failing the upload
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
...newChunk,
|
||||
...(cost
|
||||
? {
|
||||
cost: {
|
||||
input: cost.input,
|
||||
output: cost.output,
|
||||
total: cost.total,
|
||||
tokens: {
|
||||
prompt: tokenCount.count,
|
||||
completion: 0,
|
||||
total: tokenCount.count,
|
||||
},
|
||||
model: 'text-embedding-3-small',
|
||||
pricing: cost.pricing,
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
},
|
||||
data: newChunk,
|
||||
})
|
||||
} catch (validationError) {
|
||||
if (validationError instanceof z.ZodError) {
|
||||
|
||||
@@ -153,14 +153,6 @@ const CreateDocumentSchema = z.object({
|
||||
fileUrl: z.string().url('File URL must be valid'),
|
||||
fileSize: z.number().min(1, 'File size must be greater than 0'),
|
||||
mimeType: z.string().min(1, 'MIME type is required'),
|
||||
// Document tags for filtering
|
||||
tag1: z.string().optional(),
|
||||
tag2: z.string().optional(),
|
||||
tag3: z.string().optional(),
|
||||
tag4: z.string().optional(),
|
||||
tag5: z.string().optional(),
|
||||
tag6: z.string().optional(),
|
||||
tag7: z.string().optional(),
|
||||
})
|
||||
|
||||
const BulkCreateDocumentsSchema = z.object({
|
||||
@@ -237,14 +229,6 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
processingError: document.processingError,
|
||||
enabled: document.enabled,
|
||||
uploadedAt: document.uploadedAt,
|
||||
// Include tags in response
|
||||
tag1: document.tag1,
|
||||
tag2: document.tag2,
|
||||
tag3: document.tag3,
|
||||
tag4: document.tag4,
|
||||
tag5: document.tag5,
|
||||
tag6: document.tag6,
|
||||
tag7: document.tag7,
|
||||
})
|
||||
.from(document)
|
||||
.where(and(...whereConditions))
|
||||
@@ -314,14 +298,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
processingStatus: 'pending' as const,
|
||||
enabled: true,
|
||||
uploadedAt: now,
|
||||
// Include tags from upload
|
||||
tag1: docData.tag1 || null,
|
||||
tag2: docData.tag2 || null,
|
||||
tag3: docData.tag3 || null,
|
||||
tag4: docData.tag4 || null,
|
||||
tag5: docData.tag5 || null,
|
||||
tag6: docData.tag6 || null,
|
||||
tag7: docData.tag7 || null,
|
||||
}
|
||||
|
||||
await tx.insert(document).values(newDocument)
|
||||
@@ -396,14 +372,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
characterCount: 0,
|
||||
enabled: true,
|
||||
uploadedAt: now,
|
||||
// Include tags from upload
|
||||
tag1: validatedData.tag1 || null,
|
||||
tag2: validatedData.tag2 || null,
|
||||
tag3: validatedData.tag3 || null,
|
||||
tag4: validatedData.tag4 || null,
|
||||
tag5: validatedData.tag5 || null,
|
||||
tag6: validatedData.tag6 || null,
|
||||
tag7: validatedData.tag7 || null,
|
||||
}
|
||||
|
||||
await db.insert(document).values(newDocument)
|
||||
|
||||
@@ -8,6 +8,7 @@ import { document, knowledgeBase } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('KnowledgeBaseAPI')
|
||||
|
||||
// Schema for knowledge base creation
|
||||
const CreateKnowledgeBaseSchema = z.object({
|
||||
name: z.string().min(1, 'Name is required'),
|
||||
description: z.string().optional(),
|
||||
|
||||
@@ -34,23 +34,6 @@ vi.mock('@/lib/documents/utils', () => ({
|
||||
retryWithExponentialBackoff: vi.fn().mockImplementation((fn) => fn()),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/tokenization/estimators', () => ({
|
||||
estimateTokenCount: vi.fn().mockReturnValue({ count: 521 }),
|
||||
}))
|
||||
|
||||
vi.mock('@/providers/utils', () => ({
|
||||
calculateCost: vi.fn().mockReturnValue({
|
||||
input: 0.00001042,
|
||||
output: 0,
|
||||
total: 0.00001042,
|
||||
pricing: {
|
||||
input: 0.02,
|
||||
output: 0,
|
||||
updatedAt: '2025-07-10',
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
mockConsoleLogger()
|
||||
|
||||
describe('Knowledge Search API Route', () => {
|
||||
@@ -223,7 +206,7 @@ describe('Knowledge Search API Route', () => {
|
||||
expect(mockGetUserId).toHaveBeenCalledWith(expect.any(String), 'workflow-123')
|
||||
})
|
||||
|
||||
it.concurrent('should return unauthorized for unauthenticated request', async () => {
|
||||
it('should return unauthorized for unauthenticated request', async () => {
|
||||
mockGetUserId.mockResolvedValue(null)
|
||||
|
||||
const req = createMockRequest('POST', validSearchData)
|
||||
@@ -235,7 +218,7 @@ describe('Knowledge Search API Route', () => {
|
||||
expect(data.error).toBe('Unauthorized')
|
||||
})
|
||||
|
||||
it.concurrent('should return not found for workflow that does not exist', async () => {
|
||||
it('should return not found for workflow that does not exist', async () => {
|
||||
const workflowData = {
|
||||
...validSearchData,
|
||||
workflowId: 'nonexistent-workflow',
|
||||
@@ -285,7 +268,7 @@ describe('Knowledge Search API Route', () => {
|
||||
expect(data.error).toBe('Knowledge bases not found: kb-missing')
|
||||
})
|
||||
|
||||
it.concurrent('should validate search parameters', async () => {
|
||||
it('should validate search parameters', async () => {
|
||||
const invalidData = {
|
||||
knowledgeBaseIds: '', // Empty string
|
||||
query: '', // Empty query
|
||||
@@ -331,7 +314,7 @@ describe('Knowledge Search API Route', () => {
|
||||
expect(data.data.topK).toBe(10) // Default value
|
||||
})
|
||||
|
||||
it.concurrent('should handle OpenAI API errors', async () => {
|
||||
it('should handle OpenAI API errors', async () => {
|
||||
mockGetUserId.mockResolvedValue('user-123')
|
||||
mockDbChain.limit.mockResolvedValueOnce(mockKnowledgeBases)
|
||||
|
||||
@@ -351,7 +334,7 @@ describe('Knowledge Search API Route', () => {
|
||||
expect(data.error).toBe('Failed to perform vector search')
|
||||
})
|
||||
|
||||
it.concurrent('should handle missing OpenAI API key', async () => {
|
||||
it('should handle missing OpenAI API key', async () => {
|
||||
vi.doMock('@/lib/env', () => ({
|
||||
env: {
|
||||
OPENAI_API_KEY: undefined,
|
||||
@@ -370,7 +353,7 @@ describe('Knowledge Search API Route', () => {
|
||||
expect(data.error).toBe('Failed to perform vector search')
|
||||
})
|
||||
|
||||
it.concurrent('should handle database errors during search', async () => {
|
||||
it('should handle database errors during search', async () => {
|
||||
mockGetUserId.mockResolvedValue('user-123')
|
||||
mockDbChain.limit.mockResolvedValueOnce(mockKnowledgeBases)
|
||||
mockDbChain.limit.mockRejectedValueOnce(new Error('Database error'))
|
||||
@@ -392,7 +375,7 @@ describe('Knowledge Search API Route', () => {
|
||||
expect(data.error).toBe('Failed to perform vector search')
|
||||
})
|
||||
|
||||
it.concurrent('should handle invalid OpenAI response format', async () => {
|
||||
it('should handle invalid OpenAI response format', async () => {
|
||||
mockGetUserId.mockResolvedValue('user-123')
|
||||
mockDbChain.limit.mockResolvedValueOnce(mockKnowledgeBases)
|
||||
|
||||
@@ -412,124 +395,5 @@ describe('Knowledge Search API Route', () => {
|
||||
expect(response.status).toBe(500)
|
||||
expect(data.error).toBe('Failed to perform vector search')
|
||||
})
|
||||
|
||||
describe('Cost tracking', () => {
|
||||
it.concurrent('should include cost information in successful search response', async () => {
|
||||
mockGetUserId.mockResolvedValue('user-123')
|
||||
mockDbChain.where.mockResolvedValueOnce(mockKnowledgeBases)
|
||||
mockDbChain.limit.mockResolvedValueOnce(mockSearchResults)
|
||||
|
||||
mockFetch.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () =>
|
||||
Promise.resolve({
|
||||
data: [{ embedding: mockEmbedding }],
|
||||
}),
|
||||
})
|
||||
|
||||
const req = createMockRequest('POST', validSearchData)
|
||||
const { POST } = await import('./route')
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
expect(data.success).toBe(true)
|
||||
|
||||
// Verify cost information is included
|
||||
expect(data.data.cost).toBeDefined()
|
||||
expect(data.data.cost.input).toBe(0.00001042)
|
||||
expect(data.data.cost.output).toBe(0)
|
||||
expect(data.data.cost.total).toBe(0.00001042)
|
||||
expect(data.data.cost.tokens).toEqual({
|
||||
prompt: 521,
|
||||
completion: 0,
|
||||
total: 521,
|
||||
})
|
||||
expect(data.data.cost.model).toBe('text-embedding-3-small')
|
||||
expect(data.data.cost.pricing).toEqual({
|
||||
input: 0.02,
|
||||
output: 0,
|
||||
updatedAt: '2025-07-10',
|
||||
})
|
||||
})
|
||||
|
||||
it('should call cost calculation functions with correct parameters', async () => {
|
||||
const { estimateTokenCount } = await import('@/lib/tokenization/estimators')
|
||||
const { calculateCost } = await import('@/providers/utils')
|
||||
|
||||
mockGetUserId.mockResolvedValue('user-123')
|
||||
mockDbChain.where.mockResolvedValueOnce(mockKnowledgeBases)
|
||||
mockDbChain.limit.mockResolvedValueOnce(mockSearchResults)
|
||||
|
||||
mockFetch.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () =>
|
||||
Promise.resolve({
|
||||
data: [{ embedding: mockEmbedding }],
|
||||
}),
|
||||
})
|
||||
|
||||
const req = createMockRequest('POST', validSearchData)
|
||||
const { POST } = await import('./route')
|
||||
await POST(req)
|
||||
|
||||
// Verify token estimation was called with correct parameters
|
||||
expect(estimateTokenCount).toHaveBeenCalledWith('test search query', 'openai')
|
||||
|
||||
// Verify cost calculation was called with correct parameters
|
||||
expect(calculateCost).toHaveBeenCalledWith('text-embedding-3-small', 521, 0, false)
|
||||
})
|
||||
|
||||
it('should handle cost calculation with different query lengths', async () => {
|
||||
const { estimateTokenCount } = await import('@/lib/tokenization/estimators')
|
||||
const { calculateCost } = await import('@/providers/utils')
|
||||
|
||||
// Mock different token count for longer query
|
||||
vi.mocked(estimateTokenCount).mockReturnValue({
|
||||
count: 1042,
|
||||
confidence: 'high',
|
||||
provider: 'openai',
|
||||
method: 'precise',
|
||||
})
|
||||
vi.mocked(calculateCost).mockReturnValue({
|
||||
input: 0.00002084,
|
||||
output: 0,
|
||||
total: 0.00002084,
|
||||
pricing: {
|
||||
input: 0.02,
|
||||
output: 0,
|
||||
updatedAt: '2025-07-10',
|
||||
},
|
||||
})
|
||||
|
||||
const longQueryData = {
|
||||
...validSearchData,
|
||||
query:
|
||||
'This is a much longer search query with many more tokens to test cost calculation accuracy',
|
||||
}
|
||||
|
||||
mockGetUserId.mockResolvedValue('user-123')
|
||||
mockDbChain.where.mockResolvedValueOnce(mockKnowledgeBases)
|
||||
mockDbChain.limit.mockResolvedValueOnce(mockSearchResults)
|
||||
|
||||
mockFetch.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () =>
|
||||
Promise.resolve({
|
||||
data: [{ embedding: mockEmbedding }],
|
||||
}),
|
||||
})
|
||||
|
||||
const req = createMockRequest('POST', longQueryData)
|
||||
const { POST } = await import('./route')
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
expect(data.data.cost.input).toBe(0.00002084)
|
||||
expect(data.data.cost.tokens.prompt).toBe(1042)
|
||||
expect(calculateCost).toHaveBeenCalledWith('text-embedding-3-small', 1042, 0, false)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -4,37 +4,12 @@ import { z } from 'zod'
|
||||
import { retryWithExponentialBackoff } from '@/lib/documents/utils'
|
||||
import { env } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { estimateTokenCount } from '@/lib/tokenization/estimators'
|
||||
import { getUserId } from '@/app/api/auth/oauth/utils'
|
||||
import { db } from '@/db'
|
||||
import { embedding, knowledgeBase } from '@/db/schema'
|
||||
import { calculateCost } from '@/providers/utils'
|
||||
|
||||
const logger = createLogger('VectorSearchAPI')
|
||||
|
||||
function getTagFilters(filters: Record<string, string>, embedding: any) {
|
||||
return Object.entries(filters).map(([key, value]) => {
|
||||
switch (key) {
|
||||
case 'tag1':
|
||||
return sql`LOWER(${embedding.tag1}) = LOWER(${value})`
|
||||
case 'tag2':
|
||||
return sql`LOWER(${embedding.tag2}) = LOWER(${value})`
|
||||
case 'tag3':
|
||||
return sql`LOWER(${embedding.tag3}) = LOWER(${value})`
|
||||
case 'tag4':
|
||||
return sql`LOWER(${embedding.tag4}) = LOWER(${value})`
|
||||
case 'tag5':
|
||||
return sql`LOWER(${embedding.tag5}) = LOWER(${value})`
|
||||
case 'tag6':
|
||||
return sql`LOWER(${embedding.tag6}) = LOWER(${value})`
|
||||
case 'tag7':
|
||||
return sql`LOWER(${embedding.tag7}) = LOWER(${value})`
|
||||
default:
|
||||
return sql`1=1` // No-op for unknown keys
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
class APIError extends Error {
|
||||
public status: number
|
||||
|
||||
@@ -52,17 +27,6 @@ const VectorSearchSchema = z.object({
|
||||
]),
|
||||
query: z.string().min(1, 'Search query is required'),
|
||||
topK: z.number().min(1).max(100).default(10),
|
||||
filters: z
|
||||
.object({
|
||||
tag1: z.string().optional(),
|
||||
tag2: z.string().optional(),
|
||||
tag3: z.string().optional(),
|
||||
tag4: z.string().optional(),
|
||||
tag5: z.string().optional(),
|
||||
tag6: z.string().optional(),
|
||||
tag7: z.string().optional(),
|
||||
})
|
||||
.optional(),
|
||||
})
|
||||
|
||||
async function generateSearchEmbedding(query: string): Promise<number[]> {
|
||||
@@ -138,8 +102,7 @@ async function executeParallelQueries(
|
||||
knowledgeBaseIds: string[],
|
||||
queryVector: string,
|
||||
topK: number,
|
||||
distanceThreshold: number,
|
||||
filters?: Record<string, string>
|
||||
distanceThreshold: number
|
||||
) {
|
||||
const parallelLimit = Math.ceil(topK / knowledgeBaseIds.length) + 5
|
||||
|
||||
@@ -150,13 +113,7 @@ async function executeParallelQueries(
|
||||
content: embedding.content,
|
||||
documentId: embedding.documentId,
|
||||
chunkIndex: embedding.chunkIndex,
|
||||
tag1: embedding.tag1,
|
||||
tag2: embedding.tag2,
|
||||
tag3: embedding.tag3,
|
||||
tag4: embedding.tag4,
|
||||
tag5: embedding.tag5,
|
||||
tag6: embedding.tag6,
|
||||
tag7: embedding.tag7,
|
||||
metadata: embedding.metadata,
|
||||
distance: sql<number>`${embedding.embedding} <=> ${queryVector}::vector`.as('distance'),
|
||||
knowledgeBaseId: embedding.knowledgeBaseId,
|
||||
})
|
||||
@@ -165,8 +122,7 @@ async function executeParallelQueries(
|
||||
and(
|
||||
eq(embedding.knowledgeBaseId, kbId),
|
||||
eq(embedding.enabled, true),
|
||||
sql`${embedding.embedding} <=> ${queryVector}::vector < ${distanceThreshold}`,
|
||||
...(filters ? getTagFilters(filters, embedding) : [])
|
||||
sql`${embedding.embedding} <=> ${queryVector}::vector < ${distanceThreshold}`
|
||||
)
|
||||
)
|
||||
.orderBy(sql`${embedding.embedding} <=> ${queryVector}::vector`)
|
||||
@@ -183,8 +139,7 @@ async function executeSingleQuery(
|
||||
knowledgeBaseIds: string[],
|
||||
queryVector: string,
|
||||
topK: number,
|
||||
distanceThreshold: number,
|
||||
filters?: Record<string, string>
|
||||
distanceThreshold: number
|
||||
) {
|
||||
return await db
|
||||
.select({
|
||||
@@ -192,13 +147,7 @@ async function executeSingleQuery(
|
||||
content: embedding.content,
|
||||
documentId: embedding.documentId,
|
||||
chunkIndex: embedding.chunkIndex,
|
||||
tag1: embedding.tag1,
|
||||
tag2: embedding.tag2,
|
||||
tag3: embedding.tag3,
|
||||
tag4: embedding.tag4,
|
||||
tag5: embedding.tag5,
|
||||
tag6: embedding.tag6,
|
||||
tag7: embedding.tag7,
|
||||
metadata: embedding.metadata,
|
||||
distance: sql<number>`${embedding.embedding} <=> ${queryVector}::vector`.as('distance'),
|
||||
})
|
||||
.from(embedding)
|
||||
@@ -206,29 +155,7 @@ async function executeSingleQuery(
|
||||
and(
|
||||
inArray(embedding.knowledgeBaseId, knowledgeBaseIds),
|
||||
eq(embedding.enabled, true),
|
||||
sql`${embedding.embedding} <=> ${queryVector}::vector < ${distanceThreshold}`,
|
||||
...(filters
|
||||
? Object.entries(filters).map(([key, value]) => {
|
||||
switch (key) {
|
||||
case 'tag1':
|
||||
return sql`LOWER(${embedding.tag1}) = LOWER(${value})`
|
||||
case 'tag2':
|
||||
return sql`LOWER(${embedding.tag2}) = LOWER(${value})`
|
||||
case 'tag3':
|
||||
return sql`LOWER(${embedding.tag3}) = LOWER(${value})`
|
||||
case 'tag4':
|
||||
return sql`LOWER(${embedding.tag4}) = LOWER(${value})`
|
||||
case 'tag5':
|
||||
return sql`LOWER(${embedding.tag5}) = LOWER(${value})`
|
||||
case 'tag6':
|
||||
return sql`LOWER(${embedding.tag6}) = LOWER(${value})`
|
||||
case 'tag7':
|
||||
return sql`LOWER(${embedding.tag7}) = LOWER(${value})`
|
||||
default:
|
||||
return sql`1=1` // No-op for unknown keys
|
||||
}
|
||||
})
|
||||
: [])
|
||||
sql`${embedding.embedding} <=> ${queryVector}::vector < ${distanceThreshold}`
|
||||
)
|
||||
)
|
||||
.orderBy(sql`${embedding.embedding} <=> ${queryVector}::vector`)
|
||||
@@ -304,8 +231,7 @@ export async function POST(request: NextRequest) {
|
||||
foundKbIds,
|
||||
queryVector,
|
||||
validatedData.topK,
|
||||
strategy.distanceThreshold,
|
||||
validatedData.filters
|
||||
strategy.distanceThreshold
|
||||
)
|
||||
results = mergeAndRankResults(parallelResults, validatedData.topK)
|
||||
} else {
|
||||
@@ -314,24 +240,10 @@ export async function POST(request: NextRequest) {
|
||||
foundKbIds,
|
||||
queryVector,
|
||||
validatedData.topK,
|
||||
strategy.distanceThreshold,
|
||||
validatedData.filters
|
||||
strategy.distanceThreshold
|
||||
)
|
||||
}
|
||||
|
||||
// Calculate cost for the embedding (with fallback if calculation fails)
|
||||
let cost = null
|
||||
let tokenCount = null
|
||||
try {
|
||||
tokenCount = estimateTokenCount(validatedData.query, 'openai')
|
||||
cost = calculateCost('text-embedding-3-small', tokenCount.count, 0, false)
|
||||
} catch (error) {
|
||||
logger.warn(`[${requestId}] Failed to calculate cost for search query`, {
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
})
|
||||
// Continue without cost information rather than failing the search
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
@@ -340,13 +252,7 @@ export async function POST(request: NextRequest) {
|
||||
content: result.content,
|
||||
documentId: result.documentId,
|
||||
chunkIndex: result.chunkIndex,
|
||||
tag1: result.tag1,
|
||||
tag2: result.tag2,
|
||||
tag3: result.tag3,
|
||||
tag4: result.tag4,
|
||||
tag5: result.tag5,
|
||||
tag6: result.tag6,
|
||||
tag7: result.tag7,
|
||||
metadata: result.metadata,
|
||||
similarity: 1 - result.distance,
|
||||
})),
|
||||
query: validatedData.query,
|
||||
@@ -354,22 +260,6 @@ export async function POST(request: NextRequest) {
|
||||
knowledgeBaseId: foundKbIds[0],
|
||||
topK: validatedData.topK,
|
||||
totalResults: results.length,
|
||||
...(cost && tokenCount
|
||||
? {
|
||||
cost: {
|
||||
input: cost.input,
|
||||
output: cost.output,
|
||||
total: cost.total,
|
||||
tokens: {
|
||||
prompt: tokenCount.count,
|
||||
completion: 0,
|
||||
total: tokenCount.count,
|
||||
},
|
||||
model: 'text-embedding-3-small',
|
||||
pricing: cost.pricing,
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
},
|
||||
})
|
||||
} catch (validationError) {
|
||||
|
||||
@@ -73,14 +73,6 @@ export interface DocumentData {
|
||||
enabled: boolean
|
||||
deletedAt?: Date | null
|
||||
uploadedAt: Date
|
||||
// Document tags
|
||||
tag1?: string | null
|
||||
tag2?: string | null
|
||||
tag3?: string | null
|
||||
tag4?: string | null
|
||||
tag5?: string | null
|
||||
tag6?: string | null
|
||||
tag7?: string | null
|
||||
}
|
||||
|
||||
export interface EmbeddingData {
|
||||
@@ -96,14 +88,7 @@ export interface EmbeddingData {
|
||||
embeddingModel: string
|
||||
startOffset: number
|
||||
endOffset: number
|
||||
// Tag fields for filtering
|
||||
tag1?: string | null
|
||||
tag2?: string | null
|
||||
tag3?: string | null
|
||||
tag4?: string | null
|
||||
tag5?: string | null
|
||||
tag6?: string | null
|
||||
tag7?: string | null
|
||||
metadata: unknown
|
||||
enabled: boolean
|
||||
createdAt: Date
|
||||
updatedAt: Date
|
||||
@@ -460,26 +445,7 @@ export async function processDocumentAsync(
|
||||
const chunkTexts = processed.chunks.map((chunk) => chunk.text)
|
||||
const embeddings = chunkTexts.length > 0 ? await generateEmbeddings(chunkTexts) : []
|
||||
|
||||
logger.info(`[${documentId}] Embeddings generated, fetching document tags`)
|
||||
|
||||
// Fetch document to get tags
|
||||
const documentRecord = await db
|
||||
.select({
|
||||
tag1: document.tag1,
|
||||
tag2: document.tag2,
|
||||
tag3: document.tag3,
|
||||
tag4: document.tag4,
|
||||
tag5: document.tag5,
|
||||
tag6: document.tag6,
|
||||
tag7: document.tag7,
|
||||
})
|
||||
.from(document)
|
||||
.where(eq(document.id, documentId))
|
||||
.limit(1)
|
||||
|
||||
const documentTags = documentRecord[0] || {}
|
||||
|
||||
logger.info(`[${documentId}] Creating embedding records with tags`)
|
||||
logger.info(`[${documentId}] Embeddings generated, updating document record`)
|
||||
|
||||
const embeddingRecords = processed.chunks.map((chunk, chunkIndex) => ({
|
||||
id: crypto.randomUUID(),
|
||||
@@ -494,14 +460,7 @@ export async function processDocumentAsync(
|
||||
embeddingModel: 'text-embedding-3-small',
|
||||
startOffset: chunk.metadata.startIndex,
|
||||
endOffset: chunk.metadata.endIndex,
|
||||
// Copy tags from document
|
||||
tag1: documentTags.tag1,
|
||||
tag2: documentTags.tag2,
|
||||
tag3: documentTags.tag3,
|
||||
tag4: documentTags.tag4,
|
||||
tag5: documentTags.tag5,
|
||||
tag6: documentTags.tag6,
|
||||
tag7: documentTags.tag7,
|
||||
metadata: {},
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
}))
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { PutObjectCommand } from '@aws-sdk/client-s3'
|
||||
import { and, eq, inArray, lt, sql } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { verifyCronAuth } from '@/lib/auth/internal'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { env } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { snapshotService } from '@/lib/logs/snapshot-service'
|
||||
@@ -19,11 +18,17 @@ const S3_CONFIG = {
|
||||
region: env.AWS_REGION || '',
|
||||
}
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
export async function GET(request: Request) {
|
||||
try {
|
||||
const authError = verifyCronAuth(request, 'logs cleanup')
|
||||
if (authError) {
|
||||
return authError
|
||||
const authHeader = request.headers.get('authorization')
|
||||
|
||||
if (!env.CRON_SECRET) {
|
||||
return new NextResponse('Configuration error: Cron secret is not set', { status: 500 })
|
||||
}
|
||||
|
||||
if (!authHeader || authHeader !== `Bearer ${env.CRON_SECRET}`) {
|
||||
logger.warn('Unauthorized access attempt to logs cleanup endpoint')
|
||||
return new NextResponse('Unauthorized', { status: 401 })
|
||||
}
|
||||
|
||||
if (!S3_CONFIG.bucket || !S3_CONFIG.region) {
|
||||
|
||||
@@ -4,7 +4,7 @@ import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
import { db } from '@/db'
|
||||
import { marketplace } from '@/db/schema'
|
||||
import * as schema from '@/db/schema'
|
||||
|
||||
const logger = createLogger('MarketplaceInfoAPI')
|
||||
|
||||
@@ -24,8 +24,8 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
// Fetch marketplace data for the workflow
|
||||
const marketplaceEntry = await db
|
||||
.select()
|
||||
.from(marketplace)
|
||||
.where(eq(marketplace.workflowId, id))
|
||||
.from(schema.marketplace)
|
||||
.where(eq(schema.marketplace.workflowId, id))
|
||||
.limit(1)
|
||||
.then((rows) => rows[0])
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
import { db } from '@/db'
|
||||
import { marketplace, workflow } from '@/db/schema'
|
||||
import * as schema from '@/db/schema'
|
||||
|
||||
const logger = createLogger('MarketplaceUnpublishAPI')
|
||||
|
||||
@@ -34,13 +34,13 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
// Get the marketplace entry using the marketplace ID
|
||||
const marketplaceEntry = await db
|
||||
.select({
|
||||
id: marketplace.id,
|
||||
workflowId: marketplace.workflowId,
|
||||
authorId: marketplace.authorId,
|
||||
name: marketplace.name,
|
||||
id: schema.marketplace.id,
|
||||
workflowId: schema.marketplace.workflowId,
|
||||
authorId: schema.marketplace.authorId,
|
||||
name: schema.marketplace.name,
|
||||
})
|
||||
.from(marketplace)
|
||||
.where(eq(marketplace.id, id))
|
||||
.from(schema.marketplace)
|
||||
.where(eq(schema.marketplace.id, id))
|
||||
.limit(1)
|
||||
.then((rows) => rows[0])
|
||||
|
||||
@@ -60,33 +60,36 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
const workflowId = marketplaceEntry.workflowId
|
||||
|
||||
// Verify the workflow exists and belongs to the user
|
||||
const workflowEntry = await db
|
||||
const workflow = await db
|
||||
.select({
|
||||
id: workflow.id,
|
||||
userId: workflow.userId,
|
||||
id: schema.workflow.id,
|
||||
userId: schema.workflow.userId,
|
||||
})
|
||||
.from(workflow)
|
||||
.where(eq(workflow.id, workflowId))
|
||||
.from(schema.workflow)
|
||||
.where(eq(schema.workflow.id, workflowId))
|
||||
.limit(1)
|
||||
.then((rows) => rows[0])
|
||||
|
||||
if (!workflowEntry) {
|
||||
if (!workflow) {
|
||||
logger.warn(`[${requestId}] Associated workflow not found: ${workflowId}`)
|
||||
// We'll still delete the marketplace entry even if the workflow is missing
|
||||
} else if (workflowEntry.userId !== userId) {
|
||||
} else if (workflow.userId !== userId) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workflow ${workflowId} belongs to user ${workflowEntry.userId}, not current user ${userId}`
|
||||
`[${requestId}] Workflow ${workflowId} belongs to user ${workflow.userId}, not current user ${userId}`
|
||||
)
|
||||
return createErrorResponse('You do not have permission to unpublish this workflow', 403)
|
||||
}
|
||||
|
||||
try {
|
||||
// Delete the marketplace entry - this is the primary action
|
||||
await db.delete(marketplace).where(eq(marketplace.id, id))
|
||||
await db.delete(schema.marketplace).where(eq(schema.marketplace.id, id))
|
||||
|
||||
// Update the workflow to mark it as unpublished if it exists
|
||||
if (workflowEntry) {
|
||||
await db.update(workflow).set({ isPublished: false }).where(eq(workflow.id, workflowId))
|
||||
if (workflow) {
|
||||
await db
|
||||
.update(schema.workflow)
|
||||
.set({ isPublished: false })
|
||||
.where(eq(schema.workflow.id, workflowId))
|
||||
}
|
||||
|
||||
logger.info(
|
||||
|
||||
@@ -3,7 +3,7 @@ import type { NextRequest } from 'next/server'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
import { db } from '@/db'
|
||||
import { marketplace } from '@/db/schema'
|
||||
import * as schema from '@/db/schema'
|
||||
|
||||
const logger = createLogger('MarketplaceViewAPI')
|
||||
|
||||
@@ -22,10 +22,10 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
// Find the marketplace entry for this marketplace ID
|
||||
const marketplaceEntry = await db
|
||||
.select({
|
||||
id: marketplace.id,
|
||||
id: schema.marketplace.id,
|
||||
})
|
||||
.from(marketplace)
|
||||
.where(eq(marketplace.id, id))
|
||||
.from(schema.marketplace)
|
||||
.where(eq(schema.marketplace.id, id))
|
||||
.limit(1)
|
||||
.then((rows) => rows[0])
|
||||
|
||||
@@ -36,11 +36,11 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
|
||||
// Increment the view count for this workflow
|
||||
await db
|
||||
.update(marketplace)
|
||||
.update(schema.marketplace)
|
||||
.set({
|
||||
views: sql`${marketplace.views} + 1`,
|
||||
views: sql`${schema.marketplace.views} + 1`,
|
||||
})
|
||||
.where(eq(marketplace.id, id))
|
||||
.where(eq(schema.marketplace.id, id))
|
||||
|
||||
logger.info(`[${requestId}] Incremented view count for marketplace entry: ${id}`)
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
import { CATEGORIES } from '@/app/workspace/[workspaceId]/marketplace/constants/categories'
|
||||
import { db } from '@/db'
|
||||
import { marketplace } from '@/db/schema'
|
||||
import * as schema from '@/db/schema'
|
||||
|
||||
const logger = createLogger('MarketplaceWorkflowsAPI')
|
||||
|
||||
@@ -50,39 +50,39 @@ export async function GET(request: NextRequest) {
|
||||
// Query with state included
|
||||
marketplaceEntry = await db
|
||||
.select({
|
||||
id: marketplace.id,
|
||||
workflowId: marketplace.workflowId,
|
||||
name: marketplace.name,
|
||||
description: marketplace.description,
|
||||
authorId: marketplace.authorId,
|
||||
authorName: marketplace.authorName,
|
||||
state: marketplace.state,
|
||||
views: marketplace.views,
|
||||
category: marketplace.category,
|
||||
createdAt: marketplace.createdAt,
|
||||
updatedAt: marketplace.updatedAt,
|
||||
id: schema.marketplace.id,
|
||||
workflowId: schema.marketplace.workflowId,
|
||||
name: schema.marketplace.name,
|
||||
description: schema.marketplace.description,
|
||||
authorId: schema.marketplace.authorId,
|
||||
authorName: schema.marketplace.authorName,
|
||||
state: schema.marketplace.state,
|
||||
views: schema.marketplace.views,
|
||||
category: schema.marketplace.category,
|
||||
createdAt: schema.marketplace.createdAt,
|
||||
updatedAt: schema.marketplace.updatedAt,
|
||||
})
|
||||
.from(marketplace)
|
||||
.where(eq(marketplace.workflowId, workflowId))
|
||||
.from(schema.marketplace)
|
||||
.where(eq(schema.marketplace.workflowId, workflowId))
|
||||
.limit(1)
|
||||
.then((rows) => rows[0])
|
||||
} else {
|
||||
// Query without state
|
||||
marketplaceEntry = await db
|
||||
.select({
|
||||
id: marketplace.id,
|
||||
workflowId: marketplace.workflowId,
|
||||
name: marketplace.name,
|
||||
description: marketplace.description,
|
||||
authorId: marketplace.authorId,
|
||||
authorName: marketplace.authorName,
|
||||
views: marketplace.views,
|
||||
category: marketplace.category,
|
||||
createdAt: marketplace.createdAt,
|
||||
updatedAt: marketplace.updatedAt,
|
||||
id: schema.marketplace.id,
|
||||
workflowId: schema.marketplace.workflowId,
|
||||
name: schema.marketplace.name,
|
||||
description: schema.marketplace.description,
|
||||
authorId: schema.marketplace.authorId,
|
||||
authorName: schema.marketplace.authorName,
|
||||
views: schema.marketplace.views,
|
||||
category: schema.marketplace.category,
|
||||
createdAt: schema.marketplace.createdAt,
|
||||
updatedAt: schema.marketplace.updatedAt,
|
||||
})
|
||||
.from(marketplace)
|
||||
.where(eq(marketplace.workflowId, workflowId))
|
||||
.from(schema.marketplace)
|
||||
.where(eq(schema.marketplace.workflowId, workflowId))
|
||||
.limit(1)
|
||||
.then((rows) => rows[0])
|
||||
}
|
||||
@@ -114,39 +114,39 @@ export async function GET(request: NextRequest) {
|
||||
// Query with state included
|
||||
marketplaceEntry = await db
|
||||
.select({
|
||||
id: marketplace.id,
|
||||
workflowId: marketplace.workflowId,
|
||||
name: marketplace.name,
|
||||
description: marketplace.description,
|
||||
authorId: marketplace.authorId,
|
||||
authorName: marketplace.authorName,
|
||||
state: marketplace.state,
|
||||
views: marketplace.views,
|
||||
category: marketplace.category,
|
||||
createdAt: marketplace.createdAt,
|
||||
updatedAt: marketplace.updatedAt,
|
||||
id: schema.marketplace.id,
|
||||
workflowId: schema.marketplace.workflowId,
|
||||
name: schema.marketplace.name,
|
||||
description: schema.marketplace.description,
|
||||
authorId: schema.marketplace.authorId,
|
||||
authorName: schema.marketplace.authorName,
|
||||
state: schema.marketplace.state,
|
||||
views: schema.marketplace.views,
|
||||
category: schema.marketplace.category,
|
||||
createdAt: schema.marketplace.createdAt,
|
||||
updatedAt: schema.marketplace.updatedAt,
|
||||
})
|
||||
.from(marketplace)
|
||||
.where(eq(marketplace.id, marketplaceId))
|
||||
.from(schema.marketplace)
|
||||
.where(eq(schema.marketplace.id, marketplaceId))
|
||||
.limit(1)
|
||||
.then((rows) => rows[0])
|
||||
} else {
|
||||
// Query without state
|
||||
marketplaceEntry = await db
|
||||
.select({
|
||||
id: marketplace.id,
|
||||
workflowId: marketplace.workflowId,
|
||||
name: marketplace.name,
|
||||
description: marketplace.description,
|
||||
authorId: marketplace.authorId,
|
||||
authorName: marketplace.authorName,
|
||||
views: marketplace.views,
|
||||
category: marketplace.category,
|
||||
createdAt: marketplace.createdAt,
|
||||
updatedAt: marketplace.updatedAt,
|
||||
id: schema.marketplace.id,
|
||||
workflowId: schema.marketplace.workflowId,
|
||||
name: schema.marketplace.name,
|
||||
description: schema.marketplace.description,
|
||||
authorId: schema.marketplace.authorId,
|
||||
authorName: schema.marketplace.authorName,
|
||||
views: schema.marketplace.views,
|
||||
category: schema.marketplace.category,
|
||||
createdAt: schema.marketplace.createdAt,
|
||||
updatedAt: schema.marketplace.updatedAt,
|
||||
})
|
||||
.from(marketplace)
|
||||
.where(eq(marketplace.id, marketplaceId))
|
||||
.from(schema.marketplace)
|
||||
.where(eq(schema.marketplace.id, marketplaceId))
|
||||
.limit(1)
|
||||
.then((rows) => rows[0])
|
||||
}
|
||||
@@ -183,19 +183,21 @@ export async function GET(request: NextRequest) {
|
||||
|
||||
// Define common fields to select
|
||||
const baseFields = {
|
||||
id: marketplace.id,
|
||||
workflowId: marketplace.workflowId,
|
||||
name: marketplace.name,
|
||||
description: marketplace.description,
|
||||
authorName: marketplace.authorName,
|
||||
views: marketplace.views,
|
||||
category: marketplace.category,
|
||||
createdAt: marketplace.createdAt,
|
||||
updatedAt: marketplace.updatedAt,
|
||||
id: schema.marketplace.id,
|
||||
workflowId: schema.marketplace.workflowId,
|
||||
name: schema.marketplace.name,
|
||||
description: schema.marketplace.description,
|
||||
authorName: schema.marketplace.authorName,
|
||||
views: schema.marketplace.views,
|
||||
category: schema.marketplace.category,
|
||||
createdAt: schema.marketplace.createdAt,
|
||||
updatedAt: schema.marketplace.updatedAt,
|
||||
}
|
||||
|
||||
// Add state if requested
|
||||
const selectFields = includeState ? { ...baseFields, state: marketplace.state } : baseFields
|
||||
const selectFields = includeState
|
||||
? { ...baseFields, state: schema.marketplace.state }
|
||||
: baseFields
|
||||
|
||||
// Determine which sections to fetch
|
||||
const sections = sectionParam ? sectionParam.split(',') : ['popular', 'recent', 'byCategory']
|
||||
@@ -204,8 +206,8 @@ export async function GET(request: NextRequest) {
|
||||
if (sections.includes('popular')) {
|
||||
result.popular = await db
|
||||
.select(selectFields)
|
||||
.from(marketplace)
|
||||
.orderBy(desc(marketplace.views))
|
||||
.from(schema.marketplace)
|
||||
.orderBy(desc(schema.marketplace.views))
|
||||
.limit(limit)
|
||||
}
|
||||
|
||||
@@ -213,8 +215,8 @@ export async function GET(request: NextRequest) {
|
||||
if (sections.includes('recent')) {
|
||||
result.recent = await db
|
||||
.select(selectFields)
|
||||
.from(marketplace)
|
||||
.orderBy(desc(marketplace.createdAt))
|
||||
.from(schema.marketplace)
|
||||
.orderBy(desc(schema.marketplace.createdAt))
|
||||
.limit(limit)
|
||||
}
|
||||
|
||||
@@ -253,9 +255,9 @@ export async function GET(request: NextRequest) {
|
||||
categoriesToFetch.map(async (categoryValue) => {
|
||||
const categoryItems = await db
|
||||
.select(selectFields)
|
||||
.from(marketplace)
|
||||
.where(eq(marketplace.category, categoryValue))
|
||||
.orderBy(desc(marketplace.views))
|
||||
.from(schema.marketplace)
|
||||
.where(eq(schema.marketplace.category, categoryValue))
|
||||
.orderBy(desc(schema.marketplace.views))
|
||||
.limit(limit)
|
||||
|
||||
// Always add the category to the result, even if empty
|
||||
@@ -326,10 +328,10 @@ export async function POST(request: NextRequest) {
|
||||
// Find the marketplace entry
|
||||
const marketplaceEntry = await db
|
||||
.select({
|
||||
id: marketplace.id,
|
||||
id: schema.marketplace.id,
|
||||
})
|
||||
.from(marketplace)
|
||||
.where(eq(marketplace.id, id))
|
||||
.from(schema.marketplace)
|
||||
.where(eq(schema.marketplace.id, id))
|
||||
.limit(1)
|
||||
.then((rows) => rows[0])
|
||||
|
||||
@@ -340,11 +342,11 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
// Increment the view count
|
||||
await db
|
||||
.update(marketplace)
|
||||
.update(schema.marketplace)
|
||||
.set({
|
||||
views: sql`${marketplace.views} + 1`,
|
||||
views: sql`${schema.marketplace.views} + 1`,
|
||||
})
|
||||
.where(eq(marketplace.id, id))
|
||||
.where(eq(schema.marketplace.id, id))
|
||||
|
||||
logger.info(`[${requestId}] Incremented view count for marketplace entry: ${id}`)
|
||||
|
||||
|
||||
@@ -1,506 +0,0 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { and, eq, inArray } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import {
|
||||
getEmailSubject,
|
||||
renderBatchInvitationEmail,
|
||||
renderInvitationEmail,
|
||||
} from '@/components/emails/render-email'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import {
|
||||
validateBulkInvitations,
|
||||
validateSeatAvailability,
|
||||
} from '@/lib/billing/validation/seat-management'
|
||||
import { sendEmail } from '@/lib/email/mailer'
|
||||
import { validateAndNormalizeEmail } from '@/lib/email/utils'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { hasWorkspaceAdminAccess } from '@/lib/permissions/utils'
|
||||
import { db } from '@/db'
|
||||
import { invitation, member, organization, user, workspace, workspaceInvitation } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('OrganizationInvitationsAPI')
|
||||
|
||||
interface WorkspaceInvitation {
|
||||
workspaceId: string
|
||||
permission: 'admin' | 'write' | 'read'
|
||||
}
|
||||
|
||||
/**
|
||||
* GET /api/organizations/[id]/invitations
|
||||
* Get all pending invitations for an organization
|
||||
*/
|
||||
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id: organizationId } = await params
|
||||
|
||||
// Verify user has access to this organization
|
||||
const memberEntry = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
|
||||
.limit(1)
|
||||
|
||||
if (memberEntry.length === 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Forbidden - Not a member of this organization' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
const userRole = memberEntry[0].role
|
||||
const hasAdminAccess = ['owner', 'admin'].includes(userRole)
|
||||
|
||||
if (!hasAdminAccess) {
|
||||
return NextResponse.json({ error: 'Forbidden - Admin access required' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Get all pending invitations for the organization
|
||||
const invitations = await db
|
||||
.select({
|
||||
id: invitation.id,
|
||||
email: invitation.email,
|
||||
role: invitation.role,
|
||||
status: invitation.status,
|
||||
expiresAt: invitation.expiresAt,
|
||||
createdAt: invitation.createdAt,
|
||||
inviterName: user.name,
|
||||
inviterEmail: user.email,
|
||||
})
|
||||
.from(invitation)
|
||||
.leftJoin(user, eq(invitation.inviterId, user.id))
|
||||
.where(eq(invitation.organizationId, organizationId))
|
||||
.orderBy(invitation.createdAt)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
invitations,
|
||||
userRole,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to get organization invitations', {
|
||||
organizationId: (await params).id,
|
||||
error,
|
||||
})
|
||||
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* POST /api/organizations/[id]/invitations
|
||||
* Create organization invitations with optional validation and batch workspace invitations
|
||||
* Query parameters:
|
||||
* - ?validate=true - Only validate, don't send invitations
|
||||
* - ?batch=true - Include workspace invitations
|
||||
*/
|
||||
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id: organizationId } = await params
|
||||
const url = new URL(request.url)
|
||||
const validateOnly = url.searchParams.get('validate') === 'true'
|
||||
const isBatch = url.searchParams.get('batch') === 'true'
|
||||
|
||||
const body = await request.json()
|
||||
const { email, emails, role = 'member', workspaceInvitations } = body
|
||||
|
||||
// Handle single invitation vs batch
|
||||
const invitationEmails = email ? [email] : emails
|
||||
|
||||
// Validate input
|
||||
if (!invitationEmails || !Array.isArray(invitationEmails) || invitationEmails.length === 0) {
|
||||
return NextResponse.json({ error: 'Email or emails array is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!['member', 'admin'].includes(role)) {
|
||||
return NextResponse.json({ error: 'Invalid role' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Verify user has admin access
|
||||
const memberEntry = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
|
||||
.limit(1)
|
||||
|
||||
if (memberEntry.length === 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Forbidden - Not a member of this organization' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
if (!['owner', 'admin'].includes(memberEntry[0].role)) {
|
||||
return NextResponse.json({ error: 'Forbidden - Admin access required' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Handle validation-only requests
|
||||
if (validateOnly) {
|
||||
const validationResult = await validateBulkInvitations(organizationId, invitationEmails)
|
||||
|
||||
logger.info('Invitation validation completed', {
|
||||
organizationId,
|
||||
userId: session.user.id,
|
||||
emailCount: invitationEmails.length,
|
||||
result: validationResult,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: validationResult,
|
||||
validatedBy: session.user.id,
|
||||
validatedAt: new Date().toISOString(),
|
||||
})
|
||||
}
|
||||
|
||||
// Validate seat availability
|
||||
const seatValidation = await validateSeatAvailability(organizationId, invitationEmails.length)
|
||||
|
||||
if (!seatValidation.canInvite) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: seatValidation.reason,
|
||||
seatInfo: {
|
||||
currentSeats: seatValidation.currentSeats,
|
||||
maxSeats: seatValidation.maxSeats,
|
||||
availableSeats: seatValidation.availableSeats,
|
||||
seatsRequested: invitationEmails.length,
|
||||
},
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Get organization details
|
||||
const organizationEntry = await db
|
||||
.select({ name: organization.name })
|
||||
.from(organization)
|
||||
.where(eq(organization.id, organizationId))
|
||||
.limit(1)
|
||||
|
||||
if (organizationEntry.length === 0) {
|
||||
return NextResponse.json({ error: 'Organization not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Validate and normalize emails
|
||||
const processedEmails = invitationEmails
|
||||
.map((email: string) => {
|
||||
const result = validateAndNormalizeEmail(email)
|
||||
return result.isValid ? result.normalized : null
|
||||
})
|
||||
.filter(Boolean) as string[]
|
||||
|
||||
if (processedEmails.length === 0) {
|
||||
return NextResponse.json({ error: 'No valid emails provided' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Handle batch workspace invitations if provided
|
||||
const validWorkspaceInvitations: WorkspaceInvitation[] = []
|
||||
if (isBatch && workspaceInvitations && workspaceInvitations.length > 0) {
|
||||
for (const wsInvitation of workspaceInvitations) {
|
||||
// Check if user has admin permission on this workspace
|
||||
const canInvite = await hasWorkspaceAdminAccess(session.user.id, wsInvitation.workspaceId)
|
||||
|
||||
if (!canInvite) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: `You don't have permission to invite users to workspace ${wsInvitation.workspaceId}`,
|
||||
},
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
validWorkspaceInvitations.push(wsInvitation)
|
||||
}
|
||||
}
|
||||
|
||||
// Check for existing members
|
||||
const existingMembers = await db
|
||||
.select({ userEmail: user.email })
|
||||
.from(member)
|
||||
.innerJoin(user, eq(member.userId, user.id))
|
||||
.where(eq(member.organizationId, organizationId))
|
||||
|
||||
const existingEmails = existingMembers.map((m) => m.userEmail)
|
||||
const newEmails = processedEmails.filter((email: string) => !existingEmails.includes(email))
|
||||
|
||||
// Check for existing pending invitations
|
||||
const existingInvitations = await db
|
||||
.select({ email: invitation.email })
|
||||
.from(invitation)
|
||||
.where(and(eq(invitation.organizationId, organizationId), eq(invitation.status, 'pending')))
|
||||
|
||||
const pendingEmails = existingInvitations.map((i) => i.email)
|
||||
const emailsToInvite = newEmails.filter((email: string) => !pendingEmails.includes(email))
|
||||
|
||||
if (emailsToInvite.length === 0) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'All emails are already members or have pending invitations',
|
||||
details: {
|
||||
existingMembers: processedEmails.filter((email: string) =>
|
||||
existingEmails.includes(email)
|
||||
),
|
||||
pendingInvitations: processedEmails.filter((email: string) =>
|
||||
pendingEmails.includes(email)
|
||||
),
|
||||
},
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Create invitations
|
||||
const expiresAt = new Date(Date.now() + 7 * 24 * 60 * 60 * 1000) // 7 days
|
||||
const invitationsToCreate = emailsToInvite.map((email: string) => ({
|
||||
id: randomUUID(),
|
||||
email,
|
||||
inviterId: session.user.id,
|
||||
organizationId,
|
||||
role,
|
||||
status: 'pending' as const,
|
||||
expiresAt,
|
||||
createdAt: new Date(),
|
||||
}))
|
||||
|
||||
await db.insert(invitation).values(invitationsToCreate)
|
||||
|
||||
// Create workspace invitations if batch mode
|
||||
const workspaceInvitationIds: string[] = []
|
||||
if (isBatch && validWorkspaceInvitations.length > 0) {
|
||||
for (const email of emailsToInvite) {
|
||||
for (const wsInvitation of validWorkspaceInvitations) {
|
||||
const wsInvitationId = randomUUID()
|
||||
const token = randomUUID()
|
||||
|
||||
await db.insert(workspaceInvitation).values({
|
||||
id: wsInvitationId,
|
||||
workspaceId: wsInvitation.workspaceId,
|
||||
email,
|
||||
inviterId: session.user.id,
|
||||
role: 'member',
|
||||
status: 'pending',
|
||||
token,
|
||||
permissions: wsInvitation.permission,
|
||||
expiresAt,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
|
||||
workspaceInvitationIds.push(wsInvitationId)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Send invitation emails
|
||||
const inviter = await db
|
||||
.select({ name: user.name })
|
||||
.from(user)
|
||||
.where(eq(user.id, session.user.id))
|
||||
.limit(1)
|
||||
|
||||
for (const email of emailsToInvite) {
|
||||
const orgInvitation = invitationsToCreate.find((inv) => inv.email === email)
|
||||
if (!orgInvitation) continue
|
||||
|
||||
let emailResult
|
||||
if (isBatch && validWorkspaceInvitations.length > 0) {
|
||||
// Get workspace details for batch email
|
||||
const workspaceDetails = await db
|
||||
.select({
|
||||
id: workspace.id,
|
||||
name: workspace.name,
|
||||
})
|
||||
.from(workspace)
|
||||
.where(
|
||||
inArray(
|
||||
workspace.id,
|
||||
validWorkspaceInvitations.map((w) => w.workspaceId)
|
||||
)
|
||||
)
|
||||
|
||||
const workspaceInvitationsWithNames = validWorkspaceInvitations.map((wsInv) => ({
|
||||
workspaceId: wsInv.workspaceId,
|
||||
workspaceName:
|
||||
workspaceDetails.find((w) => w.id === wsInv.workspaceId)?.name || 'Unknown Workspace',
|
||||
permission: wsInv.permission,
|
||||
}))
|
||||
|
||||
const emailHtml = await renderBatchInvitationEmail(
|
||||
inviter[0]?.name || 'Someone',
|
||||
organizationEntry[0]?.name || 'organization',
|
||||
role,
|
||||
workspaceInvitationsWithNames,
|
||||
`${process.env.NEXT_PUBLIC_BASE_URL}/api/organizations/invitations/accept?id=${orgInvitation.id}`
|
||||
)
|
||||
|
||||
emailResult = await sendEmail({
|
||||
to: email,
|
||||
subject: getEmailSubject('batch-invitation'),
|
||||
html: emailHtml,
|
||||
emailType: 'transactional',
|
||||
})
|
||||
} else {
|
||||
const emailHtml = await renderInvitationEmail(
|
||||
inviter[0]?.name || 'Someone',
|
||||
organizationEntry[0]?.name || 'organization',
|
||||
`${process.env.NEXT_PUBLIC_BASE_URL}/api/organizations/invitations/accept?id=${orgInvitation.id}`,
|
||||
email
|
||||
)
|
||||
|
||||
emailResult = await sendEmail({
|
||||
to: email,
|
||||
subject: getEmailSubject('invitation'),
|
||||
html: emailHtml,
|
||||
emailType: 'transactional',
|
||||
})
|
||||
}
|
||||
|
||||
if (!emailResult.success) {
|
||||
logger.error('Failed to send invitation email', {
|
||||
email,
|
||||
error: emailResult.message,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
logger.info('Organization invitations created', {
|
||||
organizationId,
|
||||
invitedBy: session.user.id,
|
||||
invitationCount: invitationsToCreate.length,
|
||||
emails: emailsToInvite,
|
||||
role,
|
||||
isBatch,
|
||||
workspaceInvitationCount: workspaceInvitationIds.length,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: `${invitationsToCreate.length} invitation(s) sent successfully`,
|
||||
data: {
|
||||
invitationsSent: invitationsToCreate.length,
|
||||
invitedEmails: emailsToInvite,
|
||||
existingMembers: processedEmails.filter((email: string) => existingEmails.includes(email)),
|
||||
pendingInvitations: processedEmails.filter((email: string) =>
|
||||
pendingEmails.includes(email)
|
||||
),
|
||||
invalidEmails: invitationEmails.filter(
|
||||
(email: string) => !validateAndNormalizeEmail(email)
|
||||
),
|
||||
workspaceInvitations: isBatch ? validWorkspaceInvitations.length : 0,
|
||||
seatInfo: {
|
||||
seatsUsed: seatValidation.currentSeats + invitationsToCreate.length,
|
||||
maxSeats: seatValidation.maxSeats,
|
||||
availableSeats: seatValidation.availableSeats - invitationsToCreate.length,
|
||||
},
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to create organization invitations', {
|
||||
organizationId: (await params).id,
|
||||
error,
|
||||
})
|
||||
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* DELETE /api/organizations/[id]/invitations?invitationId=...
|
||||
* Cancel a pending invitation
|
||||
*/
|
||||
export async function DELETE(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> }
|
||||
) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id: organizationId } = await params
|
||||
const url = new URL(request.url)
|
||||
const invitationId = url.searchParams.get('invitationId')
|
||||
|
||||
if (!invitationId) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invitation ID is required as query parameter' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Verify user has admin access
|
||||
const memberEntry = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
|
||||
.limit(1)
|
||||
|
||||
if (memberEntry.length === 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Forbidden - Not a member of this organization' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
if (!['owner', 'admin'].includes(memberEntry[0].role)) {
|
||||
return NextResponse.json({ error: 'Forbidden - Admin access required' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Cancel the invitation
|
||||
const result = await db
|
||||
.update(invitation)
|
||||
.set({
|
||||
status: 'cancelled',
|
||||
})
|
||||
.where(
|
||||
and(
|
||||
eq(invitation.id, invitationId),
|
||||
eq(invitation.organizationId, organizationId),
|
||||
eq(invitation.status, 'pending')
|
||||
)
|
||||
)
|
||||
.returning()
|
||||
|
||||
if (result.length === 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invitation not found or already processed' },
|
||||
{ status: 404 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info('Organization invitation cancelled', {
|
||||
organizationId,
|
||||
invitationId,
|
||||
cancelledBy: session.user.id,
|
||||
email: result[0].email,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: 'Invitation cancelled successfully',
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to cancel organization invitation', {
|
||||
organizationId: (await params).id,
|
||||
error,
|
||||
})
|
||||
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,314 +0,0 @@
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { db } from '@/db'
|
||||
import { member, user, userStats } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('OrganizationMemberAPI')
|
||||
|
||||
/**
|
||||
* GET /api/organizations/[id]/members/[memberId]
|
||||
* Get individual organization member details
|
||||
*/
|
||||
export async function GET(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string; memberId: string }> }
|
||||
) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id: organizationId, memberId } = await params
|
||||
const url = new URL(request.url)
|
||||
const includeUsage = url.searchParams.get('include') === 'usage'
|
||||
|
||||
// Verify user has access to this organization
|
||||
const userMember = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
|
||||
.limit(1)
|
||||
|
||||
if (userMember.length === 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Forbidden - Not a member of this organization' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
const userRole = userMember[0].role
|
||||
const hasAdminAccess = ['owner', 'admin'].includes(userRole)
|
||||
|
||||
// Get target member details
|
||||
const memberQuery = db
|
||||
.select({
|
||||
id: member.id,
|
||||
userId: member.userId,
|
||||
organizationId: member.organizationId,
|
||||
role: member.role,
|
||||
createdAt: member.createdAt,
|
||||
userName: user.name,
|
||||
userEmail: user.email,
|
||||
})
|
||||
.from(member)
|
||||
.innerJoin(user, eq(member.userId, user.id))
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, memberId)))
|
||||
.limit(1)
|
||||
|
||||
const memberEntry = await memberQuery
|
||||
|
||||
if (memberEntry.length === 0) {
|
||||
return NextResponse.json({ error: 'Member not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Check if user can view this member's details
|
||||
const canViewDetails = hasAdminAccess || session.user.id === memberId
|
||||
|
||||
if (!canViewDetails) {
|
||||
return NextResponse.json({ error: 'Forbidden - Insufficient permissions' }, { status: 403 })
|
||||
}
|
||||
|
||||
let memberData = memberEntry[0]
|
||||
|
||||
// Include usage data if requested and user has permission
|
||||
if (includeUsage && hasAdminAccess) {
|
||||
const usageData = await db
|
||||
.select({
|
||||
currentPeriodCost: userStats.currentPeriodCost,
|
||||
currentUsageLimit: userStats.currentUsageLimit,
|
||||
billingPeriodStart: userStats.billingPeriodStart,
|
||||
billingPeriodEnd: userStats.billingPeriodEnd,
|
||||
usageLimitSetBy: userStats.usageLimitSetBy,
|
||||
usageLimitUpdatedAt: userStats.usageLimitUpdatedAt,
|
||||
lastPeriodCost: userStats.lastPeriodCost,
|
||||
})
|
||||
.from(userStats)
|
||||
.where(eq(userStats.userId, memberId))
|
||||
.limit(1)
|
||||
|
||||
if (usageData.length > 0) {
|
||||
memberData = {
|
||||
...memberData,
|
||||
usage: usageData[0],
|
||||
} as typeof memberData & { usage: (typeof usageData)[0] }
|
||||
}
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: memberData,
|
||||
userRole,
|
||||
hasAdminAccess,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to get organization member', {
|
||||
organizationId: (await params).id,
|
||||
memberId: (await params).memberId,
|
||||
error,
|
||||
})
|
||||
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* PUT /api/organizations/[id]/members/[memberId]
|
||||
* Update organization member role
|
||||
*/
|
||||
export async function PUT(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string; memberId: string }> }
|
||||
) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id: organizationId, memberId } = await params
|
||||
const { role } = await request.json()
|
||||
|
||||
// Validate input
|
||||
if (!role || !['admin', 'member'].includes(role)) {
|
||||
return NextResponse.json({ error: 'Invalid role' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Verify user has admin access
|
||||
const userMember = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
|
||||
.limit(1)
|
||||
|
||||
if (userMember.length === 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Forbidden - Not a member of this organization' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
if (!['owner', 'admin'].includes(userMember[0].role)) {
|
||||
return NextResponse.json({ error: 'Forbidden - Admin access required' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Check if target member exists
|
||||
const targetMember = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, memberId)))
|
||||
.limit(1)
|
||||
|
||||
if (targetMember.length === 0) {
|
||||
return NextResponse.json({ error: 'Member not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Prevent changing owner role
|
||||
if (targetMember[0].role === 'owner') {
|
||||
return NextResponse.json({ error: 'Cannot change owner role' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Prevent non-owners from promoting to admin
|
||||
if (role === 'admin' && userMember[0].role !== 'owner') {
|
||||
return NextResponse.json(
|
||||
{ error: 'Only owners can promote members to admin' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
// Update member role
|
||||
const updatedMember = await db
|
||||
.update(member)
|
||||
.set({ role })
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, memberId)))
|
||||
.returning()
|
||||
|
||||
if (updatedMember.length === 0) {
|
||||
return NextResponse.json({ error: 'Failed to update member role' }, { status: 500 })
|
||||
}
|
||||
|
||||
logger.info('Organization member role updated', {
|
||||
organizationId,
|
||||
memberId,
|
||||
newRole: role,
|
||||
updatedBy: session.user.id,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: 'Member role updated successfully',
|
||||
data: {
|
||||
id: updatedMember[0].id,
|
||||
userId: updatedMember[0].userId,
|
||||
role: updatedMember[0].role,
|
||||
updatedBy: session.user.id,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to update organization member role', {
|
||||
organizationId: (await params).id,
|
||||
memberId: (await params).memberId,
|
||||
error,
|
||||
})
|
||||
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* DELETE /api/organizations/[id]/members/[memberId]
|
||||
* Remove member from organization
|
||||
*/
|
||||
export async function DELETE(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string; memberId: string }> }
|
||||
) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id: organizationId, memberId } = await params
|
||||
|
||||
// Verify user has admin access
|
||||
const userMember = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
|
||||
.limit(1)
|
||||
|
||||
if (userMember.length === 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Forbidden - Not a member of this organization' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
const canRemoveMembers =
|
||||
['owner', 'admin'].includes(userMember[0].role) || session.user.id === memberId
|
||||
|
||||
if (!canRemoveMembers) {
|
||||
return NextResponse.json({ error: 'Forbidden - Insufficient permissions' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Check if target member exists
|
||||
const targetMember = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, memberId)))
|
||||
.limit(1)
|
||||
|
||||
if (targetMember.length === 0) {
|
||||
return NextResponse.json({ error: 'Member not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Prevent removing the owner
|
||||
if (targetMember[0].role === 'owner') {
|
||||
return NextResponse.json({ error: 'Cannot remove organization owner' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Remove member
|
||||
const removedMember = await db
|
||||
.delete(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, memberId)))
|
||||
.returning()
|
||||
|
||||
if (removedMember.length === 0) {
|
||||
return NextResponse.json({ error: 'Failed to remove member' }, { status: 500 })
|
||||
}
|
||||
|
||||
logger.info('Organization member removed', {
|
||||
organizationId,
|
||||
removedMemberId: memberId,
|
||||
removedBy: session.user.id,
|
||||
wasSelfRemoval: session.user.id === memberId,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message:
|
||||
session.user.id === memberId
|
||||
? 'You have left the organization'
|
||||
: 'Member removed successfully',
|
||||
data: {
|
||||
removedMemberId: memberId,
|
||||
removedBy: session.user.id,
|
||||
removedAt: new Date().toISOString(),
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to remove organization member', {
|
||||
organizationId: (await params).id,
|
||||
memberId: (await params).memberId,
|
||||
error,
|
||||
})
|
||||
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,293 +0,0 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getEmailSubject, renderInvitationEmail } from '@/components/emails/render-email'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { validateSeatAvailability } from '@/lib/billing/validation/seat-management'
|
||||
import { sendEmail } from '@/lib/email/mailer'
|
||||
import { validateAndNormalizeEmail } from '@/lib/email/utils'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { db } from '@/db'
|
||||
import { invitation, member, organization, user, userStats } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('OrganizationMembersAPI')
|
||||
|
||||
/**
|
||||
* GET /api/organizations/[id]/members
|
||||
* Get organization members with optional usage data
|
||||
*/
|
||||
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id: organizationId } = await params
|
||||
const url = new URL(request.url)
|
||||
const includeUsage = url.searchParams.get('include') === 'usage'
|
||||
|
||||
// Verify user has access to this organization
|
||||
const memberEntry = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
|
||||
.limit(1)
|
||||
|
||||
if (memberEntry.length === 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Forbidden - Not a member of this organization' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
const userRole = memberEntry[0].role
|
||||
const hasAdminAccess = ['owner', 'admin'].includes(userRole)
|
||||
|
||||
// Get organization members
|
||||
const query = db
|
||||
.select({
|
||||
id: member.id,
|
||||
userId: member.userId,
|
||||
organizationId: member.organizationId,
|
||||
role: member.role,
|
||||
createdAt: member.createdAt,
|
||||
userName: user.name,
|
||||
userEmail: user.email,
|
||||
})
|
||||
.from(member)
|
||||
.innerJoin(user, eq(member.userId, user.id))
|
||||
.where(eq(member.organizationId, organizationId))
|
||||
|
||||
// Include usage data if requested and user has admin access
|
||||
if (includeUsage && hasAdminAccess) {
|
||||
const membersWithUsage = await db
|
||||
.select({
|
||||
id: member.id,
|
||||
userId: member.userId,
|
||||
organizationId: member.organizationId,
|
||||
role: member.role,
|
||||
createdAt: member.createdAt,
|
||||
userName: user.name,
|
||||
userEmail: user.email,
|
||||
currentPeriodCost: userStats.currentPeriodCost,
|
||||
currentUsageLimit: userStats.currentUsageLimit,
|
||||
billingPeriodStart: userStats.billingPeriodStart,
|
||||
billingPeriodEnd: userStats.billingPeriodEnd,
|
||||
usageLimitSetBy: userStats.usageLimitSetBy,
|
||||
usageLimitUpdatedAt: userStats.usageLimitUpdatedAt,
|
||||
})
|
||||
.from(member)
|
||||
.innerJoin(user, eq(member.userId, user.id))
|
||||
.leftJoin(userStats, eq(user.id, userStats.userId))
|
||||
.where(eq(member.organizationId, organizationId))
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: membersWithUsage,
|
||||
total: membersWithUsage.length,
|
||||
userRole,
|
||||
hasAdminAccess,
|
||||
})
|
||||
}
|
||||
|
||||
const members = await query
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: members,
|
||||
total: members.length,
|
||||
userRole,
|
||||
hasAdminAccess,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to get organization members', {
|
||||
organizationId: (await params).id,
|
||||
error,
|
||||
})
|
||||
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* POST /api/organizations/[id]/members
|
||||
* Invite new member to organization
|
||||
*/
|
||||
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id: organizationId } = await params
|
||||
const { email, role = 'member' } = await request.json()
|
||||
|
||||
// Validate input
|
||||
if (!email) {
|
||||
return NextResponse.json({ error: 'Email is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!['admin', 'member'].includes(role)) {
|
||||
return NextResponse.json({ error: 'Invalid role' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Validate and normalize email
|
||||
const { isValid, normalized: normalizedEmail } = validateAndNormalizeEmail(email)
|
||||
if (!isValid) {
|
||||
return NextResponse.json({ error: 'Invalid email format' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Verify user has admin access
|
||||
const memberEntry = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
|
||||
.limit(1)
|
||||
|
||||
if (memberEntry.length === 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Forbidden - Not a member of this organization' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
if (!['owner', 'admin'].includes(memberEntry[0].role)) {
|
||||
return NextResponse.json({ error: 'Forbidden - Admin access required' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Check seat availability
|
||||
const seatValidation = await validateSeatAvailability(organizationId, 1)
|
||||
if (!seatValidation.canInvite) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: `Cannot invite member. Using ${seatValidation.currentSeats} of ${seatValidation.maxSeats} seats.`,
|
||||
details: seatValidation,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Check if user is already a member
|
||||
const existingUser = await db
|
||||
.select({ id: user.id })
|
||||
.from(user)
|
||||
.where(eq(user.email, normalizedEmail))
|
||||
.limit(1)
|
||||
|
||||
if (existingUser.length > 0) {
|
||||
const existingMember = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(
|
||||
and(eq(member.organizationId, organizationId), eq(member.userId, existingUser[0].id))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existingMember.length > 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'User is already a member of this organization' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Check for existing pending invitation
|
||||
const existingInvitation = await db
|
||||
.select()
|
||||
.from(invitation)
|
||||
.where(
|
||||
and(
|
||||
eq(invitation.organizationId, organizationId),
|
||||
eq(invitation.email, normalizedEmail),
|
||||
eq(invitation.status, 'pending')
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existingInvitation.length > 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Pending invitation already exists for this email' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Create invitation
|
||||
const invitationId = randomUUID()
|
||||
const expiresAt = new Date()
|
||||
expiresAt.setDate(expiresAt.getDate() + 7) // 7 days expiry
|
||||
|
||||
await db.insert(invitation).values({
|
||||
id: invitationId,
|
||||
email: normalizedEmail,
|
||||
inviterId: session.user.id,
|
||||
organizationId,
|
||||
role,
|
||||
status: 'pending',
|
||||
expiresAt,
|
||||
createdAt: new Date(),
|
||||
})
|
||||
|
||||
const organizationEntry = await db
|
||||
.select({ name: organization.name })
|
||||
.from(organization)
|
||||
.where(eq(organization.id, organizationId))
|
||||
.limit(1)
|
||||
|
||||
const inviter = await db
|
||||
.select({ name: user.name })
|
||||
.from(user)
|
||||
.where(eq(user.id, session.user.id))
|
||||
.limit(1)
|
||||
|
||||
const emailHtml = await renderInvitationEmail(
|
||||
inviter[0]?.name || 'Someone',
|
||||
organizationEntry[0]?.name || 'organization',
|
||||
`${process.env.NEXT_PUBLIC_BASE_URL}/api/organizations/invitations/accept?id=${invitationId}`,
|
||||
normalizedEmail
|
||||
)
|
||||
|
||||
const emailResult = await sendEmail({
|
||||
to: normalizedEmail,
|
||||
subject: getEmailSubject('invitation'),
|
||||
html: emailHtml,
|
||||
emailType: 'transactional',
|
||||
})
|
||||
|
||||
if (emailResult.success) {
|
||||
logger.info('Member invitation sent', {
|
||||
email: normalizedEmail,
|
||||
organizationId,
|
||||
invitationId,
|
||||
role,
|
||||
})
|
||||
} else {
|
||||
logger.error('Failed to send invitation email', {
|
||||
email: normalizedEmail,
|
||||
error: emailResult.message,
|
||||
})
|
||||
// Don't fail the request if email fails
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: `Invitation sent to ${normalizedEmail}`,
|
||||
data: {
|
||||
invitationId,
|
||||
email: normalizedEmail,
|
||||
role,
|
||||
expiresAt,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to invite organization member', {
|
||||
organizationId: (await params).id,
|
||||
error,
|
||||
})
|
||||
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,248 +0,0 @@
|
||||
import { and, eq, ne } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import {
|
||||
getOrganizationSeatAnalytics,
|
||||
getOrganizationSeatInfo,
|
||||
updateOrganizationSeats,
|
||||
} from '@/lib/billing/validation/seat-management'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { db } from '@/db'
|
||||
import { member, organization } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('OrganizationAPI')
|
||||
|
||||
/**
|
||||
* GET /api/organizations/[id]
|
||||
* Get organization details including settings and seat information
|
||||
*/
|
||||
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id: organizationId } = await params
|
||||
const url = new URL(request.url)
|
||||
const includeSeats = url.searchParams.get('include') === 'seats'
|
||||
|
||||
// Verify user has access to this organization
|
||||
const memberEntry = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
|
||||
.limit(1)
|
||||
|
||||
if (memberEntry.length === 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Forbidden - Not a member of this organization' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
// Get organization data
|
||||
const organizationEntry = await db
|
||||
.select()
|
||||
.from(organization)
|
||||
.where(eq(organization.id, organizationId))
|
||||
.limit(1)
|
||||
|
||||
if (organizationEntry.length === 0) {
|
||||
return NextResponse.json({ error: 'Organization not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
const userRole = memberEntry[0].role
|
||||
const hasAdminAccess = ['owner', 'admin'].includes(userRole)
|
||||
|
||||
const response: any = {
|
||||
success: true,
|
||||
data: {
|
||||
id: organizationEntry[0].id,
|
||||
name: organizationEntry[0].name,
|
||||
slug: organizationEntry[0].slug,
|
||||
logo: organizationEntry[0].logo,
|
||||
metadata: organizationEntry[0].metadata,
|
||||
createdAt: organizationEntry[0].createdAt,
|
||||
updatedAt: organizationEntry[0].updatedAt,
|
||||
},
|
||||
userRole,
|
||||
hasAdminAccess,
|
||||
}
|
||||
|
||||
// Include seat information if requested
|
||||
if (includeSeats) {
|
||||
const seatInfo = await getOrganizationSeatInfo(organizationId)
|
||||
if (seatInfo) {
|
||||
response.data.seats = seatInfo
|
||||
}
|
||||
|
||||
// Include analytics for admins
|
||||
if (hasAdminAccess) {
|
||||
const analytics = await getOrganizationSeatAnalytics(organizationId)
|
||||
if (analytics) {
|
||||
response.data.seatAnalytics = analytics
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return NextResponse.json(response)
|
||||
} catch (error) {
|
||||
logger.error('Failed to get organization', {
|
||||
organizationId: (await params).id,
|
||||
error,
|
||||
})
|
||||
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* PUT /api/organizations/[id]
|
||||
* Update organization settings or seat count
|
||||
*/
|
||||
export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id: organizationId } = await params
|
||||
const body = await request.json()
|
||||
const { name, slug, logo, seats } = body
|
||||
|
||||
// Verify user has admin access
|
||||
const memberEntry = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
|
||||
.limit(1)
|
||||
|
||||
if (memberEntry.length === 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Forbidden - Not a member of this organization' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
if (!['owner', 'admin'].includes(memberEntry[0].role)) {
|
||||
return NextResponse.json({ error: 'Forbidden - Admin access required' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Handle seat count update
|
||||
if (seats !== undefined) {
|
||||
if (typeof seats !== 'number' || seats < 1) {
|
||||
return NextResponse.json({ error: 'Invalid seat count' }, { status: 400 })
|
||||
}
|
||||
|
||||
const result = await updateOrganizationSeats(organizationId, seats, session.user.id)
|
||||
|
||||
if (!result.success) {
|
||||
return NextResponse.json({ error: result.error }, { status: 400 })
|
||||
}
|
||||
|
||||
logger.info('Organization seat count updated', {
|
||||
organizationId,
|
||||
newSeatCount: seats,
|
||||
updatedBy: session.user.id,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: 'Seat count updated successfully',
|
||||
data: {
|
||||
seats: seats,
|
||||
updatedBy: session.user.id,
|
||||
updatedAt: new Date().toISOString(),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// Handle settings update
|
||||
if (name !== undefined || slug !== undefined || logo !== undefined) {
|
||||
// Validate required fields
|
||||
if (name !== undefined && (!name || typeof name !== 'string' || name.trim().length === 0)) {
|
||||
return NextResponse.json({ error: 'Organization name is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (slug !== undefined && (!slug || typeof slug !== 'string' || slug.trim().length === 0)) {
|
||||
return NextResponse.json({ error: 'Organization slug is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Validate slug format
|
||||
if (slug !== undefined) {
|
||||
const slugRegex = /^[a-z0-9-_]+$/
|
||||
if (!slugRegex.test(slug)) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Slug can only contain lowercase letters, numbers, hyphens, and underscores',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Check if slug is already taken by another organization
|
||||
const existingSlug = await db
|
||||
.select()
|
||||
.from(organization)
|
||||
.where(and(eq(organization.slug, slug), ne(organization.id, organizationId)))
|
||||
.limit(1)
|
||||
|
||||
if (existingSlug.length > 0) {
|
||||
return NextResponse.json({ error: 'This slug is already taken' }, { status: 400 })
|
||||
}
|
||||
}
|
||||
|
||||
// Build update object with only provided fields
|
||||
const updateData: any = { updatedAt: new Date() }
|
||||
if (name !== undefined) updateData.name = name.trim()
|
||||
if (slug !== undefined) updateData.slug = slug.trim()
|
||||
if (logo !== undefined) updateData.logo = logo || null
|
||||
|
||||
// Update organization
|
||||
const updatedOrg = await db
|
||||
.update(organization)
|
||||
.set(updateData)
|
||||
.where(eq(organization.id, organizationId))
|
||||
.returning()
|
||||
|
||||
if (updatedOrg.length === 0) {
|
||||
return NextResponse.json({ error: 'Organization not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
logger.info('Organization settings updated', {
|
||||
organizationId,
|
||||
updatedBy: session.user.id,
|
||||
changes: { name, slug, logo },
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: 'Organization updated successfully',
|
||||
data: {
|
||||
id: updatedOrg[0].id,
|
||||
name: updatedOrg[0].name,
|
||||
slug: updatedOrg[0].slug,
|
||||
logo: updatedOrg[0].logo,
|
||||
updatedAt: updatedOrg[0].updatedAt,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
return NextResponse.json({ error: 'No valid fields provided for update' }, { status: 400 })
|
||||
} catch (error) {
|
||||
logger.error('Failed to update organization', {
|
||||
organizationId: (await params).id,
|
||||
error,
|
||||
})
|
||||
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
// DELETE method removed - organization deletion not implemented
|
||||
// If deletion is needed in the future, it should be implemented with proper
|
||||
// cleanup of subscriptions, members, workspaces, and billing data
|
||||
@@ -1,209 +0,0 @@
|
||||
import { and, eq, or } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { db } from '@/db'
|
||||
import { member, permissions, user, workspace, workspaceMember } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('OrganizationWorkspacesAPI')
|
||||
|
||||
/**
|
||||
* GET /api/organizations/[id]/workspaces
|
||||
* Get workspaces related to the organization with optional filtering
|
||||
* Query parameters:
|
||||
* - ?available=true - Only workspaces where user can invite others (admin permissions)
|
||||
* - ?member=userId - Workspaces where specific member has access
|
||||
*/
|
||||
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id: organizationId } = await params
|
||||
const url = new URL(request.url)
|
||||
const availableOnly = url.searchParams.get('available') === 'true'
|
||||
const memberId = url.searchParams.get('member')
|
||||
|
||||
// Verify user is a member of this organization
|
||||
const memberEntry = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
|
||||
.limit(1)
|
||||
|
||||
if (memberEntry.length === 0) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Forbidden - Not a member of this organization',
|
||||
},
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
const userRole = memberEntry[0].role
|
||||
const hasAdminAccess = ['owner', 'admin'].includes(userRole)
|
||||
|
||||
if (availableOnly) {
|
||||
// Get workspaces where user has admin permissions (can invite others)
|
||||
const availableWorkspaces = await db
|
||||
.select({
|
||||
id: workspace.id,
|
||||
name: workspace.name,
|
||||
ownerId: workspace.ownerId,
|
||||
createdAt: workspace.createdAt,
|
||||
isOwner: eq(workspace.ownerId, session.user.id),
|
||||
permissionType: permissions.permissionType,
|
||||
})
|
||||
.from(workspace)
|
||||
.leftJoin(
|
||||
permissions,
|
||||
and(
|
||||
eq(permissions.entityType, 'workspace'),
|
||||
eq(permissions.entityId, workspace.id),
|
||||
eq(permissions.userId, session.user.id)
|
||||
)
|
||||
)
|
||||
.where(
|
||||
or(
|
||||
// User owns the workspace
|
||||
eq(workspace.ownerId, session.user.id),
|
||||
// User has admin permission on the workspace
|
||||
and(
|
||||
eq(permissions.userId, session.user.id),
|
||||
eq(permissions.entityType, 'workspace'),
|
||||
eq(permissions.permissionType, 'admin')
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
// Filter and format the results
|
||||
const workspacesWithInvitePermission = availableWorkspaces
|
||||
.filter((workspace) => {
|
||||
// Include if user owns the workspace OR has admin permission
|
||||
return workspace.isOwner || workspace.permissionType === 'admin'
|
||||
})
|
||||
.map((workspace) => ({
|
||||
id: workspace.id,
|
||||
name: workspace.name,
|
||||
isOwner: workspace.isOwner,
|
||||
canInvite: true, // All returned workspaces have invite permission
|
||||
createdAt: workspace.createdAt,
|
||||
}))
|
||||
|
||||
logger.info('Retrieved available workspaces for organization member', {
|
||||
organizationId,
|
||||
userId: session.user.id,
|
||||
workspaceCount: workspacesWithInvitePermission.length,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
workspaces: workspacesWithInvitePermission,
|
||||
totalCount: workspacesWithInvitePermission.length,
|
||||
filter: 'available',
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
if (memberId && hasAdminAccess) {
|
||||
// Get workspaces where specific member has access (admin only)
|
||||
const memberWorkspaces = await db
|
||||
.select({
|
||||
id: workspace.id,
|
||||
name: workspace.name,
|
||||
ownerId: workspace.ownerId,
|
||||
createdAt: workspace.createdAt,
|
||||
isOwner: eq(workspace.ownerId, memberId),
|
||||
permissionType: permissions.permissionType,
|
||||
joinedAt: workspaceMember.joinedAt,
|
||||
})
|
||||
.from(workspace)
|
||||
.leftJoin(
|
||||
permissions,
|
||||
and(
|
||||
eq(permissions.entityType, 'workspace'),
|
||||
eq(permissions.entityId, workspace.id),
|
||||
eq(permissions.userId, memberId)
|
||||
)
|
||||
)
|
||||
.leftJoin(
|
||||
workspaceMember,
|
||||
and(eq(workspaceMember.workspaceId, workspace.id), eq(workspaceMember.userId, memberId))
|
||||
)
|
||||
.where(
|
||||
or(
|
||||
// Member owns the workspace
|
||||
eq(workspace.ownerId, memberId),
|
||||
// Member has permissions on the workspace
|
||||
and(eq(permissions.userId, memberId), eq(permissions.entityType, 'workspace'))
|
||||
)
|
||||
)
|
||||
|
||||
const formattedWorkspaces = memberWorkspaces.map((workspace) => ({
|
||||
id: workspace.id,
|
||||
name: workspace.name,
|
||||
isOwner: workspace.isOwner,
|
||||
permission: workspace.permissionType,
|
||||
joinedAt: workspace.joinedAt,
|
||||
createdAt: workspace.createdAt,
|
||||
}))
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
workspaces: formattedWorkspaces,
|
||||
totalCount: formattedWorkspaces.length,
|
||||
filter: 'member',
|
||||
memberId,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// Default: Get all workspaces (basic info only for regular members)
|
||||
if (!hasAdminAccess) {
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
workspaces: [],
|
||||
totalCount: 0,
|
||||
message: 'Workspace access information is only available to organization admins',
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// For admins: Get summary of all workspaces
|
||||
const allWorkspaces = await db
|
||||
.select({
|
||||
id: workspace.id,
|
||||
name: workspace.name,
|
||||
ownerId: workspace.ownerId,
|
||||
createdAt: workspace.createdAt,
|
||||
ownerName: user.name,
|
||||
})
|
||||
.from(workspace)
|
||||
.leftJoin(user, eq(workspace.ownerId, user.id))
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
workspaces: allWorkspaces,
|
||||
totalCount: allWorkspaces.length,
|
||||
filter: 'all',
|
||||
},
|
||||
userRole,
|
||||
hasAdminAccess,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to get organization workspaces', { error })
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Internal server error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,378 +0,0 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { env } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { db } from '@/db'
|
||||
import { invitation, member, permissions, workspaceInvitation, workspaceMember } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('OrganizationInvitationAcceptance')
|
||||
|
||||
// Accept an organization invitation and any associated workspace invitations
|
||||
export async function GET(req: NextRequest) {
|
||||
const invitationId = req.nextUrl.searchParams.get('id')
|
||||
|
||||
if (!invitationId) {
|
||||
return NextResponse.redirect(
|
||||
new URL(
|
||||
'/invite/invite-error?reason=missing-invitation-id',
|
||||
env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
// Redirect to login, user will be redirected back after login
|
||||
return NextResponse.redirect(
|
||||
new URL(
|
||||
`/invite/organization?id=${invitationId}`,
|
||||
env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
try {
|
||||
// Find the organization invitation
|
||||
const invitationResult = await db
|
||||
.select()
|
||||
.from(invitation)
|
||||
.where(eq(invitation.id, invitationId))
|
||||
.limit(1)
|
||||
|
||||
if (invitationResult.length === 0) {
|
||||
return NextResponse.redirect(
|
||||
new URL(
|
||||
'/invite/invite-error?reason=invalid-invitation',
|
||||
env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
const orgInvitation = invitationResult[0]
|
||||
|
||||
// Check if invitation has expired
|
||||
if (orgInvitation.expiresAt && new Date() > orgInvitation.expiresAt) {
|
||||
return NextResponse.redirect(
|
||||
new URL(
|
||||
'/invite/invite-error?reason=expired',
|
||||
env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
// Check if invitation is still pending
|
||||
if (orgInvitation.status !== 'pending') {
|
||||
return NextResponse.redirect(
|
||||
new URL(
|
||||
'/invite/invite-error?reason=already-processed',
|
||||
env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
// Verify the email matches the current user
|
||||
if (orgInvitation.email !== session.user.email) {
|
||||
return NextResponse.redirect(
|
||||
new URL(
|
||||
'/invite/invite-error?reason=email-mismatch',
|
||||
env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
// Check if user is already a member of the organization
|
||||
const existingMember = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(
|
||||
and(
|
||||
eq(member.organizationId, orgInvitation.organizationId),
|
||||
eq(member.userId, session.user.id)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existingMember.length > 0) {
|
||||
return NextResponse.redirect(
|
||||
new URL(
|
||||
'/invite/invite-error?reason=already-member',
|
||||
env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
// Start transaction to accept both organization and workspace invitations
|
||||
await db.transaction(async (tx) => {
|
||||
// Accept organization invitation - add user as member
|
||||
await tx.insert(member).values({
|
||||
id: randomUUID(),
|
||||
userId: session.user.id,
|
||||
organizationId: orgInvitation.organizationId,
|
||||
role: orgInvitation.role,
|
||||
createdAt: new Date(),
|
||||
})
|
||||
|
||||
// Mark organization invitation as accepted
|
||||
await tx.update(invitation).set({ status: 'accepted' }).where(eq(invitation.id, invitationId))
|
||||
|
||||
// Find and accept any pending workspace invitations for the same email
|
||||
const workspaceInvitations = await tx
|
||||
.select()
|
||||
.from(workspaceInvitation)
|
||||
.where(
|
||||
and(
|
||||
eq(workspaceInvitation.email, orgInvitation.email),
|
||||
eq(workspaceInvitation.status, 'pending')
|
||||
)
|
||||
)
|
||||
|
||||
for (const wsInvitation of workspaceInvitations) {
|
||||
// Check if invitation hasn't expired
|
||||
if (
|
||||
wsInvitation.expiresAt &&
|
||||
new Date().toISOString() <= wsInvitation.expiresAt.toISOString()
|
||||
) {
|
||||
// Check if user isn't already a member of the workspace
|
||||
const existingWorkspaceMember = await tx
|
||||
.select()
|
||||
.from(workspaceMember)
|
||||
.where(
|
||||
and(
|
||||
eq(workspaceMember.workspaceId, wsInvitation.workspaceId),
|
||||
eq(workspaceMember.userId, session.user.id)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
// Check if user doesn't already have permissions on the workspace
|
||||
const existingPermission = await tx
|
||||
.select()
|
||||
.from(permissions)
|
||||
.where(
|
||||
and(
|
||||
eq(permissions.userId, session.user.id),
|
||||
eq(permissions.entityType, 'workspace'),
|
||||
eq(permissions.entityId, wsInvitation.workspaceId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existingWorkspaceMember.length === 0 && existingPermission.length === 0) {
|
||||
// Add user as workspace member
|
||||
await tx.insert(workspaceMember).values({
|
||||
id: randomUUID(),
|
||||
workspaceId: wsInvitation.workspaceId,
|
||||
userId: session.user.id,
|
||||
role: wsInvitation.role,
|
||||
joinedAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
|
||||
// Add workspace permissions
|
||||
await tx.insert(permissions).values({
|
||||
id: randomUUID(),
|
||||
userId: session.user.id,
|
||||
entityType: 'workspace',
|
||||
entityId: wsInvitation.workspaceId,
|
||||
permissionType: wsInvitation.permissions,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
|
||||
// Mark workspace invitation as accepted
|
||||
await tx
|
||||
.update(workspaceInvitation)
|
||||
.set({ status: 'accepted' })
|
||||
.where(eq(workspaceInvitation.id, wsInvitation.id))
|
||||
|
||||
logger.info('Accepted workspace invitation', {
|
||||
workspaceId: wsInvitation.workspaceId,
|
||||
userId: session.user.id,
|
||||
permission: wsInvitation.permissions,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
logger.info('Successfully accepted batch invitation', {
|
||||
organizationId: orgInvitation.organizationId,
|
||||
userId: session.user.id,
|
||||
role: orgInvitation.role,
|
||||
})
|
||||
|
||||
// Redirect to success page or main app
|
||||
return NextResponse.redirect(
|
||||
new URL('/workspaces?invite=accepted', env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai')
|
||||
)
|
||||
} catch (error) {
|
||||
logger.error('Failed to accept organization invitation', {
|
||||
invitationId,
|
||||
userId: session.user.id,
|
||||
error,
|
||||
})
|
||||
|
||||
return NextResponse.redirect(
|
||||
new URL(
|
||||
'/invite/invite-error?reason=server-error',
|
||||
env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// POST endpoint for programmatic acceptance (for API use)
|
||||
export async function POST(req: NextRequest) {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const { invitationId } = await req.json()
|
||||
|
||||
if (!invitationId) {
|
||||
return NextResponse.json({ error: 'Missing invitationId' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Similar logic to GET but return JSON response
|
||||
const invitationResult = await db
|
||||
.select()
|
||||
.from(invitation)
|
||||
.where(eq(invitation.id, invitationId))
|
||||
.limit(1)
|
||||
|
||||
if (invitationResult.length === 0) {
|
||||
return NextResponse.json({ error: 'Invalid invitation' }, { status: 404 })
|
||||
}
|
||||
|
||||
const orgInvitation = invitationResult[0]
|
||||
|
||||
if (orgInvitation.expiresAt && new Date() > orgInvitation.expiresAt) {
|
||||
return NextResponse.json({ error: 'Invitation expired' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (orgInvitation.status !== 'pending') {
|
||||
return NextResponse.json({ error: 'Invitation already processed' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (orgInvitation.email !== session.user.email) {
|
||||
return NextResponse.json({ error: 'Email mismatch' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Check if user is already a member
|
||||
const existingMember = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(
|
||||
and(
|
||||
eq(member.organizationId, orgInvitation.organizationId),
|
||||
eq(member.userId, session.user.id)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existingMember.length > 0) {
|
||||
return NextResponse.json({ error: 'Already a member' }, { status: 400 })
|
||||
}
|
||||
|
||||
let acceptedWorkspaces = 0
|
||||
|
||||
// Accept invitations in transaction
|
||||
await db.transaction(async (tx) => {
|
||||
// Accept organization invitation
|
||||
await tx.insert(member).values({
|
||||
id: randomUUID(),
|
||||
userId: session.user.id,
|
||||
organizationId: orgInvitation.organizationId,
|
||||
role: orgInvitation.role,
|
||||
createdAt: new Date(),
|
||||
})
|
||||
|
||||
await tx.update(invitation).set({ status: 'accepted' }).where(eq(invitation.id, invitationId))
|
||||
|
||||
// Accept workspace invitations
|
||||
const workspaceInvitations = await tx
|
||||
.select()
|
||||
.from(workspaceInvitation)
|
||||
.where(
|
||||
and(
|
||||
eq(workspaceInvitation.email, orgInvitation.email),
|
||||
eq(workspaceInvitation.status, 'pending')
|
||||
)
|
||||
)
|
||||
|
||||
for (const wsInvitation of workspaceInvitations) {
|
||||
if (
|
||||
wsInvitation.expiresAt &&
|
||||
new Date().toISOString() <= wsInvitation.expiresAt.toISOString()
|
||||
) {
|
||||
const existingWorkspaceMember = await tx
|
||||
.select()
|
||||
.from(workspaceMember)
|
||||
.where(
|
||||
and(
|
||||
eq(workspaceMember.workspaceId, wsInvitation.workspaceId),
|
||||
eq(workspaceMember.userId, session.user.id)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
const existingPermission = await tx
|
||||
.select()
|
||||
.from(permissions)
|
||||
.where(
|
||||
and(
|
||||
eq(permissions.userId, session.user.id),
|
||||
eq(permissions.entityType, 'workspace'),
|
||||
eq(permissions.entityId, wsInvitation.workspaceId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existingWorkspaceMember.length === 0 && existingPermission.length === 0) {
|
||||
await tx.insert(workspaceMember).values({
|
||||
id: randomUUID(),
|
||||
workspaceId: wsInvitation.workspaceId,
|
||||
userId: session.user.id,
|
||||
role: wsInvitation.role,
|
||||
joinedAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
|
||||
await tx.insert(permissions).values({
|
||||
id: randomUUID(),
|
||||
userId: session.user.id,
|
||||
entityType: 'workspace',
|
||||
entityId: wsInvitation.workspaceId,
|
||||
permissionType: wsInvitation.permissions,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
|
||||
await tx
|
||||
.update(workspaceInvitation)
|
||||
.set({ status: 'accepted' })
|
||||
.where(eq(workspaceInvitation.id, wsInvitation.id))
|
||||
|
||||
acceptedWorkspaces++
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: `Successfully joined organization and ${acceptedWorkspaces} workspace(s)`,
|
||||
organizationId: orgInvitation.organizationId,
|
||||
workspacesJoined: acceptedWorkspaces,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to accept organization invitation via API', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,6 @@ import { and, eq, lte, not, sql } from 'drizzle-orm'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { z } from 'zod'
|
||||
import { checkServerSideUsageLimits } from '@/lib/billing'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { EnhancedLoggingSession } from '@/lib/logs/enhanced-logging-session'
|
||||
import { buildTraceSpans } from '@/lib/logs/trace-spans'
|
||||
@@ -13,6 +12,7 @@ import {
|
||||
getScheduleTimeValues,
|
||||
getSubBlockValue,
|
||||
} from '@/lib/schedules/utils'
|
||||
import { checkServerSideUsageLimits } from '@/lib/usage-monitor'
|
||||
import { decryptSecret } from '@/lib/utils'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
|
||||
import { updateWorkflowRunCounts } from '@/lib/workflows/utils'
|
||||
|
||||
480
apps/sim/app/api/tools/aws-lambda/deploy-endpoint/route.ts
Normal file
480
apps/sim/app/api/tools/aws-lambda/deploy-endpoint/route.ts
Normal file
@@ -0,0 +1,480 @@
|
||||
import {
|
||||
ApiGatewayV2Client,
|
||||
CreateApiCommand,
|
||||
CreateIntegrationCommand,
|
||||
CreateRouteCommand,
|
||||
CreateStageCommand,
|
||||
GetApisCommand,
|
||||
GetIntegrationsCommand,
|
||||
GetRoutesCommand,
|
||||
GetStagesCommand,
|
||||
} from '@aws-sdk/client-apigatewayv2'
|
||||
import { AddPermissionCommand, GetFunctionCommand, LambdaClient } from '@aws-sdk/client-lambda'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
const logger = createLogger('AWSLambdaDeployEndpointAPI')
|
||||
|
||||
// Validation schema for the request body
|
||||
// Validation schema for the request body
const DeployEndpointRequestSchema = z.object({
  accessKeyId: z.string().min(1, 'AWS Access Key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS Secret Access Key is required'),
  region: z.string().min(1, 'AWS Region is required'),
  functionName: z.string().min(1, 'Function name is required'),
  endpointName: z.string().min(1, 'Endpoint name is required'),
  role: z.string().min(1, 'Role ARN is required'),
})

// Request type derived from the schema so validation and typing cannot drift apart.
type DeployEndpointRequest = z.infer<typeof DeployEndpointRequestSchema>

/**
 * Shape of the successful deployment payload returned to the caller.
 * endpointUrl is the public invoke URL:
 * https://{apiGatewayId}.execute-api.{region}.amazonaws.com/{stageName}/
 */
interface DeployEndpointResponse {
  functionArn: string
  functionName: string
  endpointName: string
  endpointUrl: string
  region: string
  // AWS region the function and API Gateway live in
  status: string
  // ISO-8601 timestamp of when this deployment completed
  lastModified: string
  apiGatewayId: string
  stageName: string
}
|
||||
|
||||
/**
|
||||
* Check if a Lambda function exists
|
||||
*/
|
||||
async function checkFunctionExists(
|
||||
lambdaClient: LambdaClient,
|
||||
functionName: string
|
||||
): Promise<boolean> {
|
||||
try {
|
||||
await lambdaClient.send(new GetFunctionCommand({ FunctionName: functionName }))
|
||||
return true
|
||||
} catch (error: any) {
|
||||
if (error.name === 'ResourceNotFoundException') {
|
||||
return false
|
||||
}
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get Lambda function details
|
||||
*/
|
||||
async function getFunctionDetails(lambdaClient: LambdaClient, functionName: string): Promise<any> {
|
||||
return await lambdaClient.send(new GetFunctionCommand({ FunctionName: functionName }))
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if API Gateway HTTP API already exists
|
||||
*/
|
||||
async function checkApiExists(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiName: string
|
||||
): Promise<string | null> {
|
||||
try {
|
||||
const apis = await apiGatewayClient.send(new GetApisCommand({}))
|
||||
const existingApi = apis.Items?.find((api: any) => api.Name === apiName)
|
||||
return existingApi?.ApiId || null
|
||||
} catch (error) {
|
||||
logger.error('Error checking for existing API', { error })
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a route already exists for the API Gateway
|
||||
*/
|
||||
async function checkRouteExists(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiId: string,
|
||||
routeKey: string
|
||||
): Promise<boolean> {
|
||||
try {
|
||||
const routes = await apiGatewayClient.send(new GetRoutesCommand({ ApiId: apiId }))
|
||||
return routes.Items?.some((route: any) => route.RouteKey === routeKey) || false
|
||||
} catch (error) {
|
||||
logger.error('Error checking for existing route', { error })
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if an integration already exists for the API Gateway
|
||||
*/
|
||||
async function checkIntegrationExists(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiId: string,
|
||||
functionArn: string
|
||||
): Promise<string | null> {
|
||||
try {
|
||||
const integrations = await apiGatewayClient.send(new GetIntegrationsCommand({ ApiId: apiId }))
|
||||
const existingIntegration = integrations.Items?.find(
|
||||
(integration) => integration.IntegrationUri === functionArn
|
||||
)
|
||||
return existingIntegration?.IntegrationId || null
|
||||
} catch (error) {
|
||||
logger.error('Error checking for existing integration', { error })
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new API Gateway HTTP API
|
||||
*/
|
||||
async function createApiGateway(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiName: string
|
||||
): Promise<string> {
|
||||
const createApiResponse = await apiGatewayClient.send(
|
||||
new CreateApiCommand({
|
||||
Name: apiName,
|
||||
ProtocolType: 'HTTP',
|
||||
Description: `HTTP API for Lambda function ${apiName}`,
|
||||
})
|
||||
)
|
||||
|
||||
if (!createApiResponse.ApiId) {
|
||||
throw new Error('Failed to create API Gateway - no ID returned')
|
||||
}
|
||||
|
||||
return createApiResponse.ApiId
|
||||
}
|
||||
|
||||
/**
|
||||
* Create API Gateway integration with Lambda
|
||||
*/
|
||||
async function createApiIntegration(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiId: string,
|
||||
functionArn: string
|
||||
): Promise<string> {
|
||||
const integration = await apiGatewayClient.send(
|
||||
new CreateIntegrationCommand({
|
||||
ApiId: apiId,
|
||||
IntegrationType: 'AWS_PROXY',
|
||||
IntegrationUri: functionArn,
|
||||
IntegrationMethod: 'POST',
|
||||
PayloadFormatVersion: '2.0',
|
||||
})
|
||||
)
|
||||
|
||||
if (!integration.IntegrationId) {
|
||||
throw new Error('Failed to create integration - no ID returned')
|
||||
}
|
||||
|
||||
return integration.IntegrationId
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a route for the API Gateway
|
||||
*/
|
||||
async function createApiRoute(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiId: string,
|
||||
integrationId: string
|
||||
): Promise<void> {
|
||||
await apiGatewayClient.send(
|
||||
new CreateRouteCommand({
|
||||
ApiId: apiId,
|
||||
RouteKey: 'ANY /',
|
||||
Target: `integrations/${integrationId}`,
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Add Lambda permission for API Gateway
|
||||
*/
|
||||
async function addLambdaPermission(
|
||||
lambdaClient: LambdaClient,
|
||||
functionName: string,
|
||||
apiId: string,
|
||||
region: string,
|
||||
accountId: string
|
||||
): Promise<void> {
|
||||
try {
|
||||
await lambdaClient.send(
|
||||
new AddPermissionCommand({
|
||||
FunctionName: functionName,
|
||||
StatementId: `api-gateway-${apiId}`,
|
||||
Action: 'lambda:InvokeFunction',
|
||||
Principal: 'apigateway.amazonaws.com',
|
||||
SourceArn: `arn:aws:execute-api:${region}:${accountId}:${apiId}/*/*`,
|
||||
})
|
||||
)
|
||||
} catch (error: any) {
|
||||
// If permission already exists, that's fine
|
||||
if (error.name !== 'ResourceConflictException') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a stage exists for the API Gateway
|
||||
*/
|
||||
async function checkStageExists(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiId: string,
|
||||
stageName: string
|
||||
): Promise<boolean> {
|
||||
try {
|
||||
const stages = await apiGatewayClient.send(
|
||||
new GetStagesCommand({
|
||||
ApiId: apiId,
|
||||
})
|
||||
)
|
||||
return stages.Items?.some((stage: any) => stage.StageName === stageName) || false
|
||||
} catch (error) {
|
||||
logger.error('Error checking for existing stage', { error })
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a stage for the API Gateway
|
||||
*/
|
||||
async function createApiStage(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiId: string
|
||||
): Promise<string> {
|
||||
const stageName = 'prod'
|
||||
|
||||
// Check if stage already exists
|
||||
const stageExists = await checkStageExists(apiGatewayClient, apiId, stageName)
|
||||
|
||||
if (stageExists) {
|
||||
logger.info(`Stage ${stageName} already exists for API ${apiId}`)
|
||||
return stageName
|
||||
}
|
||||
|
||||
logger.info(`Creating new stage ${stageName} for API ${apiId}`)
|
||||
const stage = await apiGatewayClient.send(
|
||||
new CreateStageCommand({
|
||||
ApiId: apiId,
|
||||
StageName: stageName,
|
||||
AutoDeploy: true,
|
||||
})
|
||||
)
|
||||
|
||||
return stage.StageName || stageName
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure API is deployed by waiting for deployment to complete
|
||||
*/
|
||||
async function ensureApiDeployed(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiId: string,
|
||||
stageName: string
|
||||
): Promise<void> {
|
||||
// In API Gateway v2, AutoDeploy: true should handle deployment automatically
|
||||
// But we can add a small delay to ensure the deployment completes
|
||||
await new Promise((resolve) => setTimeout(resolve, 2000))
|
||||
|
||||
logger.info(`API Gateway deployment completed for API ${apiId}, stage ${stageName}`)
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
logger.info(`[${requestId}] Processing AWS Lambda deploy endpoint request`)
|
||||
|
||||
// Parse and validate request body
|
||||
let body: any
|
||||
try {
|
||||
body = await request.json()
|
||||
} catch (parseError) {
|
||||
logger.error(`[${requestId}] Failed to parse request body`, {
|
||||
error: parseError instanceof Error ? parseError.message : String(parseError),
|
||||
})
|
||||
return createErrorResponse('Invalid JSON in request body', 400, 'INVALID_JSON')
|
||||
}
|
||||
|
||||
// Log the raw request body for debugging
|
||||
logger.info(`[${requestId}] Raw request body received`, {
|
||||
body: JSON.stringify(body, null, 2),
|
||||
})
|
||||
|
||||
const validationResult = DeployEndpointRequestSchema.safeParse(body)
|
||||
if (!validationResult.success) {
|
||||
logger.warn(`[${requestId}] Invalid request body`, { errors: validationResult.error.errors })
|
||||
return createErrorResponse('Invalid request parameters', 400, 'VALIDATION_ERROR')
|
||||
}
|
||||
|
||||
const params = validationResult.data
|
||||
|
||||
// Log the deployment payload (excluding sensitive credentials)
|
||||
logger.info(`[${requestId}] AWS Lambda deploy endpoint payload received`, {
|
||||
functionName: params.functionName,
|
||||
endpointName: params.endpointName,
|
||||
region: params.region,
|
||||
accessKeyId: params.accessKeyId ? `${params.accessKeyId.substring(0, 4)}...` : undefined,
|
||||
hasSecretAccessKey: !!params.secretAccessKey,
|
||||
hasRole: !!params.role,
|
||||
role: params.role ? `${params.role.substring(0, 20)}...` : undefined,
|
||||
})
|
||||
|
||||
logger.info(`[${requestId}] Deploying Lambda function as endpoint: ${params.functionName}`)
|
||||
|
||||
// Create Lambda client
|
||||
const lambdaClient = new LambdaClient({
|
||||
region: params.region,
|
||||
credentials: {
|
||||
accessKeyId: params.accessKeyId,
|
||||
secretAccessKey: params.secretAccessKey,
|
||||
},
|
||||
})
|
||||
|
||||
// Create API Gateway v2 client
|
||||
const apiGatewayClient = new ApiGatewayV2Client({
|
||||
region: params.region,
|
||||
credentials: {
|
||||
accessKeyId: params.accessKeyId,
|
||||
secretAccessKey: params.secretAccessKey,
|
||||
},
|
||||
})
|
||||
|
||||
// Check if Lambda function exists
|
||||
const functionExists = await checkFunctionExists(lambdaClient, params.functionName)
|
||||
if (!functionExists) {
|
||||
logger.error(`[${requestId}] Lambda function ${params.functionName} does not exist`)
|
||||
return createErrorResponse(
|
||||
`Lambda function ${params.functionName} does not exist. Please deploy the function first.`,
|
||||
404,
|
||||
'FUNCTION_NOT_FOUND'
|
||||
)
|
||||
}
|
||||
|
||||
// Get function details
|
||||
const functionDetails = await getFunctionDetails(lambdaClient, params.functionName)
|
||||
const functionArn = functionDetails.Configuration?.FunctionArn
|
||||
|
||||
if (!functionArn) {
|
||||
logger.error(`[${requestId}] Failed to get function ARN for ${params.functionName}`)
|
||||
return createErrorResponse('Failed to get function ARN', 500, 'FUNCTION_ARN_ERROR')
|
||||
}
|
||||
|
||||
// Extract account ID from function ARN
|
||||
const accountId = functionArn.split(':')[4]
|
||||
if (!accountId) {
|
||||
logger.error(`[${requestId}] Failed to extract account ID from function ARN: ${functionArn}`)
|
||||
return createErrorResponse(
|
||||
'Failed to extract account ID from function ARN',
|
||||
500,
|
||||
'ACCOUNT_ID_ERROR'
|
||||
)
|
||||
}
|
||||
|
||||
// Check if API Gateway already exists
|
||||
let apiId = await checkApiExists(apiGatewayClient, params.endpointName)
|
||||
|
||||
if (!apiId) {
|
||||
logger.info(`[${requestId}] Creating new API Gateway HTTP API: ${params.endpointName}`)
|
||||
apiId = await createApiGateway(apiGatewayClient, params.endpointName)
|
||||
} else {
|
||||
logger.info(
|
||||
`[${requestId}] Using existing API Gateway HTTP API: ${params.endpointName} (${apiId})`
|
||||
)
|
||||
}
|
||||
|
||||
// Check if integration already exists before creating a new one
|
||||
let integrationId = await checkIntegrationExists(apiGatewayClient, apiId, functionArn)
|
||||
|
||||
if (integrationId) {
|
||||
logger.info(
|
||||
`[${requestId}] Integration for function ${params.functionName} already exists for API ${apiId}, using existing integration`
|
||||
)
|
||||
} else {
|
||||
logger.info(`[${requestId}] Creating API Gateway integration`)
|
||||
integrationId = await createApiIntegration(apiGatewayClient, apiId, functionArn)
|
||||
}
|
||||
|
||||
// Check if route already exists before creating a new one
|
||||
const routeKey = 'ANY /'
|
||||
const routeExists = await checkRouteExists(apiGatewayClient, apiId, routeKey)
|
||||
|
||||
if (routeExists) {
|
||||
logger.info(
|
||||
`[${requestId}] Route ${routeKey} already exists for API ${apiId}, skipping route creation`
|
||||
)
|
||||
} else {
|
||||
logger.info(`[${requestId}] Creating API Gateway route`)
|
||||
await createApiRoute(apiGatewayClient, apiId, integrationId)
|
||||
}
|
||||
|
||||
// Add Lambda permission for API Gateway
|
||||
logger.info(`[${requestId}] Adding Lambda permission for API Gateway`)
|
||||
await addLambdaPermission(lambdaClient, params.functionName, apiId, params.region, accountId)
|
||||
|
||||
// Create stage for the API Gateway
|
||||
logger.info(`[${requestId}] Creating API Gateway stage`)
|
||||
const stageName = await createApiStage(apiGatewayClient, apiId)
|
||||
|
||||
if (!stageName) {
|
||||
logger.error(`[${requestId}] Failed to create or get stage for API ${apiId}`)
|
||||
return createErrorResponse('Failed to create API Gateway stage', 500, 'STAGE_CREATION_ERROR')
|
||||
}
|
||||
|
||||
// Ensure API is deployed
|
||||
logger.info(`[${requestId}] Ensuring API Gateway deployment is complete`)
|
||||
await ensureApiDeployed(apiGatewayClient, apiId, stageName)
|
||||
|
||||
// Construct the endpoint URL
|
||||
const endpointUrl = `https://${apiId}.execute-api.${params.region}.amazonaws.com/${stageName}/`
|
||||
|
||||
const response: DeployEndpointResponse = {
|
||||
functionArn,
|
||||
functionName: params.functionName,
|
||||
endpointName: params.endpointName,
|
||||
endpointUrl,
|
||||
region: params.region,
|
||||
status: 'ACTIVE',
|
||||
lastModified: new Date().toISOString(),
|
||||
apiGatewayId: apiId,
|
||||
stageName,
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Lambda function endpoint deployment completed successfully`, {
|
||||
functionName: params.functionName,
|
||||
endpointName: params.endpointName,
|
||||
endpointUrl,
|
||||
apiGatewayId: apiId,
|
||||
})
|
||||
|
||||
return createSuccessResponse({
|
||||
success: true,
|
||||
output: response,
|
||||
})
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Error deploying Lambda function endpoint`, {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
|
||||
// Handle specific AWS errors
|
||||
let errorMessage = 'Failed to deploy Lambda function endpoint'
|
||||
let statusCode = 500
|
||||
|
||||
if (error.name === 'AccessDeniedException') {
|
||||
errorMessage = 'Access denied. Please check your AWS credentials and permissions.'
|
||||
statusCode = 403
|
||||
} else if (error.name === 'InvalidParameterValueException') {
|
||||
errorMessage = `Invalid parameter: ${error.message}`
|
||||
statusCode = 400
|
||||
} else if (error.name === 'ResourceConflictException') {
|
||||
errorMessage = 'Resource conflict. The API may be in use or being updated.'
|
||||
statusCode = 409
|
||||
} else if (error.name === 'ServiceException') {
|
||||
errorMessage = 'AWS service error. Please try again later.'
|
||||
statusCode = 503
|
||||
} else if (error instanceof Error) {
|
||||
errorMessage = error.message
|
||||
}
|
||||
|
||||
return createErrorResponse(errorMessage, statusCode, 'DEPLOYMENT_ERROR')
|
||||
}
|
||||
}
|
||||
442
apps/sim/app/api/tools/aws-lambda/deploy/route.ts
Normal file
442
apps/sim/app/api/tools/aws-lambda/deploy/route.ts
Normal file
@@ -0,0 +1,442 @@
|
||||
import { promises as fs } from 'fs'
import { tmpdir } from 'os'
import { dirname, join, relative } from 'path'
import { GetFunctionCommand, LambdaClient } from '@aws-sdk/client-lambda'
import type { NextRequest } from 'next/server'
import { z } from 'zod'
import { createLogger } from '@/lib/logs/console-logger'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
const logger = createLogger('AWSLambdaDeployAPI')

// Validation schema for the deployment request body.
// Numeric fields use z.coerce because clients may send them as strings.
const DeployRequestSchema = z.object({
  accessKeyId: z.string().min(1, 'AWS Access Key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS Secret Access Key is required'),
  region: z.string().min(1, 'AWS Region is required'),
  functionName: z.string().min(1, 'Function name is required'),
  handler: z.string().optional(),
  runtime: z.string().min(1, 'Runtime is required'),
  // Map of file path -> file contents; at least one source file is required.
  code: z
    .record(z.string())
    .refine((val) => Object.keys(val).length > 0, 'At least one code file is required'),

  // Lambda service limits: timeout 1-900 seconds, memory 128-10240 MB.
  timeout: z.coerce.number().min(1).max(900).optional().default(3),
  memorySize: z.coerce.number().min(128).max(10240).optional().default(128),
  environmentVariables: z.record(z.string()).default({}),
  tags: z.record(z.string()).default({}),
  role: z.string().min(1, 'Role ARN is required'),
})

type DeployRequest = z.infer<typeof DeployRequestSchema>

// Function details returned to the caller after a successful deployment.
interface LambdaFunctionDetails {
  functionArn: string
  functionName: string
  runtime: string
  region: string
  status: string
  lastModified: string
  codeSize: number
  description: string
  timeout: number
  memorySize: number
  environment: Record<string, string>
  tags: Record<string, string>
}
|
||||
|
||||
/**
|
||||
* Get the appropriate file extension for the given runtime
|
||||
*/
|
||||
function getFileExtension(runtime: string): string {
|
||||
if (runtime.startsWith('nodejs')) return 'js'
|
||||
if (runtime.startsWith('python')) return 'py'
|
||||
if (runtime.startsWith('java')) return 'java'
|
||||
if (runtime.startsWith('dotnet')) return 'cs'
|
||||
if (runtime.startsWith('go')) return 'go'
|
||||
if (runtime.startsWith('ruby')) return 'rb'
|
||||
return 'js' // default
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitize function name for SAM/CloudFormation resource naming
|
||||
* SAM resource names must be alphanumeric only (letters and numbers)
|
||||
*/
|
||||
function sanitizeResourceName(functionName: string): string {
|
||||
return (
|
||||
functionName
|
||||
.replace(/[^a-zA-Z0-9]/g, '') // Remove all non-alphanumeric characters
|
||||
.replace(/^(\d)/, 'Func$1') // Ensure it starts with a letter if it starts with a number
|
||||
.substring(0, 64) || // Ensure reasonable length limit
|
||||
'LambdaFunction'
|
||||
) // Fallback if name becomes empty
|
||||
}
|
||||
|
||||
/**
|
||||
* Create SAM template for the Lambda function
|
||||
*/
|
||||
function createSamTemplate(params: DeployRequest): string {
|
||||
// Sanitize the function name for CloudFormation resource naming
|
||||
const resourceName = sanitizeResourceName(params.functionName)
|
||||
|
||||
const template = {
|
||||
AWSTemplateFormatVersion: '2010-09-09',
|
||||
Transform: 'AWS::Serverless-2016-10-31',
|
||||
Resources: {
|
||||
[resourceName]: {
|
||||
Type: 'AWS::Serverless::Function',
|
||||
Properties: {
|
||||
FunctionName: params.functionName, // Use original function name for actual Lambda function
|
||||
CodeUri: './src',
|
||||
Handler: params.handler,
|
||||
Runtime: params.runtime,
|
||||
Role: params.role,
|
||||
Timeout: params.timeout,
|
||||
MemorySize: params.memorySize,
|
||||
Environment: {
|
||||
Variables: params.environmentVariables,
|
||||
},
|
||||
Tags: params.tags,
|
||||
},
|
||||
},
|
||||
},
|
||||
Outputs: {
|
||||
FunctionArn: {
|
||||
Value: { 'Fn::GetAtt': [resourceName, 'Arn'] },
|
||||
Export: { Name: `${params.functionName}-Arn` },
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
return JSON.stringify(template, null, 2)
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a shell command and return the result
|
||||
*/
|
||||
async function execCommand(
|
||||
command: string,
|
||||
cwd: string,
|
||||
env?: Record<string, string>
|
||||
): Promise<{ stdout: string; stderr: string }> {
|
||||
const { exec } = await import('child_process')
|
||||
const { promisify } = await import('util')
|
||||
const execAsync = promisify(exec)
|
||||
|
||||
return await execAsync(command, {
|
||||
cwd,
|
||||
env: env ? { ...process.env, ...env } : process.env,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Deploy Lambda function using SAM CLI
|
||||
*/
|
||||
async function deployWithSam(
|
||||
params: DeployRequest,
|
||||
requestId: string
|
||||
): Promise<LambdaFunctionDetails> {
|
||||
const tempDir = join(tmpdir(), `lambda-deploy-${requestId}`)
|
||||
const srcDir = join(tempDir, 'src')
|
||||
|
||||
try {
|
||||
// Create temporary directory structure
|
||||
await fs.mkdir(tempDir, { recursive: true })
|
||||
await fs.mkdir(srcDir, { recursive: true })
|
||||
|
||||
logger.info(`[${requestId}] Created temporary directory: ${tempDir}`)
|
||||
|
||||
// Write SAM template
|
||||
const samTemplate = createSamTemplate(params)
|
||||
await fs.writeFile(join(tempDir, 'template.yaml'), samTemplate)
|
||||
|
||||
logger.info(`[${requestId}] Created SAM template`)
|
||||
|
||||
// Write source code files
|
||||
for (const [filePath, codeContent] of Object.entries(params.code)) {
|
||||
const fullPath = join(srcDir, filePath)
|
||||
const fileDir = join(fullPath, '..')
|
||||
|
||||
// Ensure directory exists
|
||||
await fs.mkdir(fileDir, { recursive: true })
|
||||
await fs.writeFile(fullPath, codeContent)
|
||||
|
||||
logger.info(`[${requestId}] Created source file: ${filePath}`)
|
||||
}
|
||||
|
||||
// Set AWS credentials in environment
|
||||
const env = {
|
||||
AWS_ACCESS_KEY_ID: params.accessKeyId,
|
||||
AWS_SECRET_ACCESS_KEY: params.secretAccessKey,
|
||||
AWS_DEFAULT_REGION: params.region,
|
||||
}
|
||||
|
||||
// Build the SAM application
|
||||
logger.info(`[${requestId}] Building SAM application...`)
|
||||
const buildCommand = 'sam build --no-cached'
|
||||
const buildResult = await execCommand(buildCommand, tempDir, env)
|
||||
|
||||
logger.info(`[${requestId}] SAM build output:`, {
|
||||
stdout: buildResult.stdout,
|
||||
stderr: buildResult.stderr,
|
||||
})
|
||||
|
||||
if (buildResult.stderr && !buildResult.stderr.includes('Successfully built')) {
|
||||
logger.warn(`[${requestId}] SAM build warnings:`, { stderr: buildResult.stderr })
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] SAM build completed`)
|
||||
|
||||
// Deploy the SAM application
|
||||
logger.info(`[${requestId}] Deploying SAM application...`)
|
||||
const stackName = `${sanitizeResourceName(params.functionName)}Stack`
|
||||
const deployCommand = [
|
||||
'sam deploy',
|
||||
'--no-confirm-changeset',
|
||||
'--no-fail-on-empty-changeset',
|
||||
`--stack-name ${stackName}`,
|
||||
`--region ${params.region}`,
|
||||
'--resolve-s3',
|
||||
'--capabilities CAPABILITY_IAM',
|
||||
'--no-progressbar',
|
||||
].join(' ')
|
||||
|
||||
const deployResult = await execCommand(deployCommand, tempDir, env)
|
||||
|
||||
logger.info(`[${requestId}] SAM deploy output:`, {
|
||||
stdout: deployResult.stdout,
|
||||
stderr: deployResult.stderr,
|
||||
})
|
||||
|
||||
if (
|
||||
deployResult.stderr &&
|
||||
!deployResult.stderr.includes('Successfully created/updated stack')
|
||||
) {
|
||||
logger.warn(`[${requestId}] SAM deploy warnings:`, { stderr: deployResult.stderr })
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] SAM deploy completed`)
|
||||
|
||||
// Get function details using AWS SDK
|
||||
const lambdaClient = new LambdaClient({
|
||||
region: params.region,
|
||||
credentials: {
|
||||
accessKeyId: params.accessKeyId,
|
||||
secretAccessKey: params.secretAccessKey,
|
||||
},
|
||||
})
|
||||
|
||||
const functionDetails = await getFunctionDetails(
|
||||
lambdaClient,
|
||||
params.functionName,
|
||||
params.region
|
||||
)
|
||||
|
||||
return functionDetails
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error during SAM deployment`, {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
throw error
|
||||
} finally {
|
||||
// Clean up temporary directory
|
||||
try {
|
||||
await fs.rm(tempDir, { recursive: true, force: true })
|
||||
logger.info(`[${requestId}] Cleaned up temporary directory: ${tempDir}`)
|
||||
} catch (cleanupError) {
|
||||
logger.warn(`[${requestId}] Failed to clean up temporary directory`, {
|
||||
error: cleanupError instanceof Error ? cleanupError.message : String(cleanupError),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get detailed information about a Lambda function
|
||||
*/
|
||||
async function getFunctionDetails(
|
||||
lambdaClient: LambdaClient,
|
||||
functionName: string,
|
||||
region: string
|
||||
): Promise<LambdaFunctionDetails> {
|
||||
const functionDetails = await lambdaClient.send(
|
||||
new GetFunctionCommand({ FunctionName: functionName })
|
||||
)
|
||||
|
||||
return {
|
||||
functionArn: functionDetails.Configuration?.FunctionArn || '',
|
||||
functionName: functionDetails.Configuration?.FunctionName || '',
|
||||
runtime: functionDetails.Configuration?.Runtime || '',
|
||||
region,
|
||||
status: functionDetails.Configuration?.State || '',
|
||||
lastModified: functionDetails.Configuration?.LastModified || '',
|
||||
codeSize: functionDetails.Configuration?.CodeSize || 0,
|
||||
description: functionDetails.Configuration?.Description || '',
|
||||
timeout: functionDetails.Configuration?.Timeout || 0,
|
||||
memorySize: functionDetails.Configuration?.MemorySize || 0,
|
||||
environment: functionDetails.Configuration?.Environment?.Variables || {},
|
||||
tags: functionDetails.Tags || {},
|
||||
}
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
logger.info(`[${requestId}] Processing AWS Lambda deployment request`)
|
||||
|
||||
// Parse and validate request body
|
||||
let body: any
|
||||
try {
|
||||
body = await request.json()
|
||||
} catch (parseError) {
|
||||
logger.error(`[${requestId}] Failed to parse request body`, {
|
||||
error: parseError instanceof Error ? parseError.message : String(parseError),
|
||||
})
|
||||
return createErrorResponse('Invalid JSON in request body', 400, 'INVALID_JSON')
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Request body received:`, {
|
||||
body,
|
||||
codeType: typeof body.code,
|
||||
codeValue: body.code,
|
||||
})
|
||||
|
||||
// Parse the code field if it's a JSON string
|
||||
if (typeof body.code === 'string') {
|
||||
try {
|
||||
body.code = JSON.parse(body.code)
|
||||
logger.info(`[${requestId}] Parsed code field:`, { parsedCode: body.code })
|
||||
} catch (parseError) {
|
||||
logger.error(`[${requestId}] Failed to parse code field as JSON`, {
|
||||
error: parseError instanceof Error ? parseError.message : String(parseError),
|
||||
codeString: body.code,
|
||||
})
|
||||
return createErrorResponse('Invalid JSON in code field', 400, 'INVALID_CODE_JSON')
|
||||
}
|
||||
}
|
||||
|
||||
// Runtime field should be a string, no JSON parsing needed
|
||||
if (typeof body.runtime !== 'string') {
|
||||
logger.error(`[${requestId}] Runtime field must be a string`, {
|
||||
runtimeType: typeof body.runtime,
|
||||
runtimeValue: body.runtime,
|
||||
})
|
||||
return createErrorResponse('Runtime field must be a string', 400, 'INVALID_RUNTIME_TYPE')
|
||||
}
|
||||
|
||||
// Parse the timeout field if it's a JSON string
|
||||
if (typeof body.timeout === 'string') {
|
||||
try {
|
||||
body.timeout = JSON.parse(body.timeout)
|
||||
logger.info(`[${requestId}] Parsed timeout field:`, { parsedTimeout: body.timeout })
|
||||
} catch (parseError) {
|
||||
logger.error(`[${requestId}] Failed to parse timeout field as JSON`, {
|
||||
error: parseError instanceof Error ? parseError.message : String(parseError),
|
||||
timeoutString: body.timeout,
|
||||
})
|
||||
return createErrorResponse('Invalid JSON in timeout field', 400, 'INVALID_TIMEOUT_JSON')
|
||||
}
|
||||
}
|
||||
|
||||
// Parse the memorySize field if it's a JSON string
|
||||
if (typeof body.memorySize === 'string') {
|
||||
try {
|
||||
body.memorySize = JSON.parse(body.memorySize)
|
||||
logger.info(`[${requestId}] Parsed memorySize field:`, {
|
||||
parsedMemorySize: body.memorySize,
|
||||
})
|
||||
} catch (parseError) {
|
||||
logger.error(`[${requestId}] Failed to parse memorySize field as JSON`, {
|
||||
error: parseError instanceof Error ? parseError.message : String(parseError),
|
||||
memorySizeString: body.memorySize,
|
||||
})
|
||||
return createErrorResponse(
|
||||
'Invalid JSON in memorySize field',
|
||||
400,
|
||||
'INVALID_MEMORYSIZE_JSON'
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const validationResult = DeployRequestSchema.safeParse(body)
|
||||
if (!validationResult.success) {
|
||||
logger.warn(`[${requestId}] Invalid request body`, {
|
||||
errors: validationResult.error.errors,
|
||||
codeField: body.code,
|
||||
codeType: typeof body.code,
|
||||
hasCode: 'code' in body,
|
||||
bodyKeys: Object.keys(body),
|
||||
})
|
||||
return createErrorResponse('Invalid request parameters', 400, 'VALIDATION_ERROR')
|
||||
}
|
||||
|
||||
const params = validationResult.data
|
||||
|
||||
// Log the deployment payload (excluding sensitive credentials)
|
||||
logger.info(`[${requestId}] AWS Lambda deployment payload received`, {
|
||||
functionName: params.functionName,
|
||||
region: params.region,
|
||||
runtime: params.runtime,
|
||||
handler: params.handler,
|
||||
timeout: params.timeout,
|
||||
memorySize: params.memorySize,
|
||||
accessKeyId: params.accessKeyId ? `${params.accessKeyId.substring(0, 4)}...` : undefined,
|
||||
hasSecretAccessKey: !!params.secretAccessKey,
|
||||
hasRole: !!params.role,
|
||||
role: params.role ? `${params.role.substring(0, 20)}...` : undefined,
|
||||
codeFiles: Object.keys(params.code),
|
||||
codeFilesCount: Object.keys(params.code).length,
|
||||
environmentVariables: params.environmentVariables,
|
||||
environmentVariablesCount: Object.keys(params.environmentVariables || {}).length,
|
||||
tags: params.tags,
|
||||
tagsCount: Object.keys(params.tags || {}).length,
|
||||
})
|
||||
|
||||
logger.info(`[${requestId}] Deploying Lambda function with SAM: ${params.functionName}`)
|
||||
|
||||
// Deploy using SAM CLI
|
||||
const functionDetails = await deployWithSam(params, requestId)
|
||||
|
||||
logger.info(`[${requestId}] Lambda function deployment completed successfully`, {
|
||||
functionName: params.functionName,
|
||||
functionArn: functionDetails.functionArn,
|
||||
})
|
||||
|
||||
return createSuccessResponse({
|
||||
success: true,
|
||||
output: functionDetails,
|
||||
})
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Error deploying Lambda function`, {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
|
||||
// Handle specific AWS errors
|
||||
let errorMessage = 'Failed to deploy Lambda function'
|
||||
let statusCode = 500
|
||||
|
||||
if (error.message?.includes('sam: command not found')) {
|
||||
errorMessage = 'SAM CLI is not installed or not available in PATH'
|
||||
statusCode = 500
|
||||
} else if (error.name === 'AccessDeniedException') {
|
||||
errorMessage = 'Access denied. Please check your AWS credentials and permissions.'
|
||||
statusCode = 403
|
||||
} else if (error.name === 'InvalidParameterValueException') {
|
||||
errorMessage = `Invalid parameter: ${error.message}`
|
||||
statusCode = 400
|
||||
} else if (error.name === 'ResourceConflictException') {
|
||||
errorMessage = 'Resource conflict. The function may be in use or being updated.'
|
||||
statusCode = 409
|
||||
} else if (error.name === 'ServiceException') {
|
||||
errorMessage = 'AWS Lambda service error. Please try again later.'
|
||||
statusCode = 503
|
||||
} else if (error instanceof Error) {
|
||||
errorMessage = error.message
|
||||
}
|
||||
|
||||
return createErrorResponse(errorMessage, statusCode, 'DEPLOYMENT_ERROR')
|
||||
}
|
||||
}
|
||||
322
apps/sim/app/api/tools/aws-lambda/fetch/route.ts
Normal file
322
apps/sim/app/api/tools/aws-lambda/fetch/route.ts
Normal file
@@ -0,0 +1,322 @@
|
||||
import {
|
||||
GetFunctionCommand,
|
||||
GetFunctionConfigurationCommand,
|
||||
LambdaClient,
|
||||
} from '@aws-sdk/client-lambda'
|
||||
import JSZip from 'jszip'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
// Opt this route out of static optimization; every request must be evaluated.
export const dynamic = 'force-dynamic'

const logger = createLogger('AWSLambdaFetchAPI')

// Validation schema for the fetch request body.
const FetchRequestSchema = z.object({
  accessKeyId: z.string().min(1, 'AWS Access Key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS Secret Access Key is required'),
  region: z.string().min(1, 'AWS Region is required'),
  functionName: z.string().min(1, 'Function name is required'),
  // NOTE(review): role is validated but never used by this route's handlers —
  // confirm whether callers rely on the validation before removing it.
  role: z.string().min(1, 'IAM Role ARN is required'),
})

type FetchRequest = z.infer<typeof FetchRequestSchema>

// Function metadata plus extracted source files returned to the caller.
interface LambdaFunctionDetails {
  functionArn: string
  functionName: string
  runtime: string
  region: string
  status: string
  lastModified: string
  codeSize: number
  description: string
  timeout: number
  memorySize: number
  environment: Record<string, string>
  tags: Record<string, string>
  // Map of file path -> file contents extracted from the deployment package.
  codeFiles: Record<string, string>
  handler: string
  role: string
}
|
||||
|
||||
/**
 * Extract source files from a Lambda deployment package (ZIP).
 *
 * Returns every non-directory entry as path -> contents, plus a best-effort
 * "main" file chosen by a three-step fallback: the runtime's conventional
 * entry file (e.g. index.py) when present, otherwise the first file with a
 * recognized code extension, otherwise the first extracted file of any kind.
 * Any ZIP-level failure degrades to empty results rather than throwing.
 */
async function extractCodeFromZip(
  zipBuffer: Buffer,
  runtime: string
): Promise<{ mainCode: string; allFiles: Record<string, string> }> {
  try {
    const zip = await JSZip.loadAsync(zipBuffer)
    const allFiles = Object.keys(zip.files)
    logger.info('Files in ZIP:', allFiles)

    // Extract all text files
    const allFilesContent: Record<string, string> = {}
    let mainCode = ''

    // Determine the main file based on runtime.
    // NOTE(review): assumes the handler lives in "index.<ext>" — Lambda
    // handlers may use other module names; verify against deploy conventions.
    let mainFile = 'index.js' // default
    if (runtime.startsWith('python')) {
      mainFile = 'index.py'
    } else if (runtime.startsWith('java')) {
      mainFile = 'index.java'
    } else if (runtime.startsWith('dotnet')) {
      mainFile = 'index.cs'
    } else if (runtime.startsWith('go')) {
      mainFile = 'index.go'
    } else if (runtime.startsWith('ruby')) {
      mainFile = 'index.rb'
    }

    logger.info('Looking for main file:', mainFile)

    // Extract all non-directory files (directory entries end with '/')
    for (const fileName of allFiles) {
      if (!fileName.endsWith('/')) {
        try {
          const fileContent = await zip.file(fileName)?.async('string')
          if (fileContent !== undefined) {
            allFilesContent[fileName] = fileContent

            // Set main code if this is the main file
            if (fileName === mainFile) {
              mainCode = fileContent
              logger.info('Found main file content, length:', mainCode.length)
            }
          }
        } catch (error) {
          // One unreadable entry should not abort the whole extraction.
          logger.warn(`Failed to extract file ${fileName}:`, error)
        }
      }
    }

    // If main file not found, try to find any code file
    if (!mainCode) {
      const codeFiles = Object.keys(allFilesContent).filter(
        (file) =>
          file.endsWith('.js') ||
          file.endsWith('.py') ||
          file.endsWith('.java') ||
          file.endsWith('.cs') ||
          file.endsWith('.go') ||
          file.endsWith('.rb')
      )

      logger.info('Found code files:', codeFiles)

      if (codeFiles.length > 0) {
        const firstCodeFile = codeFiles[0]
        mainCode = allFilesContent[firstCodeFile]
        logger.info('Using first code file as main, length:', mainCode.length)
      }
    }

    // If still no main code, use the first file
    if (!mainCode && Object.keys(allFilesContent).length > 0) {
      const firstFile = Object.keys(allFilesContent)[0]
      mainCode = allFilesContent[firstFile]
      logger.info('Using first file as main, length:', mainCode.length)
    }

    logger.info(`Extracted ${Object.keys(allFilesContent).length} files`)
    return { mainCode, allFiles: allFilesContent }
  } catch (error) {
    // Corrupt/unreadable ZIP: degrade to empty rather than failing the fetch.
    logger.error('Failed to extract code from ZIP', { error })
    return { mainCode: '', allFiles: {} }
  }
}
|
||||
|
||||
/**
 * Fetch a Lambda function's configuration and download/unpack its code.
 *
 * Code download is best-effort: if the presigned Code.Location URL is
 * missing, the download fails, or extraction fails, `codeFiles` is returned
 * empty while the configuration fields are still populated.
 *
 * NOTE(review): accessKeyId/secretAccessKey are unused in this function —
 * the Code.Location URL returned by GetFunction appears to be presigned and
 * is fetched without extra auth here; confirm before removing the params.
 */
async function getFunctionDetailsWithCode(
  lambdaClient: LambdaClient,
  functionName: string,
  region: string,
  accessKeyId: string,
  secretAccessKey: string
): Promise<LambdaFunctionDetails> {
  // Get function configuration
  const functionConfig = await lambdaClient.send(
    new GetFunctionConfigurationCommand({ FunctionName: functionName })
  )

  // Get function code (includes a download URL for the deployment package)
  const functionCode = await lambdaClient.send(
    new GetFunctionCommand({ FunctionName: functionName })
  )

  let codeFiles: Record<string, string> = {}
  if (functionCode.Code?.Location) {
    try {
      logger.info('Downloading code from:', functionCode.Code.Location)

      const response = await fetch(functionCode.Code.Location)
      logger.info('Fetch response status:', response.status)

      if (response.ok) {
        const zipBuffer = Buffer.from(await response.arrayBuffer())
        logger.info('ZIP buffer size:', zipBuffer.length)
        const extractedCode = await extractCodeFromZip(zipBuffer, functionConfig.Runtime || '')
        codeFiles = extractedCode.allFiles
        logger.info('Extracted files count:', Object.keys(codeFiles).length)
      } else {
        logger.warn('Fetch failed with status:', response.status)
        const errorText = await response.text()
        logger.warn('Error response:', errorText)
      }
    } catch (fetchError) {
      // Download failures are non-fatal; configuration is still returned.
      logger.error('Failed to download function code using fetch', { fetchError })
    }
  } else {
    logger.info('No code location found in function response')
  }

  // Map the SDK response onto the local shape, defaulting missing fields.
  return {
    functionArn: functionConfig.FunctionArn || '',
    functionName: functionConfig.FunctionName || '',
    runtime: functionConfig.Runtime || '',
    region,
    status: functionConfig.State || '',
    lastModified: functionConfig.LastModified || '',
    codeSize: functionConfig.CodeSize || 0,
    description: functionConfig.Description || '',
    timeout: functionConfig.Timeout || 0,
    memorySize: functionConfig.MemorySize || 0,
    environment: functionConfig.Environment?.Variables || {},
    tags: {}, // Tags need to be fetched separately if needed
    codeFiles,
    handler: functionConfig.Handler || '',
    role: functionConfig.Role || '',
  }
}
|
||||
|
||||
/**
 * POST /api/tools/aws-lambda/fetch
 *
 * Fetches a Lambda function's configuration and source code. A missing
 * function (ResourceNotFoundException) is NOT treated as an error: the
 * route returns a success response with all-empty details so callers can
 * treat the function as new. Other AWS errors map to coded error responses.
 */
export async function POST(request: NextRequest) {
  const requestId = crypto.randomUUID().slice(0, 8)

  try {
    logger.info(`[${requestId}] Processing AWS Lambda fetch request`)

    // Parse and validate request body
    let body: any
    try {
      body = await request.json()
    } catch (parseError) {
      logger.error(`[${requestId}] Failed to parse request body`, {
        error: parseError instanceof Error ? parseError.message : String(parseError),
      })
      return createErrorResponse('Invalid JSON in request body', 400, 'INVALID_JSON')
    }

    const validationResult = FetchRequestSchema.safeParse(body)
    if (!validationResult.success) {
      logger.warn(`[${requestId}] Invalid request body`, { errors: validationResult.error.errors })
      return createErrorResponse('Invalid request parameters', 400, 'VALIDATION_ERROR')
    }

    const params = validationResult.data

    // Log the payload (excluding sensitive credentials)
    logger.info(`[${requestId}] AWS Lambda fetch payload received`, {
      functionName: params.functionName,
      region: params.region,
      accessKeyId: params.accessKeyId ? `${params.accessKeyId.substring(0, 4)}...` : undefined,
      hasSecretAccessKey: !!params.secretAccessKey,
      hasFunctionName: !!params.functionName,
      hasRole: !!params.role,
      role: params.role ? `${params.role.substring(0, 20)}...` : undefined,
    })

    logger.info(`[${requestId}] Fetching Lambda function: ${params.functionName}`)

    // Create a Lambda client scoped to the caller's credentials and region
    const lambdaClient = new LambdaClient({
      region: params.region,
      credentials: {
        accessKeyId: params.accessKeyId,
        secretAccessKey: params.secretAccessKey,
      },
    })

    // Fetch function details and code
    try {
      const functionDetails = await getFunctionDetailsWithCode(
        lambdaClient,
        params.functionName,
        params.region,
        params.accessKeyId,
        params.secretAccessKey
      )

      logger.info(`[${requestId}] Successfully fetched Lambda function: ${params.functionName}`, {
        functionName: functionDetails.functionName,
        filesCount: Object.keys(functionDetails.codeFiles).length,
        hasFiles: Object.keys(functionDetails.codeFiles).length > 0,
      })

      return createSuccessResponse({
        success: true,
        output: functionDetails,
      })
    } catch (fetchError: any) {
      // Handle ResourceNotFoundException gracefully - return empty function details
      if (fetchError.name === 'ResourceNotFoundException') {
        logger.info(
          `[${requestId}] Lambda function '${params.functionName}' not found, returning empty response`
        )

        // All-empty placeholder keeps the response shape stable for callers.
        const emptyFunctionDetails: LambdaFunctionDetails = {
          functionArn: '',
          functionName: params.functionName,
          runtime: '',
          region: params.region,
          status: '',
          lastModified: '',
          codeSize: 0,
          description: '',
          timeout: 0,
          memorySize: 0,
          environment: {},
          tags: {},
          codeFiles: {},
          handler: '',
          role: '',
        }

        return createSuccessResponse({
          success: true,
          output: emptyFunctionDetails,
        })
      }

      // Re-throw other errors to be handled by the outer catch block
      throw fetchError
    }
  } catch (error: any) {
    logger.error(`[${requestId}] Failed to fetch Lambda function`, {
      error: error.message,
      stack: error.stack,
    })

    // Handle specific AWS errors
    // Note: ResourceNotFoundException is now handled gracefully in the inner try-catch

    if (error.name === 'AccessDeniedException') {
      return createErrorResponse(
        'Access denied. Please check your AWS credentials and permissions.',
        403,
        'ACCESS_DENIED'
      )
    }

    if (error.name === 'InvalidParameterValueException') {
      return createErrorResponse('Invalid parameter value provided', 400, 'INVALID_PARAMETER')
    }

    return createErrorResponse('Failed to fetch Lambda function', 500, 'FETCH_ERROR')
  }
}
|
||||
91
apps/sim/app/api/tools/aws-lambda/get-prompts/route.ts
Normal file
91
apps/sim/app/api/tools/aws-lambda/get-prompts/route.ts
Normal file
@@ -0,0 +1,91 @@
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
const logger = createLogger('AWSLambdaGetPromptsAPI')
|
||||
|
||||
// Constants for getPrompts operation
|
||||
const system_prompt = `You are an expert in writing aws lambda functions. The user will provide an input which may contain the the existing lambda code, or they may not. If the initial code is provided, make the changes to the initial code to reflect what the user wants. If no code is provided, your job is to write the lambda function, choosing a runtime and handler.
|
||||
|
||||
Your output should be a valid JSON object, with the following structure:
|
||||
|
||||
[
|
||||
"runtime": runtime string,
|
||||
"handler": handler,
|
||||
"timeout": timeout,
|
||||
"memory": memory,
|
||||
"files":
|
||||
{
|
||||
"file_path_1": "code string for first file",
|
||||
"file_path_2": "code string for second file"
|
||||
}
|
||||
]`
|
||||
|
||||
const schema = {
|
||||
name: 'aws_lambda_function',
|
||||
description: 'Defines the structure for an AWS Lambda function configuration.',
|
||||
strict: true,
|
||||
schema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
runtime: {
|
||||
type: 'string',
|
||||
description: 'The runtime environment for the Lambda function.',
|
||||
},
|
||||
handler: {
|
||||
type: 'string',
|
||||
description: 'The function handler that Lambda calls to start execution.',
|
||||
},
|
||||
memory: {
|
||||
type: 'integer',
|
||||
description: 'The amount of memory allocated to the Lambda function in MB (128-10240).',
|
||||
minimum: 128,
|
||||
maximum: 10240,
|
||||
},
|
||||
timeout: {
|
||||
type: 'integer',
|
||||
description: 'The maximum execution time for the Lambda function in seconds (1-900).',
|
||||
minimum: 1,
|
||||
maximum: 900,
|
||||
},
|
||||
files: {
|
||||
type: 'object',
|
||||
description: 'A mapping of file paths to their respective code strings.',
|
||||
additionalProperties: {
|
||||
type: 'string',
|
||||
description: 'The code string for a specific file.',
|
||||
},
|
||||
},
|
||||
},
|
||||
additionalProperties: false,
|
||||
required: ['runtime', 'handler', 'files', 'memory', 'timeout'],
|
||||
},
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
logger.info(`[${requestId}] Processing AWS Lambda get prompts request`)
|
||||
|
||||
// No validation needed since this endpoint doesn't require any parameters
|
||||
// Just return the hardcoded system prompt and schema
|
||||
|
||||
logger.info(`[${requestId}] Returning system prompt and schema`)
|
||||
|
||||
return createSuccessResponse({
|
||||
success: true,
|
||||
output: {
|
||||
systemPrompt: system_prompt,
|
||||
schema: schema,
|
||||
},
|
||||
})
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Error in get prompts operation`, {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
|
||||
return createErrorResponse('Failed to get prompts and schema', 500, 'GET_PROMPTS_ERROR')
|
||||
}
|
||||
}
|
||||
@@ -1,213 +0,0 @@
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
|
||||
import { generateWorkflowYaml } from '@/lib/workflows/yaml-generator'
|
||||
import { getBlock } from '@/blocks'
|
||||
import { db } from '@/db'
|
||||
import { workflow as workflowTable } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('GetUserWorkflowAPI')
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const body = await request.json()
|
||||
const { workflowId, includeMetadata = false } = body
|
||||
|
||||
if (!workflowId) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Workflow ID is required' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info('Fetching workflow for YAML generation', { workflowId })
|
||||
|
||||
// Fetch workflow from database
|
||||
const [workflowRecord] = await db
|
||||
.select()
|
||||
.from(workflowTable)
|
||||
.where(eq(workflowTable.id, workflowId))
|
||||
.limit(1)
|
||||
|
||||
if (!workflowRecord) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: `Workflow ${workflowId} not found` },
|
||||
{ status: 404 }
|
||||
)
|
||||
}
|
||||
|
||||
// Try to load from normalized tables first, fallback to JSON blob
|
||||
let workflowState: any = null
|
||||
const subBlockValues: Record<string, Record<string, any>> = {}
|
||||
|
||||
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
|
||||
if (normalizedData) {
|
||||
workflowState = {
|
||||
blocks: normalizedData.blocks,
|
||||
edges: normalizedData.edges,
|
||||
loops: normalizedData.loops,
|
||||
parallels: normalizedData.parallels,
|
||||
}
|
||||
|
||||
// Extract subblock values from normalized data
|
||||
Object.entries(normalizedData.blocks).forEach(([blockId, block]) => {
|
||||
subBlockValues[blockId] = {}
|
||||
Object.entries((block as any).subBlocks || {}).forEach(([subBlockId, subBlock]) => {
|
||||
if ((subBlock as any).value !== undefined) {
|
||||
subBlockValues[blockId][subBlockId] = (subBlock as any).value
|
||||
}
|
||||
})
|
||||
})
|
||||
} else if (workflowRecord.state) {
|
||||
// Fallback to JSON blob
|
||||
workflowState = workflowRecord.state as any
|
||||
// For JSON blob, subblock values are embedded in the block state
|
||||
Object.entries((workflowState.blocks as any) || {}).forEach(([blockId, block]) => {
|
||||
subBlockValues[blockId] = {}
|
||||
Object.entries((block as any).subBlocks || {}).forEach(([subBlockId, subBlock]) => {
|
||||
if ((subBlock as any).value !== undefined) {
|
||||
subBlockValues[blockId][subBlockId] = (subBlock as any).value
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
if (!workflowState || !workflowState.blocks) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Workflow state is empty or invalid' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Generate YAML using server-side function
|
||||
const yaml = generateWorkflowYaml(workflowState, subBlockValues)
|
||||
|
||||
if (!yaml || yaml.trim() === '') {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Generated YAML is empty' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Generate detailed block information with schemas
|
||||
const blockSchemas: Record<string, any> = {}
|
||||
Object.entries(workflowState.blocks).forEach(([blockId, blockState]) => {
|
||||
const block = blockState as any
|
||||
const blockConfig = getBlock(block.type)
|
||||
|
||||
if (blockConfig) {
|
||||
blockSchemas[blockId] = {
|
||||
type: block.type,
|
||||
name: block.name,
|
||||
description: blockConfig.description,
|
||||
longDescription: blockConfig.longDescription,
|
||||
category: blockConfig.category,
|
||||
docsLink: blockConfig.docsLink,
|
||||
inputs: {},
|
||||
inputRequirements: blockConfig.inputs || {},
|
||||
outputs: blockConfig.outputs || {},
|
||||
tools: blockConfig.tools,
|
||||
}
|
||||
|
||||
// Add input schema from subBlocks configuration
|
||||
if (blockConfig.subBlocks) {
|
||||
blockConfig.subBlocks.forEach((subBlock) => {
|
||||
blockSchemas[blockId].inputs[subBlock.id] = {
|
||||
type: subBlock.type,
|
||||
title: subBlock.title,
|
||||
description: subBlock.description || '',
|
||||
layout: subBlock.layout,
|
||||
...(subBlock.options && { options: subBlock.options }),
|
||||
...(subBlock.placeholder && { placeholder: subBlock.placeholder }),
|
||||
...(subBlock.min !== undefined && { min: subBlock.min }),
|
||||
...(subBlock.max !== undefined && { max: subBlock.max }),
|
||||
...(subBlock.columns && { columns: subBlock.columns }),
|
||||
...(subBlock.hidden !== undefined && { hidden: subBlock.hidden }),
|
||||
...(subBlock.condition && { condition: subBlock.condition }),
|
||||
}
|
||||
})
|
||||
}
|
||||
} else {
|
||||
// Handle special block types like loops and parallels
|
||||
blockSchemas[blockId] = {
|
||||
type: block.type,
|
||||
name: block.name,
|
||||
description: `${block.type.charAt(0).toUpperCase() + block.type.slice(1)} container block`,
|
||||
category: 'Control Flow',
|
||||
inputs: {},
|
||||
outputs: {},
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// Generate workflow summary
|
||||
const blockTypes = Object.values(workflowState.blocks).reduce(
|
||||
(acc: Record<string, number>, block: any) => {
|
||||
acc[block.type] = (acc[block.type] || 0) + 1
|
||||
return acc
|
||||
},
|
||||
{}
|
||||
)
|
||||
|
||||
const categories = Object.values(blockSchemas).reduce(
|
||||
(acc: Record<string, number>, schema: any) => {
|
||||
if (schema.category) {
|
||||
acc[schema.category] = (acc[schema.category] || 0) + 1
|
||||
}
|
||||
return acc
|
||||
},
|
||||
{}
|
||||
)
|
||||
|
||||
// Prepare response with clear context markers
|
||||
const response: any = {
|
||||
workflowContext: 'USER_SPECIFIC_WORKFLOW', // Clear marker for the LLM
|
||||
note: 'This data represents only the blocks and configurations that the user has actually built in their current workflow, not all available Sim Studio capabilities.',
|
||||
yaml,
|
||||
format: 'yaml',
|
||||
summary: {
|
||||
workflowName: workflowRecord.name,
|
||||
blockCount: Object.keys(workflowState.blocks).length,
|
||||
edgeCount: (workflowState.edges || []).length,
|
||||
blockTypes,
|
||||
categories,
|
||||
hasLoops: Object.keys(workflowState.loops || {}).length > 0,
|
||||
hasParallels: Object.keys(workflowState.parallels || {}).length > 0,
|
||||
},
|
||||
userBuiltBlocks: blockSchemas, // Renamed to be clearer
|
||||
}
|
||||
|
||||
// Add metadata if requested
|
||||
if (includeMetadata) {
|
||||
response.metadata = {
|
||||
workflowId: workflowRecord.id,
|
||||
name: workflowRecord.name,
|
||||
description: workflowRecord.description,
|
||||
workspaceId: workflowRecord.workspaceId,
|
||||
createdAt: workflowRecord.createdAt,
|
||||
updatedAt: workflowRecord.updatedAt,
|
||||
}
|
||||
}
|
||||
|
||||
logger.info('Successfully generated workflow YAML', {
|
||||
workflowId,
|
||||
blockCount: response.blockCount,
|
||||
yamlLength: yaml.length,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: response,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to get workflow YAML:', error)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: `Failed to get workflow YAML: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,179 +0,0 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getUserUsageLimitInfo, updateUserUsageLimit } from '@/lib/billing'
|
||||
import { updateMemberUsageLimit } from '@/lib/billing/core/organization-billing'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { isOrganizationOwnerOrAdmin } from '@/lib/permissions/utils'
|
||||
|
||||
const logger = createLogger('UnifiedUsageLimitsAPI')
|
||||
|
||||
/**
|
||||
* Unified Usage Limits Endpoint
|
||||
* GET/PUT /api/usage-limits?context=user|member&userId=<id>&organizationId=<id>
|
||||
*
|
||||
*/
|
||||
/**
 * GET /api/usage-limits?context=user|member&userId=<id>&organizationId=<id>
 *
 * Returns usage-limit info either for the caller themselves (context=user,
 * the default) or for an organization member (context=member, which requires
 * the caller to be an owner/admin of the given organization).
 *
 * Responses: 200 with `{ success, context, userId, organizationId, data }`;
 * 400 for bad context / missing organizationId; 401 unauthenticated;
 * 403 insufficient permission; 500 on unexpected error.
 */
export async function GET(request: NextRequest) {
  // Session is resolved outside the try block so the catch handler can log
  // the user id even when a later step throws.
  const session = await getSession()

  try {
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { searchParams } = new URL(request.url)
    // Defaults: user context, acting on the caller's own account.
    const context = searchParams.get('context') || 'user'
    const userId = searchParams.get('userId') || session.user.id
    const organizationId = searchParams.get('organizationId')

    // Validate context
    if (!['user', 'member'].includes(context)) {
      return NextResponse.json(
        { error: 'Invalid context. Must be "user" or "member"' },
        { status: 400 }
      )
    }

    // For member context, require organizationId and check permissions
    if (context === 'member') {
      if (!organizationId) {
        return NextResponse.json(
          { error: 'Organization ID is required when context=member' },
          { status: 400 }
        )
      }

      // Check if the current user has permission to view member usage info
      const hasPermission = await isOrganizationOwnerOrAdmin(session.user.id, organizationId)
      if (!hasPermission) {
        // Log denied attempts for auditability before returning 403.
        logger.warn('Unauthorized attempt to view member usage info', {
          requesterId: session.user.id,
          targetUserId: userId,
          organizationId,
        })
        return NextResponse.json(
          {
            error:
              'Permission denied. Only organization owners and admins can view member usage information',
          },
          { status: 403 }
        )
      }
    }

    // For user context, ensure they can only view their own info
    if (context === 'user' && userId !== session.user.id) {
      return NextResponse.json(
        { error: "Cannot view other users' usage information" },
        { status: 403 }
      )
    }

    // Get usage limit info
    const usageLimitInfo = await getUserUsageLimitInfo(userId)

    return NextResponse.json({
      success: true,
      context,
      userId,
      organizationId,
      data: usageLimitInfo,
    })
  } catch (error) {
    logger.error('Failed to get usage limit info', {
      userId: session?.user?.id,
      error,
    })

    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
|
||||
|
||||
/**
 * PUT /api/usage-limits?context=user|member&userId=<id>&organizationId=<id>
 *
 * Updates a usage limit. In user context the caller may only update their own
 * limit; in member context an organization owner/admin may update a member's
 * limit. Body: `{ limit: number }` (must be >= 0).
 *
 * Responses: 200 with refreshed limit info; 400 for bad input/context;
 * 401 unauthenticated; 403 insufficient permission; 500 on unexpected error.
 */
export async function PUT(request: NextRequest) {
  // Resolved outside try so the catch handler can still log the user id.
  const session = await getSession()

  try {
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { searchParams } = new URL(request.url)
    const context = searchParams.get('context') || 'user'
    const userId = searchParams.get('userId') || session.user.id
    const organizationId = searchParams.get('organizationId')

    const { limit } = await request.json()

    // NOTE: limit === 0 is accepted here ("positive" in the message is loose).
    if (typeof limit !== 'number' || limit < 0) {
      return NextResponse.json(
        { error: 'Invalid limit. Must be a positive number' },
        { status: 400 }
      )
    }

    if (context === 'user') {
      // Update user's own usage limit
      if (userId !== session.user.id) {
        return NextResponse.json({ error: "Cannot update other users' limits" }, { status: 403 })
      }

      await updateUserUsageLimit(userId, limit)
    } else if (context === 'member') {
      // Update organization member's usage limit
      if (!organizationId) {
        return NextResponse.json(
          { error: 'Organization ID is required when context=member' },
          { status: 400 }
        )
      }

      // Check if the current user has permission to update member limits
      const hasPermission = await isOrganizationOwnerOrAdmin(session.user.id, organizationId)
      if (!hasPermission) {
        // Audit log for denied attempts before returning 403.
        logger.warn('Unauthorized attempt to update member usage limit', {
          adminUserId: session.user.id,
          targetUserId: userId,
          organizationId,
        })
        return NextResponse.json(
          {
            error:
              'Permission denied. Only organization owners and admins can update member usage limits',
          },
          { status: 403 }
        )
      }

      // Audit log for successful authorization before applying the change.
      logger.info('Authorized member usage limit update', {
        adminUserId: session.user.id,
        targetUserId: userId,
        organizationId,
        newLimit: limit,
      })

      await updateMemberUsageLimit(organizationId, userId, limit, session.user.id)
    } else {
      return NextResponse.json(
        { error: 'Invalid context. Must be "user" or "member"' },
        { status: 400 }
      )
    }

    // Return updated limit info
    const updatedInfo = await getUserUsageLimitInfo(userId)

    return NextResponse.json({
      success: true,
      context,
      userId,
      organizationId,
      data: updatedInfo,
    })
  } catch (error) {
    logger.error('Failed to update usage limit', {
      userId: session?.user?.id,
      error,
    })

    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
|
||||
@@ -7,7 +7,7 @@ import { apiKey } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('ApiKeyAPI')
|
||||
|
||||
// DELETE /api/users/me/api-keys/[id] - Delete an API key
|
||||
// DELETE /api/user/api-keys/[id] - Delete an API key
|
||||
export async function DELETE(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> }
|
||||
@@ -9,7 +9,7 @@ import { apiKey } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('ApiKeysAPI')
|
||||
|
||||
// GET /api/users/me/api-keys - Get all API keys for the current user
|
||||
// GET /api/user/api-keys - Get all API keys for the current user
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
@@ -45,7 +45,7 @@ export async function GET(request: NextRequest) {
|
||||
}
|
||||
}
|
||||
|
||||
// POST /api/users/me/api-keys - Create a new API key
|
||||
// POST /api/user/api-keys - Create a new API key
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
69
apps/sim/app/api/user/stats/route.ts
Normal file
69
apps/sim/app/api/user/stats/route.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import { eq, sql } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { db } from '@/db'
|
||||
import { userStats, workflow } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('UserStatsAPI')
|
||||
|
||||
/**
|
||||
* GET endpoint to retrieve user statistics including the count of workflows
|
||||
*/
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
// Get the user session
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
logger.warn('Unauthorized user stats access attempt')
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const userId = session.user.id
|
||||
|
||||
// Get workflow count for user
|
||||
const [workflowCountResult] = await db
|
||||
.select({ count: sql`count(*)::int` })
|
||||
.from(workflow)
|
||||
.where(eq(workflow.userId, userId))
|
||||
|
||||
const workflowCount = workflowCountResult?.count || 0
|
||||
|
||||
// Get user stats record
|
||||
const userStatsRecords = await db.select().from(userStats).where(eq(userStats.userId, userId))
|
||||
|
||||
// If no stats record exists, create one
|
||||
if (userStatsRecords.length === 0) {
|
||||
const newStats = {
|
||||
id: crypto.randomUUID(),
|
||||
userId,
|
||||
totalManualExecutions: 0,
|
||||
totalApiCalls: 0,
|
||||
totalWebhookTriggers: 0,
|
||||
totalScheduledExecutions: 0,
|
||||
totalChatExecutions: 0,
|
||||
totalTokensUsed: 0,
|
||||
totalCost: '0.00',
|
||||
lastActive: new Date(),
|
||||
}
|
||||
|
||||
await db.insert(userStats).values(newStats)
|
||||
|
||||
// Return the newly created stats with workflow count
|
||||
return NextResponse.json({
|
||||
...newStats,
|
||||
workflowCount,
|
||||
})
|
||||
}
|
||||
|
||||
// Return stats with workflow count
|
||||
const stats = userStatsRecords[0]
|
||||
return NextResponse.json({
|
||||
...stats,
|
||||
workflowCount,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error fetching user stats:', error)
|
||||
return NextResponse.json({ error: 'Failed to fetch user statistics' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
251
apps/sim/app/api/user/subscription/[id]/seats/route.test.ts
Normal file
251
apps/sim/app/api/user/subscription/[id]/seats/route.test.ts
Normal file
@@ -0,0 +1,251 @@
|
||||
/**
|
||||
* Tests for Subscription Seats Update API
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import {
|
||||
createMockRequest,
|
||||
mockDb,
|
||||
mockLogger,
|
||||
mockPersonalSubscription,
|
||||
mockRegularMember,
|
||||
mockSubscription,
|
||||
mockTeamSubscription,
|
||||
mockUser,
|
||||
} from '@/app/api/__test-utils__/utils'
|
||||
|
||||
// Test suite for the subscription seats update route. Modules are mocked with
// vi.doMock + vi.resetModules so each test's dynamic `import('./route')` picks
// up that test's mock configuration. Note: the order of mockImplementationOnce
// calls mirrors the order of db.select() calls inside the handler.
describe('Subscription Seats Update API Routes', () => {
  beforeEach(() => {
    vi.resetModules()

    // Default mocks: authenticated user, enterprise plan, silent logger,
    // and a db whose select resolves to the generic mock subscription.
    vi.doMock('@/lib/auth', () => ({
      getSession: vi.fn().mockResolvedValue({
        user: mockUser,
      }),
    }))

    vi.doMock('@/lib/subscription/utils', () => ({
      checkEnterprisePlan: vi.fn().mockReturnValue(true),
    }))

    vi.doMock('@/lib/logs/console-logger', () => ({
      createLogger: vi.fn().mockReturnValue(mockLogger),
    }))

    vi.doMock('@/db', () => ({
      db: mockDb,
    }))

    // db.select(...).from(...).where(...) resolves via .then(...)
    mockDb.select.mockReturnValue({
      from: vi.fn().mockReturnThis(),
      where: vi.fn().mockReturnThis(),
      then: vi.fn().mockResolvedValue([mockSubscription]),
    })

    // db.update(...).set(...).where(...) resolves to one affected row.
    const mockSetFn = vi.fn().mockReturnThis()
    const mockWhereFn = vi.fn().mockResolvedValue([{ affected: 1 }])
    mockDb.update.mockReturnValue({
      set: mockSetFn,
      where: mockWhereFn,
    })
  })

  afterEach(() => {
    vi.clearAllMocks()
  })

  describe('POST handler', () => {
    it('should encounter a permission error when trying to update subscription seats', async () => {
      vi.doMock('@/lib/subscription/utils', () => ({
        checkEnterprisePlan: vi.fn().mockReturnValue(true),
      }))

      // First select: the subscription exists (org-owned, not personal)...
      mockDb.select.mockImplementationOnce(() => ({
        from: vi.fn().mockReturnThis(),
        where: vi.fn().mockReturnThis(),
        then: vi.fn().mockResolvedValue([mockSubscription]),
      }))

      // ...second select: no membership row, so access must be denied.
      mockDb.select.mockImplementationOnce(() => ({
        from: vi.fn().mockReturnThis(),
        where: vi.fn().mockReturnThis(),
        then: vi.fn().mockResolvedValue([]),
      }))

      const req = createMockRequest('POST', {
        seats: 10,
      })

      const { POST } = await import('./route')

      const response = await POST(req, { params: Promise.resolve({ id: 'sub-123' }) })
      const data = await response.json()

      expect(response.status).toBe(403)
      expect(data).toHaveProperty(
        'error',
        'Unauthorized - you do not have permission to modify this subscription'
      )
      // The handler must bail out before attempting any write.
      expect(mockDb.update).not.toHaveBeenCalled()
    })

    it('should reject team plan subscription updates', async () => {
      // Non-enterprise plan: the endpoint only serves enterprise subscriptions.
      vi.doMock('@/lib/subscription/utils', () => ({
        checkEnterprisePlan: vi.fn().mockReturnValue(false),
      }))

      mockDb.select.mockReturnValue({
        from: vi.fn().mockReturnThis(),
        where: vi.fn().mockReturnThis(),
        then: vi.fn().mockResolvedValue([mockTeamSubscription]),
      })

      const req = createMockRequest('POST', {
        seats: 10,
      })

      const { POST } = await import('./route')

      const response = await POST(req, { params: Promise.resolve({ id: 'sub-123' }) })
      const data = await response.json()

      expect(response.status).toBe(400)
      expect(data).toHaveProperty(
        'error',
        'Only enterprise subscriptions can be updated through this endpoint'
      )
      expect(mockDb.update).not.toHaveBeenCalled()
    })

    it('should encounter permission issues with personal subscription updates', async () => {
      vi.doMock('@/lib/subscription/utils', () => ({
        checkEnterprisePlan: vi.fn().mockReturnValue(true),
      }))

      // Personal subscription fixture whose referenceId does not match the
      // session user in this setup, so the membership lookup path is taken.
      mockDb.select.mockReturnValue({
        from: vi.fn().mockReturnThis(),
        where: vi.fn().mockReturnThis(),
        then: vi.fn().mockResolvedValue([mockPersonalSubscription]),
      })

      const req = createMockRequest('POST', {
        seats: 10,
      })

      const { POST } = await import('./route')

      const response = await POST(req, { params: Promise.resolve({ id: 'sub-123' }) })
      const data = await response.json()

      expect(response.status).toBe(403)
      expect(data).toHaveProperty('error')
    })

    it('should reject updates from non-admin members', async () => {
      vi.doMock('@/lib/subscription/utils', () => ({
        checkEnterprisePlan: vi.fn().mockReturnValue(true),
      }))

      // Select #1 → subscription row; select #2 → a plain (non-admin) member.
      const mockSelectImpl = vi
        .fn()
        .mockReturnValueOnce({
          from: vi.fn().mockReturnThis(),
          where: vi.fn().mockReturnThis(),
          then: vi.fn().mockResolvedValue([mockSubscription]),
        })
        .mockReturnValueOnce({
          from: vi.fn().mockReturnThis(),
          where: vi.fn().mockReturnThis(),
          then: vi.fn().mockResolvedValue([mockRegularMember]),
        })

      mockDb.select.mockImplementation(mockSelectImpl)

      const req = createMockRequest('POST', {
        seats: 10,
      })

      const { POST } = await import('./route')

      const response = await POST(req, { params: Promise.resolve({ id: 'sub-123' }) })
      const data = await response.json()

      expect(response.status).toBe(403)
      expect(data).toHaveProperty('error')
    })

    it('should reject invalid request parameters', async () => {
      // Negative seat count must fail zod validation before any db access.
      const req = createMockRequest('POST', {
        seats: -5,
      })

      const { POST } = await import('./route')

      const response = await POST(req, { params: Promise.resolve({ id: 'sub-123' }) })
      const data = await response.json()

      expect(response.status).toBe(400)
      expect(data).toHaveProperty('error', 'Invalid request parameters')
      expect(mockDb.update).not.toHaveBeenCalled()
    })

    it('should handle subscription not found with permission error', async () => {
      // Empty result set for every select in this test.
      mockDb.select.mockReturnValue({
        from: vi.fn().mockReturnThis(),
        where: vi.fn().mockReturnThis(),
        then: vi.fn().mockResolvedValue([]),
      })

      const req = createMockRequest('POST', {
        seats: 10,
      })

      const { POST } = await import('./route')

      const response = await POST(req, { params: Promise.resolve({ id: 'sub-123' }) })
      const data = await response.json()

      expect(response.status).toBe(403)
      expect(data).toHaveProperty('error')
    })

    it('should handle authentication error', async () => {
      // No session → immediate 401, no db calls.
      vi.doMock('@/lib/auth', () => ({
        getSession: vi.fn().mockResolvedValue(null),
      }))

      const req = createMockRequest('POST', {
        seats: 10,
      })

      const { POST } = await import('./route')

      const response = await POST(req, { params: Promise.resolve({ id: 'sub-123' }) })
      const data = await response.json()

      expect(response.status).toBe(401)
      expect(data).toHaveProperty('error', 'Unauthorized')
      expect(mockDb.update).not.toHaveBeenCalled()
    })

    it('should handle internal server error', async () => {
      // A throwing select should be caught and surfaced as a 500 + error log.
      mockDb.select.mockImplementation(() => {
        throw new Error('Database error')
      })

      const req = createMockRequest('POST', {
        seats: 10,
      })

      const { POST } = await import('./route')

      const response = await POST(req, { params: Promise.resolve({ id: 'sub-123' }) })
      const data = await response.json()

      expect(response.status).toBe(500)
      expect(data).toHaveProperty('error', 'Failed to update subscription seats')
      expect(mockLogger.error).toHaveBeenCalled()
    })
  })
})
|
||||
151
apps/sim/app/api/user/subscription/[id]/seats/route.ts
Normal file
151
apps/sim/app/api/user/subscription/[id]/seats/route.ts
Normal file
@@ -0,0 +1,151 @@
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { checkEnterprisePlan } from '@/lib/subscription/utils'
|
||||
import { db } from '@/db'
|
||||
import { member, subscription } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('SubscriptionSeatsUpdateAPI')
|
||||
|
||||
const updateSeatsSchema = z.object({
|
||||
seats: z.number().int().min(1),
|
||||
})
|
||||
|
||||
const subscriptionMetadataSchema = z
|
||||
.object({
|
||||
perSeatAllowance: z.number().positive().optional(),
|
||||
totalAllowance: z.number().positive().optional(),
|
||||
updatedAt: z.string().optional(),
|
||||
})
|
||||
.catchall(z.any())
|
||||
|
||||
interface SubscriptionMetadata {
|
||||
perSeatAllowance?: number
|
||||
totalAllowance?: number
|
||||
updatedAt?: string
|
||||
[key: string]: any
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
try {
|
||||
const subscriptionId = (await params).id
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
logger.warn('Unauthorized seats update attempt')
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
let body
|
||||
try {
|
||||
body = await request.json()
|
||||
} catch (_parseError) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Invalid JSON in request body',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const validationResult = updateSeatsSchema.safeParse(body)
|
||||
if (!validationResult.success) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Invalid request parameters',
|
||||
details: validationResult.error.format(),
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const { seats } = validationResult.data
|
||||
|
||||
const sub = await db
|
||||
.select()
|
||||
.from(subscription)
|
||||
.where(eq(subscription.id, subscriptionId))
|
||||
.then((rows) => rows[0])
|
||||
|
||||
if (!sub) {
|
||||
return NextResponse.json({ error: 'Subscription not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
if (!checkEnterprisePlan(sub)) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Only enterprise subscriptions can be updated through this endpoint' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const isPersonalSubscription = sub.referenceId === session.user.id
|
||||
|
||||
let hasAccess = isPersonalSubscription
|
||||
|
||||
if (!isPersonalSubscription) {
|
||||
const mem = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(and(eq(member.userId, session.user.id), eq(member.organizationId, sub.referenceId)))
|
||||
.then((rows) => rows[0])
|
||||
|
||||
hasAccess = mem && (mem.role === 'owner' || mem.role === 'admin')
|
||||
}
|
||||
|
||||
if (!hasAccess) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Unauthorized - you do not have permission to modify this subscription' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
let validatedMetadata: SubscriptionMetadata
|
||||
try {
|
||||
validatedMetadata = subscriptionMetadataSchema.parse(sub.metadata || {})
|
||||
} catch (error) {
|
||||
logger.error('Invalid subscription metadata format', {
|
||||
error,
|
||||
subscriptionId,
|
||||
metadata: sub.metadata,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{ error: 'Subscription metadata has invalid format' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
if (validatedMetadata.perSeatAllowance && validatedMetadata.perSeatAllowance > 0) {
|
||||
validatedMetadata.totalAllowance = seats * validatedMetadata.perSeatAllowance
|
||||
validatedMetadata.updatedAt = new Date().toISOString()
|
||||
}
|
||||
|
||||
await db
|
||||
.update(subscription)
|
||||
.set({
|
||||
seats,
|
||||
metadata: validatedMetadata,
|
||||
})
|
||||
.where(eq(subscription.id, subscriptionId))
|
||||
|
||||
logger.info('Subscription seats updated', {
|
||||
subscriptionId,
|
||||
oldSeats: sub.seats,
|
||||
newSeats: seats,
|
||||
userId: session.user.id,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: 'Subscription seats updated successfully',
|
||||
seats,
|
||||
metadata: validatedMetadata,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error updating subscription seats', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return NextResponse.json({ error: 'Failed to update subscription seats' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user