Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-24 22:38:00 -05:00)

Compare commits: 123 commits, fix/router...lakees/db
@@ -4696,6 +4696,26 @@ export function BedrockIcon(props: SVGProps<SVGSVGElement>) {
  )
}

export function TableIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
      xmlns='http://www.w3.org/2000/svg'
      viewBox='0 0 24 24'
      fill='none'
      stroke='currentColor'
      strokeWidth={2}
      strokeLinecap='round'
      strokeLinejoin='round'
      {...props}
    >
      <rect width='18' height='18' x='3' y='3' rx='2' />
      <path d='M3 9h18' />
      <path d='M3 15h18' />
      <path d='M9 3v18' />
      <path d='M15 3v18' />
    </svg>
  )
}
export function ReductoIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
@@ -108,6 +108,7 @@ import {
  StagehandIcon,
  StripeIcon,
  SupabaseIcon,
  TableIcon,
  TavilyIcon,
  TelegramIcon,
  TextractIcon,

@@ -236,6 +237,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
  stripe: StripeIcon,
  stt: STTIcon,
  supabase: SupabaseIcon,
  table: TableIcon,
  tavily: TavilyIcon,
  telegram: TelegramIcon,
  textract: TextractIcon,
@@ -124,44 +124,11 @@ Choose between four types of loops:
3. Drag other blocks inside the loop container
4. Connect the blocks as needed

### Referencing Loop Data
### Accessing Results

There's an important distinction between referencing loop data from **inside** vs **outside** the loop:
After a loop completes, you can access aggregated results:

<Tabs items={['Inside the Loop', 'Outside the Loop']}>
  <Tab>
    **Inside the loop**, use `<loop.>` references to access the current iteration context:

    - **`<loop.index>`**: Current iteration number (0-based)
    - **`<loop.currentItem>`**: Current item being processed (forEach only)
    - **`<loop.items>`**: Full collection being iterated (forEach only)

    ```
    // Inside a Function block within the loop
    const idx = <loop.index>; // 0, 1, 2, ...
    const item = <loop.currentItem>; // Current item
    ```

    <Callout type="info">
      These references are only available for blocks **inside** the loop container. They give you access to the current iteration's context.
    </Callout>
  </Tab>
  <Tab>
    **Outside the loop** (after it completes), reference the loop block by its name to access aggregated results:

    - **`<LoopBlockName.results>`**: Array of results from all iterations

    ```
    // If your loop block is named "Process Items"
    const allResults = <processitems.results>;
    // Returns: [result1, result2, result3, ...]
    ```

    <Callout type="info">
      After the loop completes, use the loop's block name (not `loop.`) to access the collected results. The block name is normalized (lowercase, no spaces).
    </Callout>
  </Tab>
</Tabs>
- **`<loop.results>`**: Array of results from all loop iterations

## Example Use Cases

@@ -217,29 +184,28 @@ Variables (i=0) → Loop (While i<10) → Agent (Process) → Variables (i++)
      </ul>
    </Tab>
    <Tab>
      Available **inside** the loop only:
      <ul className="list-disc space-y-2 pl-6">
        <li>
          <strong>{"<loop.index>"}</strong>: Current iteration number (0-based)
          <strong>loop.currentItem</strong>: Current item being processed
        </li>
        <li>
          <strong>{"<loop.currentItem>"}</strong>: Current item being processed (forEach only)
          <strong>loop.index</strong>: Current iteration number (0-based)
        </li>
        <li>
          <strong>{"<loop.items>"}</strong>: Full collection (forEach only)
          <strong>loop.items</strong>: Full collection (forEach loops)
        </li>
      </ul>
    </Tab>
    <Tab>
      <ul className="list-disc space-y-2 pl-6">
        <li>
          <strong>{"<blockname.results>"}</strong>: Array of all iteration results (accessed via block name)
          <strong>loop.results</strong>: Array of all iteration results
        </li>
        <li>
          <strong>Structure</strong>: Results maintain iteration order
        </li>
        <li>
          <strong>Access</strong>: Available in blocks after the loop completes
          <strong>Access</strong>: Available in blocks after the loop
        </li>
      </ul>
    </Tab>
@@ -76,44 +76,11 @@ Choose between two types of parallel execution:
3. Drag a single block inside the parallel container
4. Connect the block as needed

### Referencing Parallel Data
### Accessing Results

There's an important distinction between referencing parallel data from **inside** vs **outside** the parallel block:
After a parallel block completes, you can access aggregated results:

<Tabs items={['Inside the Parallel', 'Outside the Parallel']}>
  <Tab>
    **Inside the parallel**, use `<parallel.>` references to access the current instance context:

    - **`<parallel.index>`**: Current instance number (0-based)
    - **`<parallel.currentItem>`**: Item for this instance (collection-based only)
    - **`<parallel.items>`**: Full collection being distributed (collection-based only)

    ```
    // Inside a Function block within the parallel
    const idx = <parallel.index>; // 0, 1, 2, ...
    const item = <parallel.currentItem>; // This instance's item
    ```

    <Callout type="info">
      These references are only available for blocks **inside** the parallel container. They give you access to the current instance's context.
    </Callout>
  </Tab>
  <Tab>
    **Outside the parallel** (after it completes), reference the parallel block by its name to access aggregated results:

    - **`<ParallelBlockName.results>`**: Array of results from all instances

    ```
    // If your parallel block is named "Process Tasks"
    const allResults = <processtasks.results>;
    // Returns: [result1, result2, result3, ...]
    ```

    <Callout type="info">
      After the parallel completes, use the parallel's block name (not `parallel.`) to access the collected results. The block name is normalized (lowercase, no spaces).
    </Callout>
  </Tab>
</Tabs>
- **`<parallel.results>`**: Array of results from all parallel instances

## Example Use Cases

@@ -131,11 +98,11 @@ Parallel (["gpt-4o", "claude-3.7-sonnet", "gemini-2.5-pro"]) → Agent → Evalu

### Result Aggregation

Results from all parallel instances are automatically collected and accessible via the block name:
Results from all parallel instances are automatically collected:

```javascript
// In a Function block after a parallel named "Process Tasks"
const allResults = <processtasks.results>;
// In a Function block after the parallel
const allResults = input.parallel.results;
// Returns: [result1, result2, result3, ...]
```

@@ -191,26 +158,25 @@ Understanding when to use each:
      </ul>
    </Tab>
    <Tab>
      Available **inside** the parallel only:
      <ul className="list-disc space-y-2 pl-6">
        <li>
          <strong>{"<parallel.index>"}</strong>: Instance number (0-based)
          <strong>parallel.currentItem</strong>: Item for this instance
        </li>
        <li>
          <strong>{"<parallel.currentItem>"}</strong>: Item for this instance (collection-based only)
          <strong>parallel.index</strong>: Instance number (0-based)
        </li>
        <li>
          <strong>{"<parallel.items>"}</strong>: Full collection (collection-based only)
          <strong>parallel.items</strong>: Full collection (collection-based)
        </li>
      </ul>
    </Tab>
    <Tab>
      <ul className="list-disc space-y-2 pl-6">
        <li>
          <strong>{"<blockname.results>"}</strong>: Array of all instance results (accessed via block name)
          <strong>parallel.results</strong>: Array of all instance results
        </li>
        <li>
          <strong>Access</strong>: Available in blocks after the parallel completes
          <strong>Access</strong>: Available in blocks after the parallel
        </li>
      </ul>
    </Tab>
@@ -104,6 +104,7 @@
    "stripe",
    "stt",
    "supabase",
    "table",
    "tavily",
    "telegram",
    "textract",
apps/docs/content/docs/en/tools/table.mdx (new file, 351 lines)
@@ -0,0 +1,351 @@
---
title: Table
description: User-defined data tables for storing and querying structured data
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="table"
  color="#10B981"
/>

Tables allow you to create and manage custom data tables directly within Sim. Store, query, and manipulate structured data within your workflows without needing external database integrations.

**Why Use Tables?**
- **No external setup**: Create tables instantly without configuring external databases
- **Workflow-native**: Data persists across workflow executions and is accessible from any workflow in your workspace
- **Flexible schema**: Define columns with types (string, number, boolean, date, json) and constraints (required, unique)
- **Powerful querying**: Filter, sort, and paginate data using MongoDB-style operators
- **Agent-friendly**: Tables can be used as tools by AI agents for dynamic data storage and retrieval

**Key Features:**
- Create tables with custom schemas
- Insert, update, upsert, and delete rows
- Query with filters and sorting
- Batch operations for bulk inserts
- Bulk updates and deletes by filter
- Up to 10,000 rows per table, 100 tables per workspace

## Creating Tables

Tables are created from the **Tables** section in the sidebar. Each table requires:
- **Name**: Alphanumeric with underscores (e.g., `customer_leads`)
- **Description**: Optional description of the table's purpose
- **Schema**: Define columns with name, type, and optional constraints

### Column Types

| Type | Description | Example Values |
|------|-------------|----------------|
| `string` | Text data | `"John Doe"`, `"active"` |
| `number` | Numeric data | `42`, `99.99` |
| `boolean` | True/false values | `true`, `false` |
| `date` | Date/time values | `"2024-01-15T10:30:00Z"` |
| `json` | Complex nested data | `{"address": {"city": "NYC"}}` |

### Column Constraints

- **Required**: Column must have a value (cannot be null)
- **Unique**: Values must be unique across all rows (enables upsert matching)
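
As a rough sketch of how such a schema might be modeled (the field names below are illustrative assumptions, not the documented API), the `customer_leads` example above could look like this:

```typescript
// Illustrative only: the column shape and field names are assumptions, not the documented API.
interface ColumnDefinition {
  name: string
  type: 'string' | 'number' | 'boolean' | 'date' | 'json'
  required?: boolean // column must have a value
  unique?: boolean   // values must be unique; enables upsert matching
}

// Hypothetical schema for the `customer_leads` example
const customerLeads: ColumnDefinition[] = [
  { name: 'email', type: 'string', required: true, unique: true },
  { name: 'name', type: 'string', required: true },
  { name: 'score', type: 'number' },
  { name: 'metadata', type: 'json' },
]
```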
## Usage Instructions

Create and manage custom data tables. Store, query, and manipulate structured data within workflows.

## Tools

### `table_query_rows`

Query rows from a table with filtering, sorting, and pagination

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | No | Filter conditions using MongoDB-style operators |
| `sort` | object | No | Sort order as \{column: "asc"\|"desc"\} |
| `limit` | number | No | Maximum rows to return \(default: 100, max: 1000\) |
| `offset` | number | No | Number of rows to skip \(default: 0\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether query succeeded |
| `rows` | array | Query result rows |
| `rowCount` | number | Number of rows returned |
| `totalCount` | number | Total rows matching filter |
| `limit` | number | Limit used in query |
| `offset` | number | Offset used in query |
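
For example, a query for recent high-scoring active rows might be configured like this (a sketch; the table ID and column names are placeholders):

```typescript
// Hypothetical table_query_rows input — tableId and column names are placeholders
const queryInput = {
  tableId: 'tbl_abc123',
  filter: { status: 'active', score: { $gte: 80 } }, // MongoDB-style operators, combined with AND logic
  sort: { createdAt: 'desc' },                       // newest rows first
  limit: 50,                                         // default 100, max 1000
  offset: 0,
}
// The output reports success, rows, rowCount, totalCount, limit, and offset.
```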
### `table_insert_row`

Insert a new row into a table

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `data` | object | Yes | Row data as JSON object matching the table schema |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was inserted |
| `row` | object | Inserted row data including generated ID |
| `message` | string | Status message |

### `table_upsert_row`

Insert or update a row based on unique column constraints. If a row with matching unique field exists, update it; otherwise insert a new row.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `data` | object | Yes | Row data to insert or update |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was upserted |
| `row` | object | Upserted row data |
| `operation` | string | Operation performed: "insert" or "update" |
| `message` | string | Status message |
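
For instance, assuming the table defines `email` as a unique column, repeated upserts with the same email land on the same row (a sketch; the IDs and values are placeholders):

```typescript
// Hypothetical table_upsert_row input — assumes `email` is defined as a unique column
const upsertInput = {
  tableId: 'tbl_abc123',
  data: { email: 'jane@example.com', name: 'Jane', score: 91 },
}
// First call with a new email   -> operation: "insert"
// Later calls with that email   -> operation: "update"
```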
### `table_batch_insert_rows`

Insert multiple rows at once (up to 1000 rows per batch)

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rows` | array | Yes | Array of row data objects to insert |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether batch insert succeeded |
| `rows` | array | Array of inserted rows with IDs |
| `insertedCount` | number | Number of rows inserted |
| `message` | string | Status message |

### `table_update_row`

Update a specific row by its ID

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to update |
| `data` | object | Yes | Data to update \(partial update supported\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was updated |
| `row` | object | Updated row data |
| `message` | string | Status message |

### `table_update_rows_by_filter`

Update multiple rows matching a filter condition

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | Yes | Filter to match rows for update |
| `data` | object | Yes | Data to apply to matching rows |
| `limit` | number | No | Maximum rows to update \(default: 1000\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether update succeeded |
| `updatedCount` | number | Number of rows updated |
| `updatedRowIds` | array | IDs of updated rows |
| `message` | string | Status message |
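
As a sketch, a bulk update that archives rows untouched since the start of 2024 might look like this (the `status` column is a placeholder; `updatedAt` is a built-in column):

```typescript
// Hypothetical table_update_rows_by_filter input
const bulkUpdateInput = {
  tableId: 'tbl_abc123',                                  // placeholder ID
  filter: { updatedAt: { $lt: '2024-01-01T00:00:00Z' } }, // built-in updatedAt column
  data: { status: 'archived' },                           // applied to every matching row
  limit: 1000,                                            // default, and the documented bulk-operation cap
}
// The output reports updatedCount and updatedRowIds.
```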
### `table_delete_row`

Delete a specific row by its ID

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to delete |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was deleted |
| `deletedCount` | number | Number of rows deleted \(1 or 0\) |
| `message` | string | Status message |

### `table_delete_rows_by_filter`

Delete multiple rows matching a filter condition

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | Yes | Filter to match rows for deletion |
| `limit` | number | No | Maximum rows to delete \(default: 1000\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether delete succeeded |
| `deletedCount` | number | Number of rows deleted |
| `deletedRowIds` | array | IDs of deleted rows |
| `message` | string | Status message |

### `table_get_row`

Get a single row by its ID

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to retrieve |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was found |
| `row` | object | Row data |
| `message` | string | Status message |

### `table_get_schema`

Get the schema definition for a table

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether schema was retrieved |
| `name` | string | Table name |
| `columns` | array | Array of column definitions |
| `message` | string | Status message |

## Filter Operators

Filters use MongoDB-style operators for flexible querying:

| Operator | Description | Example |
|----------|-------------|---------|
| `$eq` | Equals | `{"status": {"$eq": "active"}}` or `{"status": "active"}` |
| `$ne` | Not equals | `{"status": {"$ne": "deleted"}}` |
| `$gt` | Greater than | `{"age": {"$gt": 18}}` |
| `$gte` | Greater than or equal | `{"score": {"$gte": 80}}` |
| `$lt` | Less than | `{"price": {"$lt": 100}}` |
| `$lte` | Less than or equal | `{"quantity": {"$lte": 10}}` |
| `$in` | In array | `{"status": {"$in": ["active", "pending"]}}` |
| `$nin` | Not in array | `{"type": {"$nin": ["spam", "blocked"]}}` |
| `$contains` | String contains | `{"email": {"$contains": "@gmail.com"}}` |

### Combining Filters

Multiple field conditions are combined with AND logic:

```json
{
  "status": "active",
  "age": {"$gte": 18}
}
```

Use `$or` for OR logic:

```json
{
  "$or": [
    {"status": "active"},
    {"status": "pending"}
  ]
}
```
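
Assuming `$or` can sit alongside ordinary field conditions (the docs show the two styles separately, so treat the combination below as an assumption), a mixed filter might look like this:

```typescript
// Illustrative filter combining AND field conditions with an $or clause — field names are placeholders
const filter = {
  email: { $contains: '@gmail.com' },
  createdAt: { $gte: '2025-01-01T00:00:00Z' },
  $or: [{ status: 'active' }, { status: 'pending' }],
}
```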
## Sort Specification

Specify sort order with column names and direction:

```json
{
  "createdAt": "desc"
}
```

Multi-column sorting:

```json
{
  "priority": "desc",
  "name": "asc"
}
```

## Built-in Columns

Every row automatically includes:

| Column | Type | Description |
|--------|------|-------------|
| `id` | string | Unique row identifier |
| `createdAt` | date | When the row was created |
| `updatedAt` | date | When the row was last modified |

These can be used in filters and sorting.
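
For example, the built-in columns alone are enough to fetch specific rows and order them by recency (a sketch; the IDs are placeholders):

```typescript
// Hypothetical table_query_rows input using only built-in columns
const byIdInput = {
  tableId: 'tbl_abc123',
  filter: { id: { $in: ['row_1', 'row_2'] } }, // placeholder row IDs
  sort: { updatedAt: 'desc' },                 // most recently modified first
}
```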
## Limits

| Resource | Limit |
|----------|-------|
| Tables per workspace | 100 |
| Rows per table | 10,000 |
| Columns per table | 50 |
| Max row size | 100KB |
| String value length | 10,000 characters |
| Query limit | 1,000 rows |
| Batch insert size | 1,000 rows |
| Bulk update/delete | 1,000 rows |

## Notes

- Category: `blocks`
- Type: `table`
- Tables are scoped to workspaces and accessible from any workflow within that workspace
- Data persists across workflow executions
- Use unique constraints to enable upsert functionality
- The visual filter/sort builder provides an easy way to construct queries without writing JSON
@@ -2,9 +2,10 @@

import { useEffect, useState } from 'react'
import { createLogger } from '@sim/logger'
import { Eye, EyeOff } from 'lucide-react'
import { ArrowRight, ChevronRight, Eye, EyeOff } from 'lucide-react'
import Link from 'next/link'
import { useRouter, useSearchParams } from 'next/navigation'
import { Button } from '@/components/ui/button'
import {
  Dialog,
  DialogContent,

@@ -21,7 +22,6 @@ import { getBaseUrl } from '@/lib/core/utils/urls'
import { quickValidateEmail } from '@/lib/messaging/email/validation'
import { inter } from '@/app/_styles/fonts/inter/inter'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { BrandedButton } from '@/app/(auth)/components/branded-button'
import { SocialLoginButtons } from '@/app/(auth)/components/social-login-buttons'
import { SSOLoginButton } from '@/app/(auth)/components/sso-login-button'
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'

@@ -107,6 +107,7 @@ export default function LoginPage({
  const [passwordErrors, setPasswordErrors] = useState<string[]>([])
  const [showValidationError, setShowValidationError] = useState(false)
  const buttonClass = useBrandedButtonClass()
  const [isButtonHovered, setIsButtonHovered] = useState(false)

  const [callbackUrl, setCallbackUrl] = useState('/workspace')
  const [isInviteFlow, setIsInviteFlow] = useState(false)

@@ -114,6 +115,7 @@ export default function LoginPage({
  const [forgotPasswordOpen, setForgotPasswordOpen] = useState(false)
  const [forgotPasswordEmail, setForgotPasswordEmail] = useState('')
  const [isSubmittingReset, setIsSubmittingReset] = useState(false)
  const [isResetButtonHovered, setIsResetButtonHovered] = useState(false)
  const [resetStatus, setResetStatus] = useState<{
    type: 'success' | 'error' | null
    message: string

@@ -182,13 +184,6 @@ export default function LoginPage({
    e.preventDefault()
    setIsLoading(true)

    const redirectToVerify = (emailToVerify: string) => {
      if (typeof window !== 'undefined') {
        sessionStorage.setItem('verificationEmail', emailToVerify)
      }
      router.push('/verify')
    }

    const formData = new FormData(e.currentTarget)
    const emailRaw = formData.get('email') as string
    const email = emailRaw.trim().toLowerCase()

@@ -220,9 +215,9 @@ export default function LoginPage({
        onError: (ctx) => {
          logger.error('Login error:', ctx.error)

          // EMAIL_NOT_VERIFIED is handled by the catch block which redirects to /verify
          if (ctx.error.code?.includes('EMAIL_NOT_VERIFIED')) {
            errorHandled = true
            redirectToVerify(email)
            return
          }

@@ -290,7 +285,10 @@ export default function LoginPage({
      router.push(safeCallbackUrl)
    } catch (err: any) {
      if (err.message?.includes('not verified') || err.code?.includes('EMAIL_NOT_VERIFIED')) {
        redirectToVerify(email)
        if (typeof window !== 'undefined') {
          sessionStorage.setItem('verificationEmail', email)
        }
        router.push('/verify')
        return
      }

@@ -493,14 +491,24 @@ export default function LoginPage({
            </div>
          </div>

          <BrandedButton
          <Button
            type='submit'
            onMouseEnter={() => setIsButtonHovered(true)}
            onMouseLeave={() => setIsButtonHovered(false)}
            className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
            disabled={isLoading}
            loading={isLoading}
            loadingText='Signing in'
          >
            Sign in
          </BrandedButton>
            <span className='flex items-center gap-1'>
              {isLoading ? 'Signing in...' : 'Sign in'}
              <span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
                {isButtonHovered ? (
                  <ArrowRight className='h-4 w-4' aria-hidden='true' />
                ) : (
                  <ChevronRight className='h-4 w-4' aria-hidden='true' />
                )}
              </span>
            </span>
          </Button>
        </form>
      )}

@@ -611,15 +619,25 @@ export default function LoginPage({
              <p>{resetStatus.message}</p>
            </div>
          )}
          <BrandedButton
          <Button
            type='button'
            onClick={handleForgotPassword}
            onMouseEnter={() => setIsResetButtonHovered(true)}
            onMouseLeave={() => setIsResetButtonHovered(false)}
            className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
            disabled={isSubmittingReset}
            loading={isSubmittingReset}
            loadingText='Sending'
          >
            Send Reset Link
          </BrandedButton>
            <span className='flex items-center gap-1'>
              {isSubmittingReset ? 'Sending...' : 'Send Reset Link'}
              <span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
                {isResetButtonHovered ? (
                  <ArrowRight className='h-4 w-4' aria-hidden='true' />
                ) : (
                  <ChevronRight className='h-4 w-4' aria-hidden='true' />
                )}
              </span>
            </span>
          </Button>
        </div>
      </DialogContent>
    </Dialog>
@@ -1,12 +1,13 @@
'use client'

import { useState } from 'react'
import { Eye, EyeOff } from 'lucide-react'
import { ArrowRight, ChevronRight, Eye, EyeOff } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import { cn } from '@/lib/core/utils/cn'
import { inter } from '@/app/_styles/fonts/inter/inter'
import { BrandedButton } from '@/app/(auth)/components/branded-button'
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'

interface RequestResetFormProps {
  email: string

@@ -27,6 +28,9 @@ export function RequestResetForm({
  statusMessage,
  className,
}: RequestResetFormProps) {
  const buttonClass = useBrandedButtonClass()
  const [isButtonHovered, setIsButtonHovered] = useState(false)

  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault()
    onSubmit(email)

@@ -64,14 +68,24 @@ export function RequestResetForm({
        )}
      </div>

      <BrandedButton
      <Button
        type='submit'
        disabled={isSubmitting}
        loading={isSubmitting}
        loadingText='Sending'
        onMouseEnter={() => setIsButtonHovered(true)}
        onMouseLeave={() => setIsButtonHovered(false)}
        className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
      >
        Send Reset Link
      </BrandedButton>
        <span className='flex items-center gap-1'>
          {isSubmitting ? 'Sending...' : 'Send Reset Link'}
          <span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
            {isButtonHovered ? (
              <ArrowRight className='h-4 w-4' aria-hidden='true' />
            ) : (
              <ChevronRight className='h-4 w-4' aria-hidden='true' />
            )}
          </span>
        </span>
      </Button>
    </form>
  )
}

@@ -98,6 +112,8 @@ export function SetNewPasswordForm({
  const [validationMessage, setValidationMessage] = useState('')
  const [showPassword, setShowPassword] = useState(false)
  const [showConfirmPassword, setShowConfirmPassword] = useState(false)
  const buttonClass = useBrandedButtonClass()
  const [isButtonHovered, setIsButtonHovered] = useState(false)

  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault()

@@ -227,14 +243,24 @@ export function SetNewPasswordForm({
        )}
      </div>

      <BrandedButton
        type='submit'
      <Button
        disabled={isSubmitting || !token}
        loading={isSubmitting}
        loadingText='Resetting'
        type='submit'
        onMouseEnter={() => setIsButtonHovered(true)}
        onMouseLeave={() => setIsButtonHovered(false)}
        className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
      >
        Reset Password
      </BrandedButton>
        <span className='flex items-center gap-1'>
          {isSubmitting ? 'Resetting...' : 'Reset Password'}
          <span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
            {isButtonHovered ? (
              <ArrowRight className='h-4 w-4' aria-hidden='true' />
            ) : (
              <ChevronRight className='h-4 w-4' aria-hidden='true' />
            )}
          </span>
        </span>
      </Button>
    </form>
  )
}
@@ -2,9 +2,10 @@

import { Suspense, useEffect, useState } from 'react'
import { createLogger } from '@sim/logger'
import { Eye, EyeOff } from 'lucide-react'
import { ArrowRight, ChevronRight, Eye, EyeOff } from 'lucide-react'
import Link from 'next/link'
import { useRouter, useSearchParams } from 'next/navigation'
import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import { client, useSession } from '@/lib/auth/auth-client'

@@ -13,7 +14,6 @@ import { cn } from '@/lib/core/utils/cn'
import { quickValidateEmail } from '@/lib/messaging/email/validation'
import { inter } from '@/app/_styles/fonts/inter/inter'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { BrandedButton } from '@/app/(auth)/components/branded-button'
import { SocialLoginButtons } from '@/app/(auth)/components/social-login-buttons'
import { SSOLoginButton } from '@/app/(auth)/components/sso-login-button'
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'

@@ -97,6 +97,7 @@ function SignupFormContent({
  const [redirectUrl, setRedirectUrl] = useState('')
  const [isInviteFlow, setIsInviteFlow] = useState(false)
  const buttonClass = useBrandedButtonClass()
  const [isButtonHovered, setIsButtonHovered] = useState(false)

  const [name, setName] = useState('')
  const [nameErrors, setNameErrors] = useState<string[]>([])

@@ -475,14 +476,24 @@ function SignupFormContent({
            </div>
          </div>

          <BrandedButton
          <Button
            type='submit'
            onMouseEnter={() => setIsButtonHovered(true)}
            onMouseLeave={() => setIsButtonHovered(false)}
            className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
            disabled={isLoading}
            loading={isLoading}
            loadingText='Creating account'
          >
            Create account
          </BrandedButton>
            <span className='flex items-center gap-1'>
              {isLoading ? 'Creating account' : 'Create account'}
              <span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
                {isButtonHovered ? (
                  <ArrowRight className='h-4 w-4' aria-hidden='true' />
                ) : (
                  <ChevronRight className='h-4 w-4' aria-hidden='true' />
                )}
              </span>
            </span>
          </Button>
        </form>
      )}

@@ -4,6 +4,7 @@ import { useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { X } from 'lucide-react'
import { Textarea } from '@/components/emcn'
import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import {

@@ -17,7 +18,6 @@ import { isHosted } from '@/lib/core/config/feature-flags'
import { cn } from '@/lib/core/utils/cn'
import { quickValidateEmail } from '@/lib/messaging/email/validation'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { BrandedButton } from '@/app/(auth)/components/branded-button'
import Footer from '@/app/(landing)/components/footer/footer'
import Nav from '@/app/(landing)/components/nav/nav'

@@ -493,17 +493,18 @@ export default function CareersPage() {

            {/* Submit Button */}
            <div className='flex justify-end pt-2'>
              <BrandedButton
              <Button
                type='submit'
                disabled={isSubmitting || submitStatus === 'success'}
                loading={isSubmitting}
                loadingText='Submitting'
                showArrow={false}
                fullWidth={false}
                className='min-w-[200px]'
                className='min-w-[200px] rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all duration-300 hover:opacity-90 disabled:opacity-50'
                size='lg'
              >
                {submitStatus === 'success' ? 'Submitted' : 'Submit Application'}
              </BrandedButton>
                {isSubmitting
                  ? 'Submitting...'
                  : submitStatus === 'success'
                    ? 'Submitted'
                    : 'Submit Application'}
              </Button>
            </div>
          </form>
        </section>
@@ -59,7 +59,7 @@ export default function StatusIndicator() {
      href={statusUrl}
      target='_blank'
      rel='noopener noreferrer'
      className={`flex min-w-[165px] items-center gap-[6px] whitespace-nowrap text-[12px] transition-colors ${STATUS_COLORS[status]}`}
      className={`flex items-center gap-[6px] whitespace-nowrap text-[12px] transition-colors ${STATUS_COLORS[status]}`}
      aria-label={`System status: ${message}`}
    >
      <StatusDotIcon status={status} className='h-[6px] w-[6px]' aria-hidden='true' />
@@ -10,8 +10,8 @@ export { LandingLoopNode } from './landing-canvas/landing-block/landing-loop-nod
export { LandingNode } from './landing-canvas/landing-block/landing-node'
export type { LoopBlockProps } from './landing-canvas/landing-block/loop-block'
export { LoopBlock } from './landing-canvas/landing-block/loop-block'
export type { SubBlockRowProps, TagProps } from './landing-canvas/landing-block/tag'
export { SubBlockRow, Tag } from './landing-canvas/landing-block/tag'
export type { TagProps } from './landing-canvas/landing-block/tag'
export { Tag } from './landing-canvas/landing-block/tag'
export type {
  LandingBlockNode,
  LandingCanvasProps,
@@ -1,12 +1,12 @@
import React from 'react'
import { BookIcon } from 'lucide-react'
import {
  SubBlockRow,
  type SubBlockRowProps,
  Tag,
  type TagProps,
} from '@/app/(landing)/components/hero/components/landing-canvas/landing-block/tag'

/**
 * Data structure for a landing card component
 * Matches the workflow block structure from the application
 */
export interface LandingCardData {
  /** Icon element to display in the card header */

@@ -15,8 +15,8 @@ export interface LandingCardData {
  color: string | '#f6f6f6'
  /** Name/title of the card */
  name: string
  /** Optional subblock rows to display below the header */
  tags?: SubBlockRowProps[]
  /** Optional tags to display at the bottom of the card */
  tags?: TagProps[]
}

/**

@@ -28,8 +28,7 @@ export interface LandingBlockProps extends LandingCardData {
}

/**
 * Landing block component that displays a card with icon, name, and optional subblock rows
 * Styled to match the application's workflow blocks
 * Landing block component that displays a card with icon, name, and optional tags
 * @param props - Component properties including icon, color, name, tags, and className
 * @returns A styled block card component
 */

@@ -40,37 +39,33 @@ export const LandingBlock = React.memo(function LandingBlock({
  tags,
  className,
}: LandingBlockProps) {
  const hasContentBelowHeader = tags && tags.length > 0

  return (
    <div
      className={`z-10 flex w-[250px] flex-col rounded-[8px] border border-[#E5E5E5] bg-white ${className ?? ''}`}
      className={`z-10 flex w-64 flex-col items-start gap-3 rounded-[14px] border border-[#E5E5E5] bg-[#FEFEFE] p-3 ${className ?? ''}`}
      style={{
        boxShadow: '0 1px 2px 0 rgba(0, 0, 0, 0.05)',
      }}
    >
      {/* Header - matches workflow-block.tsx header styling */}
      <div
        className={`flex items-center justify-between p-[8px] ${hasContentBelowHeader ? 'border-[#E5E5E5] border-b' : ''}`}
      >
        <div className='flex min-w-0 flex-1 items-center gap-[10px]'>
      <div className='flex w-full items-center justify-between'>
        <div className='flex items-center gap-2.5'>
          <div
            className='flex h-[24px] w-[24px] flex-shrink-0 items-center justify-center rounded-[6px]'
            style={{ background: color as string }}
            className='flex h-6 w-6 items-center justify-center rounded-[8px] text-white'
            style={{ backgroundColor: color as string }}
          >
            {icon}
          </div>
          <span className='truncate font-medium text-[#171717] text-[16px]' title={name}>
            {name}
          </span>
          <p className='text-base text-card-foreground'>{name}</p>
        </div>
        <BookIcon className='h-4 w-4 text-muted-foreground' />
      </div>

      {/* Content - SubBlock Rows matching workflow-block.tsx */}
      {hasContentBelowHeader && (
        <div className='flex flex-col gap-[8px] p-[8px]'>
      {tags && tags.length > 0 ? (
        <div className='flex flex-wrap gap-2'>
          {tags.map((tag) => (
            <SubBlockRow key={tag.label} icon={tag.icon} label={tag.label} />
            <Tag key={tag.label} icon={tag.icon} label={tag.label} />
          ))}
        </div>
      )}
      ) : null}
    </div>
  )
})
@@ -7,14 +7,9 @@ import {
  type LandingCardData,
} from '@/app/(landing)/components/hero/components/landing-canvas/landing-block/landing-block'

/**
 * Handle Y offset from block top - matches HANDLE_POSITIONS.DEFAULT_Y_OFFSET
 */
const HANDLE_Y_OFFSET = 20

/**
 * React Flow node component for the landing canvas
 * Styled to match the application's workflow blocks
 * Includes CSS animations and connection handles
 * @param props - Component properties containing node data
 * @returns A React Flow compatible node component
 */

@@ -46,15 +41,15 @@ export const LandingNode = React.memo(function LandingNode({ data }: { data: Lan
        type='target'
        position={Position.Left}
        style={{
          width: '7px',
          height: '20px',
          background: '#D1D1D1',
          border: 'none',
          borderRadius: '2px 0 0 2px',
          top: `${HANDLE_Y_OFFSET}px`,
          left: '-7px',
          width: '12px',
          height: '12px',
          background: '#FEFEFE',
          border: '1px solid #E5E5E5',
          borderRadius: '50%',
          top: '50%',
          left: '-20px',
          transform: 'translateY(-50%)',
          zIndex: 10,
          zIndex: 2,
        }}
        isConnectable={false}
      />

@@ -64,15 +59,15 @@ export const LandingNode = React.memo(function LandingNode({ data }: { data: Lan
        type='source'
        position={Position.Right}
        style={{
          width: '7px',
          height: '20px',
          background: '#D1D1D1',
          border: 'none',
          borderRadius: '0 2px 2px 0',
          top: `${HANDLE_Y_OFFSET}px`,
          right: '-7px',
          width: '12px',
          height: '12px',
          background: '#FEFEFE',
          border: '1px solid #E5E5E5',
          borderRadius: '50%',
          top: '50%',
          right: '-20px',
          transform: 'translateY(-50%)',
          zIndex: 10,
          zIndex: 2,
        }}
        isConnectable={false}
      />
@@ -15,7 +15,6 @@ export interface LoopBlockProps {
/**
 * Loop block container component that provides a styled container
 * for grouping related elements with a dashed border
 * Styled to match the application's subflow containers
 * @param props - Component properties including children and styling
 * @returns A styled loop container component
 */

@@ -30,33 +29,33 @@ export const LoopBlock = React.memo(function LoopBlock({
      style={{
        width: '1198px',
        height: '528px',
        borderRadius: '8px',
        background: 'rgba(59, 130, 246, 0.08)',
        borderRadius: '14px',
        background: 'rgba(59, 130, 246, 0.10)',
        position: 'relative',
        ...style,
      }}
    >
      {/* Custom dashed border with SVG - 8px border radius to match blocks */}
      {/* Custom dashed border with SVG */}
      <svg
        className='pointer-events-none absolute inset-0 h-full w-full'
        style={{ borderRadius: '8px' }}
        style={{ borderRadius: '14px' }}
        preserveAspectRatio='none'
      >
        <path
          className='landing-loop-animated-dash'
          d='M 1190 527.5
            L 8 527.5
            A 7.5 7.5 0 0 1 0.5 520
            L 0.5 8
            A 7.5 7.5 0 0 1 8 0.5
            L 1190 0.5
            A 7.5 7.5 0 0 1 1197.5 8
            L 1197.5 520
            A 7.5 7.5 0 0 1 1190 527.5 Z'
          d='M 1183.5 527.5
            L 14 527.5
            A 13.5 13.5 0 0 1 0.5 514
            L 0.5 14
            A 13.5 13.5 0 0 1 14 0.5
            L 1183.5 0.5
            A 13.5 13.5 0 0 1 1197 14
            L 1197 514
            A 13.5 13.5 0 0 1 1183.5 527.5 Z'
          fill='none'
          stroke='#3B82F6'
          strokeWidth='1'
          strokeDasharray='8 8'
          strokeDasharray='12 12'
          strokeLinecap='round'
        />
      </svg>
@@ -1,52 +1,25 @@
import React from 'react'

/**
 * Properties for a subblock row component
 * Matches the SubBlockRow pattern from workflow-block.tsx
 * Properties for a tag component
 */
export interface SubBlockRowProps {
  /** Icon element to display (optional, for visual context) */
  icon?: React.ReactNode
  /** Text label for the row title */
export interface TagProps {
  /** Icon element to display in the tag */
  icon: React.ReactNode
  /** Text label for the tag */
  label: string
  /** Optional value to display on the right side */
  value?: string
}

/**
 * Kept for backwards compatibility
 * Tag component for displaying labeled icons in a compact format
 * @param props - Tag properties including icon and label
 * @returns A styled tag component
 */
export type TagProps = SubBlockRowProps

/**
 * SubBlockRow component matching the workflow block's subblock row style
 * @param props - Row properties including label and optional value
 * @returns A styled row component
 */
export const SubBlockRow = React.memo(function SubBlockRow({ label, value }: SubBlockRowProps) {
  // Split label by colon to separate title and value if present
  const [title, displayValue] = label.includes(':')
    ? label.split(':').map((s) => s.trim())
    : [label, value]

export const Tag = React.memo(function Tag({ icon, label }: TagProps) {
  return (
    <div className='flex items-center gap-[8px]'>
      <span className='min-w-0 truncate text-[#888888] text-[14px] capitalize' title={title}>
        {title}
      </span>
      {displayValue && (
        <span
          className='flex-1 truncate text-right text-[#171717] text-[14px]'
          title={displayValue}
        >
          {displayValue}
        </span>
      )}
    <div className='flex w-fit items-center gap-1 rounded-[8px] border border-gray-300 bg-white px-2 py-0.5'>
      <div className='h-3 w-3 text-muted-foreground'>{icon}</div>
      <p className='text-muted-foreground text-xs leading-normal'>{label}</p>
    </div>
  )
})

/**
 * Tag component - alias for SubBlockRow for backwards compatibility
 */
export const Tag = SubBlockRow
@@ -9,10 +9,9 @@ import { LandingFlow } from '@/app/(landing)/components/hero/components/landing-

/**
 * Visual constants for landing node dimensions
 * Matches BLOCK_DIMENSIONS from the application
 */
export const CARD_WIDTH = 250
export const CARD_HEIGHT = 100
export const CARD_WIDTH = 256
export const CARD_HEIGHT = 92

/**
 * Landing block node with positioning information
@@ -4,29 +4,33 @@ import React from 'react'
import { type EdgeProps, getSmoothStepPath, Position } from 'reactflow'

/**
 * Custom edge component with animated dashed line
 * Styled to match the application's workflow edges with rectangular handles
 * Custom edge component with animated dotted line that floats between handles
 * @param props - React Flow edge properties
 * @returns An animated dashed edge component
 * @returns An animated dotted edge component
 */
export const LandingEdge = React.memo(function LandingEdge(props: EdgeProps) {
  const { id, sourceX, sourceY, targetX, targetY, sourcePosition, targetPosition, style } = props
  const { id, sourceX, sourceY, targetX, targetY, sourcePosition, targetPosition, style, data } =
    props

  // Adjust the connection points to connect flush with rectangular handles
  // Handle width is 7px, positioned at -7px from edge
  // Adjust the connection points to create floating effect
  // Account for handle size (12px) and additional spacing
  const handleRadius = 6 // Half of handle width (12px)
  const floatingGap = 1 // Additional gap for floating effect

  // Calculate adjusted positions based on edge direction
  let adjustedSourceX = sourceX
  let adjustedTargetX = targetX

  if (sourcePosition === Position.Right) {
    adjustedSourceX = sourceX + 1
    adjustedSourceX = sourceX + handleRadius + floatingGap
  } else if (sourcePosition === Position.Left) {
    adjustedSourceX = sourceX - 1
    adjustedSourceX = sourceX - handleRadius - floatingGap
  }

  if (targetPosition === Position.Left) {
    adjustedTargetX = targetX - 1
    adjustedTargetX = targetX - handleRadius - floatingGap
  } else if (targetPosition === Position.Right) {
    adjustedTargetX = targetX + 1
    adjustedTargetX = targetX + handleRadius + floatingGap
  }

  const [path] = getSmoothStepPath({

@@ -36,8 +40,8 @@ export const LandingEdge = React.memo(function LandingEdge(props: EdgeProps) {
    targetY,
    sourcePosition,
    targetPosition,
    borderRadius: 8,
    offset: 16,
    borderRadius: 20,
    offset: 10,
  })

  return (
@@ -1,7 +1,16 @@
'use client'

import React from 'react'
import { ArrowUp, CodeIcon } from 'lucide-react'
import {
  ArrowUp,
  BinaryIcon,
  BookIcon,
  CalendarIcon,
  CodeIcon,
  Globe2Icon,
  MessageSquareIcon,
  VariableIcon,
} from 'lucide-react'
import { useRouter } from 'next/navigation'
import { type Edge, type Node, Position } from 'reactflow'
import {

@@ -14,6 +23,7 @@ import {
  JiraIcon,
  LinearIcon,
  NotionIcon,
  OpenAIIcon,
  OutlookIcon,
  PackageSearchIcon,
  PineconeIcon,

@@ -55,56 +65,67 @@ const SERVICE_TEMPLATES = {

/**
 * Landing blocks for the canvas preview
 * Styled to match the application's workflow blocks with subblock rows
 */
const LANDING_BLOCKS: LandingManualBlock[] = [
  {
    id: 'schedule',
    name: 'Schedule',
    color: '#7B68EE',
    icon: <ScheduleIcon className='h-[16px] w-[16px] text-white' />,
    icon: <ScheduleIcon className='h-4 w-4' />,
    positions: {
      mobile: { x: 8, y: 60 },
      tablet: { x: 40, y: 120 },
      desktop: { x: 60, y: 180 },
    },
    tags: [{ label: 'Time: 09:00AM Daily' }, { label: 'Timezone: PST' }],
    tags: [
      { icon: <CalendarIcon className='h-3 w-3' />, label: '09:00AM Daily' },
      { icon: <Globe2Icon className='h-3 w-3' />, label: 'PST' },
    ],
  },
  {
    id: 'knowledge',
    name: 'Knowledge',
    color: '#00B0B0',
    icon: <PackageSearchIcon className='h-[16px] w-[16px] text-white' />,
    icon: <PackageSearchIcon className='h-4 w-4' />,
    positions: {
      mobile: { x: 120, y: 140 },
      tablet: { x: 220, y: 200 },
      desktop: { x: 420, y: 241 },
    },
    tags: [{ label: 'Source: Product Vector DB' }, { label: 'Limit: 10' }],
    tags: [
      { icon: <BookIcon className='h-3 w-3' />, label: 'Product Vector DB' },
      { icon: <BinaryIcon className='h-3 w-3' />, label: 'Limit: 10' },
    ],
  },
  {
    id: 'agent',
    name: 'Agent',
    color: '#802FFF',
    icon: <AgentIcon className='h-[16px] w-[16px] text-white' />,
    icon: <AgentIcon className='h-4 w-4' />,
    positions: {
      mobile: { x: 340, y: 60 },
      tablet: { x: 540, y: 120 },
      desktop: { x: 880, y: 142 },
    },
    tags: [{ label: 'Model: gpt-5' }, { label: 'Prompt: You are a support ag...' }],
    tags: [
      { icon: <OpenAIIcon className='h-3 w-3' />, label: 'gpt-5' },
      { icon: <MessageSquareIcon className='h-3 w-3' />, label: 'You are a support ag...' },
    ],
  },
  {
    id: 'function',
    name: 'Function',
    color: '#FF402F',
    icon: <CodeIcon className='h-[16px] w-[16px] text-white' />,
    icon: <CodeIcon className='h-4 w-4' />,
    positions: {
      mobile: { x: 480, y: 220 },
      tablet: { x: 740, y: 280 },
      desktop: { x: 880, y: 340 },
    },
    tags: [{ label: 'Language: Python' }, { label: 'Code: time = "2025-09-01...' }],
    tags: [
      { icon: <CodeIcon className='h-3 w-3' />, label: 'Python' },
      { icon: <VariableIcon className='h-3 w-3' />, label: 'time = "2025-09-01...' },
    ],
  },
]
@@ -229,7 +229,7 @@ function PricingCard({
 */
export default function LandingPricing() {
  return (
    <section id='pricing' className='px-4 pt-[23px] sm:px-0 sm:pt-[4px]' aria-label='Pricing plans'>
    <section id='pricing' className='px-4 pt-[19px] sm:px-0 sm:pt-0' aria-label='Pricing plans'>
      <h2 className='sr-only'>Pricing Plans</h2>
      <div className='relative mx-auto w-full max-w-[1289px]'>
        <div className='grid grid-cols-1 gap-4 sm:grid-cols-2 sm:gap-0 lg:grid-cols-4'>
@@ -11,7 +11,6 @@ import { useBrandConfig } from '@/lib/branding/branding'
import { isHosted } from '@/lib/core/config/feature-flags'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { getFormattedGitHubStars } from '@/app/(landing)/actions/github'
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'

const logger = createLogger('nav')

@@ -21,12 +20,11 @@ interface NavProps {
}

export default function Nav({ hideAuthButtons = false, variant = 'landing' }: NavProps = {}) {
  const [githubStars, setGithubStars] = useState('26.1k')
  const [githubStars, setGithubStars] = useState('25.1k')
  const [isHovered, setIsHovered] = useState(false)
  const [isLoginHovered, setIsLoginHovered] = useState(false)
  const router = useRouter()
  const brand = useBrandConfig()
  const buttonClass = useBrandedButtonClass()

  useEffect(() => {
    if (variant !== 'landing') return

@@ -185,7 +183,7 @@ export default function Nav({ hideAuthButtons = false, variant = 'landing' }: Na
          href='/signup'
          onMouseEnter={() => setIsHovered(true)}
          onMouseLeave={() => setIsHovered(false)}
          className={`${buttonClass} group inline-flex items-center justify-center gap-2 rounded-[10px] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white transition-all`}
          className='group inline-flex items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[14px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all sm:text-[16px]'
          aria-label='Get started with Sim - Sign up for free'
          prefetch={true}
        >
@@ -1,27 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import { useState } from 'react'
|
||||
import { ArrowLeft, ChevronLeft } from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
|
||||
export function BackLink() {
|
||||
const [isHovered, setIsHovered] = useState(false)
|
||||
|
||||
return (
|
||||
<Link
|
||||
href='/studio'
|
||||
className='group flex items-center gap-1 text-gray-600 text-sm hover:text-gray-900'
|
||||
onMouseEnter={() => setIsHovered(true)}
|
||||
onMouseLeave={() => setIsHovered(false)}
|
||||
>
|
||||
<span className='group-hover:-translate-x-0.5 inline-flex transition-transform duration-200'>
|
||||
{isHovered ? (
|
||||
<ArrowLeft className='h-4 w-4' aria-hidden='true' />
|
||||
) : (
|
||||
<ChevronLeft className='h-4 w-4' aria-hidden='true' />
|
||||
)}
|
||||
</span>
|
||||
Back to Sim Studio
|
||||
</Link>
|
||||
)
|
||||
}
|
||||
@@ -5,10 +5,7 @@ import { Avatar, AvatarFallback, AvatarImage } from '@/components/emcn'
|
||||
import { FAQ } from '@/lib/blog/faq'
|
||||
import { getAllPostMeta, getPostBySlug, getRelatedPosts } from '@/lib/blog/registry'
|
||||
import { buildArticleJsonLd, buildBreadcrumbJsonLd, buildPostMetadata } from '@/lib/blog/seo'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
|
||||
import { BackLink } from '@/app/(landing)/studio/[slug]/back-link'
|
||||
import { ShareButton } from '@/app/(landing)/studio/[slug]/share-button'
|
||||
|
||||
export async function generateStaticParams() {
|
||||
const posts = await getAllPostMeta()
|
||||
@@ -51,7 +48,9 @@ export default async function Page({ params }: { params: Promise<{ slug: string
|
||||
/>
|
||||
<header className='mx-auto max-w-[1450px] px-6 pt-8 sm:px-8 sm:pt-12 md:px-12 md:pt-16'>
|
||||
<div className='mb-6'>
|
||||
<BackLink />
|
||||
<Link href='/studio' className='text-gray-600 text-sm hover:text-gray-900'>
|
||||
← Back to Sim Studio
|
||||
</Link>
|
||||
</div>
|
||||
<div className='flex flex-col gap-8 md:flex-row md:gap-12'>
|
||||
<div className='w-full flex-shrink-0 md:w-[450px]'>
|
||||
@@ -76,31 +75,28 @@ export default async function Page({ params }: { params: Promise<{ slug: string
|
||||
>
|
||||
{post.title}
|
||||
</h1>
|
||||
<div className='mt-4 flex items-center justify-between'>
|
||||
<div className='flex items-center gap-3'>
|
||||
{(post.authors || [post.author]).map((a, idx) => (
|
||||
<div key={idx} className='flex items-center gap-2'>
|
||||
{a?.avatarUrl ? (
|
||||
<Avatar className='size-6'>
|
||||
<AvatarImage src={a.avatarUrl} alt={a.name} />
|
||||
<AvatarFallback>{a.name.slice(0, 2)}</AvatarFallback>
|
||||
</Avatar>
|
||||
) : null}
|
||||
<Link
|
||||
href={a?.url || '#'}
|
||||
target='_blank'
|
||||
rel='noopener noreferrer author'
|
||||
className='text-[14px] text-gray-600 leading-[1.5] hover:text-gray-900 sm:text-[16px]'
|
||||
itemProp='author'
|
||||
itemScope
|
||||
itemType='https://schema.org/Person'
|
||||
>
|
||||
<span itemProp='name'>{a?.name}</span>
|
||||
</Link>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
<ShareButton url={`${getBaseUrl()}/studio/${slug}`} title={post.title} />
|
||||
<div className='mt-4 flex items-center gap-3'>
|
||||
{(post.authors || [post.author]).map((a, idx) => (
|
||||
<div key={idx} className='flex items-center gap-2'>
|
||||
{a?.avatarUrl ? (
|
||||
<Avatar className='size-6'>
|
||||
<AvatarImage src={a.avatarUrl} alt={a.name} />
|
||||
<AvatarFallback>{a.name.slice(0, 2)}</AvatarFallback>
|
||||
</Avatar>
|
||||
) : null}
|
||||
<Link
|
||||
href={a?.url || '#'}
|
||||
target='_blank'
|
||||
rel='noopener noreferrer author'
|
||||
className='text-[14px] text-gray-600 leading-[1.5] hover:text-gray-900 sm:text-[16px]'
|
||||
itemProp='author'
|
||||
itemScope
|
||||
itemType='https://schema.org/Person'
|
||||
>
|
||||
<span itemProp='name'>{a?.name}</span>
|
||||
</Link>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1,65 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import { useState } from 'react'
|
||||
import { Share2 } from 'lucide-react'
|
||||
import { Popover, PopoverContent, PopoverItem, PopoverTrigger } from '@/components/emcn'
|
||||
|
||||
interface ShareButtonProps {
|
||||
url: string
|
||||
title: string
|
||||
}
|
||||
|
||||
export function ShareButton({ url, title }: ShareButtonProps) {
|
||||
const [open, setOpen] = useState(false)
|
||||
const [copied, setCopied] = useState(false)
|
||||
|
||||
const handleCopyLink = async () => {
|
||||
try {
|
||||
await navigator.clipboard.writeText(url)
|
||||
setCopied(true)
|
||||
setTimeout(() => {
|
||||
setCopied(false)
|
||||
setOpen(false)
|
||||
}, 1000)
|
||||
} catch {
|
||||
setOpen(false)
|
||||
}
|
||||
}
|
||||
|
||||
const handleShareTwitter = () => {
|
||||
const tweetUrl = `https://twitter.com/intent/tweet?url=${encodeURIComponent(url)}&text=${encodeURIComponent(title)}`
|
||||
window.open(tweetUrl, '_blank', 'noopener,noreferrer')
|
||||
setOpen(false)
|
||||
}
|
||||
|
||||
const handleShareLinkedIn = () => {
|
||||
const linkedInUrl = `https://www.linkedin.com/sharing/share-offsite/?url=${encodeURIComponent(url)}`
|
||||
window.open(linkedInUrl, '_blank', 'noopener,noreferrer')
|
||||
setOpen(false)
|
||||
}
|
||||
|
||||
return (
|
||||
<Popover
|
||||
open={open}
|
||||
onOpenChange={setOpen}
|
||||
variant='secondary'
|
||||
size='sm'
|
||||
colorScheme='inverted'
|
||||
>
|
||||
<PopoverTrigger asChild>
|
||||
<button
|
||||
className='flex items-center gap-1.5 text-gray-600 text-sm hover:text-gray-900'
|
||||
aria-label='Share this post'
|
||||
>
|
||||
<Share2 className='h-4 w-4' />
|
||||
<span>Share</span>
|
||||
</button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent align='end' minWidth={140}>
|
||||
<PopoverItem onClick={handleCopyLink}>{copied ? 'Copied!' : 'Copy link'}</PopoverItem>
|
||||
<PopoverItem onClick={handleShareTwitter}>Share on X</PopoverItem>
|
||||
<PopoverItem onClick={handleShareLinkedIn}>Share on LinkedIn</PopoverItem>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
)
|
||||
}
|
||||
@@ -22,7 +22,7 @@ export default async function StudioIndex({
|
||||
? filtered.sort((a, b) => {
|
||||
if (a.featured && !b.featured) return -1
|
||||
if (!a.featured && b.featured) return 1
|
||||
return new Date(b.date).getTime() - new Date(a.date).getTime()
|
||||
return 0
|
||||
})
|
||||
: filtered
|
||||
|
||||
|
||||
@@ -8,7 +8,6 @@ import type { AgentCapabilities, AgentSkill } from '@/lib/a2a/types'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { getRedisClient } from '@/lib/core/config/redis'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('A2AAgentCardAPI')
|
||||
|
||||
@@ -96,11 +95,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<Ro
|
||||
return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
const workspaceAccess = await checkWorkspaceAccess(existingAgent.workspaceId, auth.userId)
|
||||
if (!workspaceAccess.canWrite) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
|
||||
if (
|
||||
@@ -166,11 +160,6 @@ export async function DELETE(request: NextRequest, { params }: { params: Promise
|
||||
return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
const workspaceAccess = await checkWorkspaceAccess(existingAgent.workspaceId, auth.userId)
|
||||
if (!workspaceAccess.canWrite) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
|
||||
await db.delete(a2aAgent).where(eq(a2aAgent.id, agentId))
|
||||
|
||||
logger.info(`Deleted A2A agent: ${agentId}`)
|
||||
@@ -205,11 +194,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<R
|
||||
return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
const workspaceAccess = await checkWorkspaceAccess(existingAgent.workspaceId, auth.userId)
|
||||
if (!workspaceAccess.canWrite) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
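The same write-permission guard appears in the PUT, DELETE, and POST handlers of this route. A hedged sketch of how the repeated check could be factored out (the helper name is hypothetical; only checkWorkspaceAccess is from the source):

import { NextResponse } from 'next/server'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'

// Hypothetical helper: returns a 403 response when the caller cannot write to the
// agent's workspace, or null when the request may proceed.
async function requireWorkspaceWrite(workspaceId: string, userId: string): Promise<NextResponse | null> {
  const access = await checkWorkspaceAccess(workspaceId, userId)
  if (!access.canWrite) {
    return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
  }
  return null
}
// Each handler would then do:
//   const forbidden = await requireWorkspaceWrite(existingAgent.workspaceId, auth.userId)
//   if (forbidden) return forbidden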
|
||||
const body = await request.json()
|
||||
const action = body.action as 'publish' | 'unpublish' | 'refresh'
|
||||
|
||||
|
||||
@@ -16,7 +16,6 @@ import {
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { getBrandConfig } from '@/lib/branding/branding'
|
||||
import { acquireLock, getRedisClient, releaseLock } from '@/lib/core/config/redis'
|
||||
import { validateExternalUrl } from '@/lib/core/security/input-validation'
|
||||
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { markExecutionCancelled } from '@/lib/execution/cancellation'
|
||||
@@ -1119,13 +1118,17 @@ async function handlePushNotificationSet(
|
||||
)
|
||||
}
|
||||
|
||||
const urlValidation = validateExternalUrl(
|
||||
params.pushNotificationConfig.url,
|
||||
'Push notification URL'
|
||||
)
|
||||
if (!urlValidation.isValid) {
|
||||
try {
|
||||
const url = new URL(params.pushNotificationConfig.url)
|
||||
if (url.protocol !== 'https:') {
|
||||
return NextResponse.json(
|
||||
createError(id, A2A_ERROR_CODES.INVALID_PARAMS, 'Push notification URL must use HTTPS'),
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
} catch {
|
||||
return NextResponse.json(
|
||||
createError(id, A2A_ERROR_CODES.INVALID_PARAMS, urlValidation.error || 'Invalid URL'),
|
||||
createError(id, A2A_ERROR_CODES.INVALID_PARAMS, 'Invalid push notification URL'),
|
||||
{ status: 400 }
|
||||
)
|
||||
}
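One side of this hunk delegates the check to validateExternalUrl, the other inlines a plain HTTPS check. The validator's return shape is only inferable from the call site above; a minimal consumption sketch under that assumption:

// Assumed return shape { isValid: boolean; error?: string }, inferred from the call site.
const urlValidation = validateExternalUrl(
  'https://hooks.example.com/notify', // placeholder URL
  'Push notification URL'
)
if (!urlValidation.isValid) {
  // Fall back to a generic message when the validator provides none.
  throw new Error(urlValidation.error || 'Invalid push notification URL')
}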
|
||||
@@ -4,11 +4,6 @@ import { createLogger } from '@sim/logger'
|
||||
import { and, desc, eq, inArray } from 'drizzle-orm'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { refreshOAuthToken } from '@/lib/oauth'
|
||||
import {
|
||||
getMicrosoftRefreshTokenExpiry,
|
||||
isMicrosoftProvider,
|
||||
PROACTIVE_REFRESH_THRESHOLD_DAYS,
|
||||
} from '@/lib/oauth/microsoft'
|
||||
|
||||
const logger = createLogger('OAuthUtilsAPI')
|
||||
|
||||
@@ -210,32 +205,15 @@ export async function refreshAccessTokenIfNeeded(
|
||||
}
|
||||
|
||||
// Decide if we should refresh: token missing OR expired
|
||||
const accessTokenExpiresAt = credential.accessTokenExpiresAt
|
||||
const refreshTokenExpiresAt = credential.refreshTokenExpiresAt
|
||||
const expiresAt = credential.accessTokenExpiresAt
|
||||
const now = new Date()
|
||||
|
||||
// Check if access token needs refresh (missing or expired)
|
||||
const accessTokenNeedsRefresh =
|
||||
!!credential.refreshToken &&
|
||||
(!credential.accessToken || (accessTokenExpiresAt && accessTokenExpiresAt <= now))
|
||||
|
||||
// Check if we should proactively refresh to prevent refresh token expiry
|
||||
// This applies to Microsoft providers whose refresh tokens expire after 90 days of inactivity
|
||||
const proactiveRefreshThreshold = new Date(
|
||||
now.getTime() + PROACTIVE_REFRESH_THRESHOLD_DAYS * 24 * 60 * 60 * 1000
|
||||
)
|
||||
const refreshTokenNeedsProactiveRefresh =
|
||||
!!credential.refreshToken &&
|
||||
isMicrosoftProvider(credential.providerId) &&
|
||||
refreshTokenExpiresAt &&
|
||||
refreshTokenExpiresAt <= proactiveRefreshThreshold
|
||||
|
||||
const shouldRefresh = accessTokenNeedsRefresh || refreshTokenNeedsProactiveRefresh
|
||||
const shouldRefresh =
|
||||
!!credential.refreshToken && (!credential.accessToken || (expiresAt && expiresAt <= now))
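Both versions of the refresh decision appear in this hunk: one refreshes only when the access token is missing or expired, the other also refreshes proactively when a Microsoft refresh token is approaching its inactivity expiry. A condensed sketch of the proactive variant, using only names from the diff (the concrete numbers in the final comment are illustrative):

// Condensed sketch of the proactive decision shown above.
// PROACTIVE_REFRESH_THRESHOLD_DAYS and isMicrosoftProvider come from '@/lib/oauth/microsoft'.
const now = new Date()
const accessTokenNeedsRefresh =
  !!credential.refreshToken &&
  (!credential.accessToken ||
    (credential.accessTokenExpiresAt && credential.accessTokenExpiresAt <= now))

// Refresh early when the refresh token itself would lapse within the threshold window,
// even if the access token is still valid.
const proactiveRefreshThreshold = new Date(
  now.getTime() + PROACTIVE_REFRESH_THRESHOLD_DAYS * 24 * 60 * 60 * 1000
)
const refreshTokenNeedsProactiveRefresh =
  !!credential.refreshToken &&
  isMicrosoftProvider(credential.providerId) &&
  !!credential.refreshTokenExpiresAt &&
  credential.refreshTokenExpiresAt <= proactiveRefreshThreshold

const shouldRefresh = accessTokenNeedsRefresh || refreshTokenNeedsProactiveRefresh
// Example: with a 30-day threshold, a refresh token expiring 20 days from now
// triggers a refresh even though the access token has not expired yet.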
|
||||
const accessToken = credential.accessToken
|
||||
|
||||
if (shouldRefresh) {
|
||||
logger.info(`[${requestId}] Refreshing token for credential`)
|
||||
logger.info(`[${requestId}] Token expired, attempting to refresh for credential`)
|
||||
try {
|
||||
const refreshedToken = await refreshOAuthToken(
|
||||
credential.providerId,
|
||||
@@ -249,15 +227,11 @@ export async function refreshAccessTokenIfNeeded(
|
||||
userId: credential.userId,
|
||||
hasRefreshToken: !!credential.refreshToken,
|
||||
})
|
||||
if (!accessTokenNeedsRefresh && accessToken) {
|
||||
logger.info(`[${requestId}] Proactive refresh failed but access token still valid`)
|
||||
return accessToken
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
// Prepare update data
|
||||
const updateData: Record<string, unknown> = {
|
||||
const updateData: any = {
|
||||
accessToken: refreshedToken.accessToken,
|
||||
accessTokenExpiresAt: new Date(Date.now() + refreshedToken.expiresIn * 1000),
|
||||
updatedAt: new Date(),
|
||||
@@ -269,10 +243,6 @@ export async function refreshAccessTokenIfNeeded(
|
||||
updateData.refreshToken = refreshedToken.refreshToken
|
||||
}
|
||||
|
||||
if (isMicrosoftProvider(credential.providerId)) {
|
||||
updateData.refreshTokenExpiresAt = getMicrosoftRefreshTokenExpiry()
|
||||
}
|
||||
|
||||
// Update the token in the database
|
||||
await db.update(account).set(updateData).where(eq(account.id, credentialId))
|
||||
|
||||
@@ -286,10 +256,6 @@ export async function refreshAccessTokenIfNeeded(
|
||||
credentialId,
|
||||
userId: credential.userId,
|
||||
})
|
||||
if (!accessTokenNeedsRefresh && accessToken) {
|
||||
logger.info(`[${requestId}] Proactive refresh failed but access token still valid`)
|
||||
return accessToken
|
||||
}
|
||||
return null
|
||||
}
|
||||
} else if (!accessToken) {
|
||||
@@ -311,27 +277,10 @@ export async function refreshTokenIfNeeded(
|
||||
credentialId: string
|
||||
): Promise<{ accessToken: string; refreshed: boolean }> {
|
||||
// Decide if we should refresh: token missing OR expired
|
||||
const accessTokenExpiresAt = credential.accessTokenExpiresAt
|
||||
const refreshTokenExpiresAt = credential.refreshTokenExpiresAt
|
||||
const expiresAt = credential.accessTokenExpiresAt
|
||||
const now = new Date()
|
||||
|
||||
// Check if access token needs refresh (missing or expired)
|
||||
const accessTokenNeedsRefresh =
|
||||
!!credential.refreshToken &&
|
||||
(!credential.accessToken || (accessTokenExpiresAt && accessTokenExpiresAt <= now))
|
||||
|
||||
// Check if we should proactively refresh to prevent refresh token expiry
|
||||
// This applies to Microsoft providers whose refresh tokens expire after 90 days of inactivity
|
||||
const proactiveRefreshThreshold = new Date(
|
||||
now.getTime() + PROACTIVE_REFRESH_THRESHOLD_DAYS * 24 * 60 * 60 * 1000
|
||||
)
|
||||
const refreshTokenNeedsProactiveRefresh =
|
||||
!!credential.refreshToken &&
|
||||
isMicrosoftProvider(credential.providerId) &&
|
||||
refreshTokenExpiresAt &&
|
||||
refreshTokenExpiresAt <= proactiveRefreshThreshold
|
||||
|
||||
const shouldRefresh = accessTokenNeedsRefresh || refreshTokenNeedsProactiveRefresh
|
||||
const shouldRefresh =
|
||||
!!credential.refreshToken && (!credential.accessToken || (expiresAt && expiresAt <= now))
|
||||
|
||||
// If token appears valid and present, return it directly
|
||||
if (!shouldRefresh) {
|
||||
@@ -344,17 +293,13 @@ export async function refreshTokenIfNeeded(
|
||||
|
||||
if (!refreshResult) {
|
||||
logger.error(`[${requestId}] Failed to refresh token for credential`)
|
||||
if (!accessTokenNeedsRefresh && credential.accessToken) {
|
||||
logger.info(`[${requestId}] Proactive refresh failed but access token still valid`)
|
||||
return { accessToken: credential.accessToken, refreshed: false }
|
||||
}
|
||||
throw new Error('Failed to refresh token')
|
||||
}
|
||||
|
||||
const { accessToken: refreshedToken, expiresIn, refreshToken: newRefreshToken } = refreshResult
|
||||
|
||||
// Prepare update data
|
||||
const updateData: Record<string, unknown> = {
|
||||
const updateData: any = {
|
||||
accessToken: refreshedToken,
|
||||
accessTokenExpiresAt: new Date(Date.now() + expiresIn * 1000), // Use provider's expiry
|
||||
updatedAt: new Date(),
|
||||
@@ -366,10 +311,6 @@ export async function refreshTokenIfNeeded(
|
||||
updateData.refreshToken = newRefreshToken
|
||||
}
|
||||
|
||||
if (isMicrosoftProvider(credential.providerId)) {
|
||||
updateData.refreshTokenExpiresAt = getMicrosoftRefreshTokenExpiry()
|
||||
}
|
||||
|
||||
await db.update(account).set(updateData).where(eq(account.id, credentialId))
|
||||
|
||||
logger.info(`[${requestId}] Successfully refreshed access token`)
|
||||
@@ -390,11 +331,6 @@ export async function refreshTokenIfNeeded(
|
||||
}
|
||||
}
|
||||
|
||||
if (!accessTokenNeedsRefresh && credential.accessToken) {
|
||||
logger.info(`[${requestId}] Proactive refresh failed but access token still valid`)
|
||||
return { accessToken: credential.accessToken, refreshed: false }
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Refresh failed and no valid token found in DB`, error)
|
||||
throw error
|
||||
}
|
||||
|
||||
@@ -104,11 +104,17 @@ export async function POST(req: NextRequest) {
|
||||
})
|
||||
|
||||
// Build execution params starting with LLM-provided arguments
|
||||
// Resolve all {{ENV_VAR}} references in the arguments (deep for nested objects)
|
||||
// Resolve all {{ENV_VAR}} references in the arguments
|
||||
const executionParams: Record<string, any> = resolveEnvVarReferences(
|
||||
toolArgs,
|
||||
decryptedEnvVars,
|
||||
{ deep: true }
|
||||
{
|
||||
resolveExactMatch: true,
|
||||
allowEmbedded: true,
|
||||
trimKeys: true,
|
||||
onMissing: 'keep',
|
||||
deep: true,
|
||||
}
|
||||
) as Record<string, any>
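A hedged sketch of what this resolution step does to nested tool arguments, using the option names from the call above (the sample variable names and values are illustrative):

// With decryptedEnvVars = { API_KEY: 'sk-123', BASE_URL: 'https://api.example.com' }:
const sampleArgs = {
  url: '{{BASE_URL}}/v1/items', // embedded reference (allowEmbedded: true)
  headers: { Authorization: 'Bearer {{API_KEY}}' }, // nested object, resolved because deep: true
  note: '{{MISSING_VAR}}', // left untouched because onMissing: 'keep'
}
// After resolveEnvVarReferences(sampleArgs, decryptedEnvVars, { ...same options }), roughly:
// {
//   url: 'https://api.example.com/v1/items',
//   headers: { Authorization: 'Bearer sk-123' },
//   note: '{{MISSING_VAR}}',
// }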
|
||||
logger.info(`[${tracker.requestId}] Resolved env var references in arguments`, {
|
||||
|
||||
@@ -84,14 +84,6 @@ vi.mock('@/lib/execution/isolated-vm', () => ({
|
||||
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
|
||||
vi.mock('@/lib/auth/hybrid', () => ({
|
||||
checkInternalAuth: vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'user-123',
|
||||
authType: 'internal_jwt',
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/execution/e2b', () => ({
|
||||
executeInE2B: vi.fn(),
|
||||
}))
|
||||
@@ -118,24 +110,6 @@ describe('Function Execute API Route', () => {
|
||||
})
|
||||
|
||||
describe('Security Tests', () => {
|
||||
it('should reject unauthorized requests', async () => {
|
||||
const { checkInternalAuth } = await import('@/lib/auth/hybrid')
|
||||
vi.mocked(checkInternalAuth).mockResolvedValueOnce({
|
||||
success: false,
|
||||
error: 'Unauthorized',
|
||||
})
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
code: 'return "test"',
|
||||
})
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(401)
|
||||
expect(data).toHaveProperty('error', 'Unauthorized')
|
||||
})
|
||||
|
||||
it.concurrent('should use isolated-vm for secure sandboxed execution', async () => {
|
||||
const req = createMockRequest('POST', {
|
||||
code: 'return "test"',
|
||||
@@ -339,7 +313,7 @@ describe('Function Execute API Route', () => {
|
||||
'block-2': 'world',
|
||||
},
|
||||
blockNameMapping: {
|
||||
validvar: 'block-1',
|
||||
validVar: 'block-1',
|
||||
another_valid: 'block-2',
|
||||
},
|
||||
})
|
||||
@@ -565,7 +539,7 @@ describe('Function Execute API Route', () => {
|
||||
'block-complex': complexData,
|
||||
},
|
||||
blockNameMapping: {
|
||||
complexdata: 'block-complex',
|
||||
complexData: 'block-complex',
|
||||
},
|
||||
})
|
||||
|
||||
|
||||
@@ -1,17 +1,16 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { isE2bEnabled } from '@/lib/core/config/feature-flags'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { executeInE2B } from '@/lib/execution/e2b'
|
||||
import { executeInIsolatedVM } from '@/lib/execution/isolated-vm'
|
||||
import { CodeLanguage, DEFAULT_CODE_LANGUAGE, isValidCodeLanguage } from '@/lib/execution/languages'
|
||||
import { escapeRegExp, normalizeName, REFERENCE } from '@/executor/constants'
|
||||
import { type OutputSchema, resolveBlockReference } from '@/executor/utils/block-reference'
|
||||
import {
|
||||
createEnvVarPattern,
|
||||
createWorkflowVariablePattern,
|
||||
} from '@/executor/utils/reference-validation'
|
||||
import { navigatePath } from '@/executor/variables/resolvers/reference'
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const runtime = 'nodejs'
|
||||
|
||||
@@ -471,17 +470,14 @@ function resolveEnvironmentVariables(
|
||||
|
||||
function resolveTagVariables(
|
||||
code: string,
|
||||
blockData: Record<string, unknown>,
|
||||
blockData: Record<string, any>,
|
||||
blockNameMapping: Record<string, string>,
|
||||
blockOutputSchemas: Record<string, OutputSchema>,
|
||||
contextVariables: Record<string, unknown>,
|
||||
language = 'javascript'
|
||||
contextVariables: Record<string, any>
|
||||
): string {
|
||||
let resolvedCode = code
|
||||
const undefinedLiteral = language === 'python' ? 'None' : 'undefined'
|
||||
|
||||
const tagPattern = new RegExp(
|
||||
`${REFERENCE.START}([a-zA-Z_](?:[a-zA-Z0-9_${REFERENCE.PATH_DELIMITER}]*[a-zA-Z0-9_])?)${REFERENCE.END}`,
|
||||
`${REFERENCE.START}([a-zA-Z_][a-zA-Z0-9_${REFERENCE.PATH_DELIMITER}]*[a-zA-Z0-9_])${REFERENCE.END}`,
|
||||
'g'
|
||||
)
|
||||
const tagMatches = resolvedCode.match(tagPattern) || []
|
||||
@@ -490,37 +486,41 @@ function resolveTagVariables(
|
||||
const tagName = match.slice(REFERENCE.START.length, -REFERENCE.END.length).trim()
|
||||
const pathParts = tagName.split(REFERENCE.PATH_DELIMITER)
|
||||
const blockName = pathParts[0]
|
||||
const fieldPath = pathParts.slice(1)
|
||||
|
||||
const result = resolveBlockReference(blockName, fieldPath, {
|
||||
blockNameMapping,
|
||||
blockData,
|
||||
blockOutputSchemas,
|
||||
})
|
||||
|
||||
if (!result) {
|
||||
const blockId = blockNameMapping[blockName]
|
||||
if (!blockId) {
|
||||
continue
|
||||
}
|
||||
|
||||
let tagValue = result.value
|
||||
const blockOutput = blockData[blockId]
|
||||
if (blockOutput === undefined) {
|
||||
continue
|
||||
}
|
||||
|
||||
let tagValue: any
|
||||
if (pathParts.length === 1) {
|
||||
tagValue = blockOutput
|
||||
} else {
|
||||
tagValue = navigatePath(blockOutput, pathParts.slice(1))
|
||||
}
|
||||
|
||||
if (tagValue === undefined) {
|
||||
resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), undefinedLiteral)
|
||||
continue
|
||||
}
|
||||
|
||||
if (typeof tagValue === 'string') {
|
||||
const trimmed = tagValue.trimStart()
|
||||
if (trimmed.startsWith('{') || trimmed.startsWith('[')) {
|
||||
try {
|
||||
tagValue = JSON.parse(tagValue)
|
||||
} catch {
|
||||
// Keep as string if not valid JSON
|
||||
}
|
||||
if (
|
||||
typeof tagValue === 'string' &&
|
||||
tagValue.length > 100 &&
|
||||
(tagValue.startsWith('{') || tagValue.startsWith('['))
|
||||
) {
|
||||
try {
|
||||
tagValue = JSON.parse(tagValue)
|
||||
} catch {
|
||||
// Keep as-is
|
||||
}
|
||||
}
|
||||
|
||||
const safeVarName = `__tag_${tagName.replace(/_/g, '_1').replace(/\./g, '_0')}`
|
||||
const safeVarName = `__tag_${tagName.replace(/[^a-zA-Z0-9_]/g, '_')}`
|
||||
contextVariables[safeVarName] = tagValue
|
||||
resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), safeVarName)
|
||||
}
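The two replacement schemes in this hunk turn a tag path such as agent.output into a sandbox-safe identifier. Collapsing every non-identifier character to '_' can make distinct tags collide, while the '_' -> '_1' / '.' -> '_0' escaping is injective. A small illustration (tag names chosen for the example):

// Collapse-to-underscore: distinct tags can map to the same variable name.
const collapse = (tag: string) => `__tag_${tag.replace(/[^a-zA-Z0-9_]/g, '_')}`
collapse('agent.output') // '__tag_agent_output'
collapse('agent_output') // '__tag_agent_output'  <- collision

// Escaped form: '_' becomes '_1' and '.' becomes '_0', so the mapping is reversible.
const escapeTag = (tag: string) => `__tag_${tag.replace(/_/g, '_1').replace(/\./g, '_0')}`
escapeTag('agent.output') // '__tag_agent_0output'
escapeTag('agent_output') // '__tag_agent_1output'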
@@ -537,27 +537,18 @@ function resolveTagVariables(
|
||||
*/
|
||||
function resolveCodeVariables(
|
||||
code: string,
|
||||
params: Record<string, unknown>,
|
||||
params: Record<string, any>,
|
||||
envVars: Record<string, string> = {},
|
||||
blockData: Record<string, unknown> = {},
|
||||
blockData: Record<string, any> = {},
|
||||
blockNameMapping: Record<string, string> = {},
|
||||
blockOutputSchemas: Record<string, OutputSchema> = {},
|
||||
workflowVariables: Record<string, unknown> = {},
|
||||
language = 'javascript'
|
||||
): { resolvedCode: string; contextVariables: Record<string, unknown> } {
|
||||
workflowVariables: Record<string, any> = {}
|
||||
): { resolvedCode: string; contextVariables: Record<string, any> } {
|
||||
let resolvedCode = code
|
||||
const contextVariables: Record<string, unknown> = {}
|
||||
const contextVariables: Record<string, any> = {}
|
||||
|
||||
resolvedCode = resolveWorkflowVariables(resolvedCode, workflowVariables, contextVariables)
|
||||
resolvedCode = resolveEnvironmentVariables(resolvedCode, params, envVars, contextVariables)
|
||||
resolvedCode = resolveTagVariables(
|
||||
resolvedCode,
|
||||
blockData,
|
||||
blockNameMapping,
|
||||
blockOutputSchemas,
|
||||
contextVariables,
|
||||
language
|
||||
)
|
||||
resolvedCode = resolveTagVariables(resolvedCode, blockData, blockNameMapping, contextVariables)
|
||||
|
||||
return { resolvedCode, contextVariables }
|
||||
}
|
||||
@@ -582,12 +573,6 @@ export async function POST(req: NextRequest) {
|
||||
let resolvedCode = '' // Store resolved code for error reporting
|
||||
|
||||
try {
|
||||
const auth = await checkInternalAuth(req)
|
||||
if (!auth.success || !auth.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized function execution attempt`)
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
|
||||
const { DEFAULT_EXECUTION_TIMEOUT_MS } = await import('@/lib/execution/constants')
|
||||
@@ -600,7 +585,6 @@ export async function POST(req: NextRequest) {
|
||||
envVars = {},
|
||||
blockData = {},
|
||||
blockNameMapping = {},
|
||||
blockOutputSchemas = {},
|
||||
workflowVariables = {},
|
||||
workflowId,
|
||||
isCustomTool = false,
|
||||
@@ -617,21 +601,20 @@ export async function POST(req: NextRequest) {
|
||||
isCustomTool,
|
||||
})
|
||||
|
||||
const lang = isValidCodeLanguage(language) ? language : DEFAULT_CODE_LANGUAGE
|
||||
|
||||
// Resolve variables in the code with workflow environment variables
|
||||
const codeResolution = resolveCodeVariables(
|
||||
code,
|
||||
executionParams,
|
||||
envVars,
|
||||
blockData,
|
||||
blockNameMapping,
|
||||
blockOutputSchemas,
|
||||
workflowVariables,
|
||||
lang
|
||||
workflowVariables
|
||||
)
|
||||
resolvedCode = codeResolution.resolvedCode
|
||||
const contextVariables = codeResolution.contextVariables
|
||||
|
||||
const lang = isValidCodeLanguage(language) ? language : DEFAULT_CODE_LANGUAGE
|
||||
|
||||
let jsImports = ''
|
||||
let jsRemainingCode = resolvedCode
|
||||
let hasImports = false
|
||||
@@ -687,11 +670,7 @@ export async function POST(req: NextRequest) {
|
||||
prologue += `const environmentVariables = JSON.parse(${JSON.stringify(JSON.stringify(envVars))});\n`
|
||||
prologueLineCount++
|
||||
for (const [k, v] of Object.entries(contextVariables)) {
|
||||
if (v === undefined) {
|
||||
prologue += `const ${k} = undefined;\n`
|
||||
} else {
|
||||
prologue += `const ${k} = JSON.parse(${JSON.stringify(JSON.stringify(v))});\n`
|
||||
}
|
||||
prologue += `const ${k} = JSON.parse(${JSON.stringify(JSON.stringify(v))});\n`
|
||||
prologueLineCount++
|
||||
}
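Both loop variants above embed each context variable into the generated prologue as a literal that the sandbox re-parses. A short illustration of why the value is stringified twice before being spliced into the source text (the sample value is illustrative):

// Embedding a value as source text for the sandboxed script.
const value = { msg: 'he said "hi"', n: 1 }
const json = JSON.stringify(value) // the data, serialized
const literal = JSON.stringify(json) // that JSON wrapped as a quoted, escaped string literal
const line = `const data = JSON.parse(${literal});\n`
// Evaluating `line` in the sandbox reconstructs the original object; quotes and
// newlines inside the value cannot break out of the generated statement.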
|
||||
@@ -762,11 +741,7 @@ export async function POST(req: NextRequest) {
|
||||
prologue += `environmentVariables = json.loads(${JSON.stringify(JSON.stringify(envVars))})\n`
|
||||
prologueLineCount++
|
||||
for (const [k, v] of Object.entries(contextVariables)) {
|
||||
if (v === undefined) {
|
||||
prologue += `${k} = None\n`
|
||||
} else {
|
||||
prologue += `${k} = json.loads(${JSON.stringify(JSON.stringify(v))})\n`
|
||||
}
|
||||
prologue += `${k} = json.loads(${JSON.stringify(JSON.stringify(v))})\n`
|
||||
prologueLineCount++
|
||||
}
|
||||
const wrapped = [
|
||||
|
||||
@@ -157,7 +157,7 @@ describe('Knowledge Base Documents API Route', () => {
|
||||
expect(vi.mocked(getDocuments)).toHaveBeenCalledWith(
|
||||
'kb-123',
|
||||
{
|
||||
enabledFilter: undefined,
|
||||
includeDisabled: false,
|
||||
search: undefined,
|
||||
limit: 50,
|
||||
offset: 0,
|
||||
@@ -166,7 +166,7 @@ describe('Knowledge Base Documents API Route', () => {
|
||||
)
|
||||
})
|
||||
|
||||
it('should return documents with default filter', async () => {
|
||||
it('should filter disabled documents by default', async () => {
|
||||
const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')
|
||||
const { getDocuments } = await import('@/lib/knowledge/documents/service')
|
||||
|
||||
@@ -194,7 +194,7 @@ describe('Knowledge Base Documents API Route', () => {
|
||||
expect(vi.mocked(getDocuments)).toHaveBeenCalledWith(
|
||||
'kb-123',
|
||||
{
|
||||
enabledFilter: undefined,
|
||||
includeDisabled: false,
|
||||
search: undefined,
|
||||
limit: 50,
|
||||
offset: 0,
|
||||
@@ -203,7 +203,7 @@ describe('Knowledge Base Documents API Route', () => {
|
||||
)
|
||||
})
|
||||
|
||||
it('should filter documents by enabled status when requested', async () => {
|
||||
it('should include disabled documents when requested', async () => {
|
||||
const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')
|
||||
const { getDocuments } = await import('@/lib/knowledge/documents/service')
|
||||
|
||||
@@ -223,7 +223,7 @@ describe('Knowledge Base Documents API Route', () => {
|
||||
},
|
||||
})
|
||||
|
||||
const url = 'http://localhost:3000/api/knowledge/kb-123/documents?enabledFilter=disabled'
|
||||
const url = 'http://localhost:3000/api/knowledge/kb-123/documents?includeDisabled=true'
|
||||
const req = new Request(url, { method: 'GET' }) as any
|
||||
|
||||
const { GET } = await import('@/app/api/knowledge/[id]/documents/route')
|
||||
@@ -233,7 +233,7 @@ describe('Knowledge Base Documents API Route', () => {
|
||||
expect(vi.mocked(getDocuments)).toHaveBeenCalledWith(
|
||||
'kb-123',
|
||||
{
|
||||
enabledFilter: 'disabled',
|
||||
includeDisabled: true,
|
||||
search: undefined,
|
||||
limit: 50,
|
||||
offset: 0,
|
||||
@@ -361,7 +361,8 @@ describe('Knowledge Base Documents API Route', () => {
|
||||
expect(vi.mocked(createSingleDocument)).toHaveBeenCalledWith(
|
||||
validDocumentData,
|
||||
'kb-123',
|
||||
expect.any(String)
|
||||
expect.any(String),
|
||||
'user-123'
|
||||
)
|
||||
})
|
||||
|
||||
@@ -469,7 +470,8 @@ describe('Knowledge Base Documents API Route', () => {
|
||||
expect(vi.mocked(createDocumentRecords)).toHaveBeenCalledWith(
|
||||
validBulkData.documents,
|
||||
'kb-123',
|
||||
expect.any(String)
|
||||
expect.any(String),
|
||||
'user-123'
|
||||
)
|
||||
expect(vi.mocked(processDocumentsWithQueue)).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
@@ -5,7 +5,6 @@ import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import {
|
||||
bulkDocumentOperation,
|
||||
bulkDocumentOperationByFilter,
|
||||
createDocumentRecords,
|
||||
createSingleDocument,
|
||||
getDocuments,
|
||||
@@ -58,20 +57,13 @@ const BulkCreateDocumentsSchema = z.object({
|
||||
bulk: z.literal(true),
|
||||
})
|
||||
|
||||
const BulkUpdateDocumentsSchema = z
|
||||
.object({
|
||||
operation: z.enum(['enable', 'disable', 'delete']),
|
||||
documentIds: z
|
||||
.array(z.string())
|
||||
.min(1, 'At least one document ID is required')
|
||||
.max(100, 'Cannot operate on more than 100 documents at once')
|
||||
.optional(),
|
||||
selectAll: z.boolean().optional(),
|
||||
enabledFilter: z.enum(['all', 'enabled', 'disabled']).optional(),
|
||||
})
|
||||
.refine((data) => data.selectAll || (data.documentIds && data.documentIds.length > 0), {
|
||||
message: 'Either selectAll must be true or documentIds must be provided',
|
||||
})
|
||||
const BulkUpdateDocumentsSchema = z.object({
|
||||
operation: z.enum(['enable', 'disable', 'delete']),
|
||||
documentIds: z
|
||||
.array(z.string())
|
||||
.min(1, 'At least one document ID is required')
|
||||
.max(100, 'Cannot operate on more than 100 documents at once'),
|
||||
})
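The refined variant of this schema accepts either an explicit list of document IDs or a select-all with an optional status filter. Illustrative payloads (values are examples):

// Accepted: explicit IDs
BulkUpdateDocumentsSchema.parse({ operation: 'disable', documentIds: ['doc-1', 'doc-2'] })
// Accepted by the refined variant: operate on everything matching a filter
BulkUpdateDocumentsSchema.parse({ operation: 'delete', selectAll: true, enabledFilter: 'disabled' })
// Rejected by the .refine(): neither selectAll nor documentIds provided
BulkUpdateDocumentsSchema.parse({ operation: 'enable' }) // throws a ZodError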
|
||||
export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
@@ -98,17 +90,14 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
}
|
||||
|
||||
const url = new URL(req.url)
|
||||
const enabledFilter = url.searchParams.get('enabledFilter') as
|
||||
| 'all'
|
||||
| 'enabled'
|
||||
| 'disabled'
|
||||
| null
|
||||
const includeDisabled = url.searchParams.get('includeDisabled') === 'true'
|
||||
const search = url.searchParams.get('search') || undefined
|
||||
const limit = Number.parseInt(url.searchParams.get('limit') || '50')
|
||||
const offset = Number.parseInt(url.searchParams.get('offset') || '0')
|
||||
const sortByParam = url.searchParams.get('sortBy')
|
||||
const sortOrderParam = url.searchParams.get('sortOrder')
|
||||
|
||||
// Validate sort parameters
|
||||
const validSortFields: DocumentSortField[] = [
|
||||
'filename',
|
||||
'fileSize',
|
||||
@@ -116,7 +105,6 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
'chunkCount',
|
||||
'uploadedAt',
|
||||
'processingStatus',
|
||||
'enabled',
|
||||
]
|
||||
const validSortOrders: SortOrder[] = ['asc', 'desc']
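As a hedged usage sketch, a client call against the enabledFilter variant of this handler might look like the following (host, knowledge-base ID, and search term are placeholders; the response shape is not shown in this hunk):

// Query parameter names match the handler above.
const query = new URLSearchParams({
  enabledFilter: 'disabled', // 'all' | 'enabled' | 'disabled'
  search: 'invoice',
  limit: '50',
  offset: '0',
  sortBy: 'uploadedAt',
  sortOrder: 'desc',
})
const res = await fetch(`/api/knowledge/kb-123/documents?${query.toString()}`)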
|
||||
@@ -132,7 +120,7 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
const result = await getDocuments(
|
||||
knowledgeBaseId,
|
||||
{
|
||||
enabledFilter: enabledFilter || undefined,
|
||||
includeDisabled,
|
||||
search,
|
||||
limit,
|
||||
offset,
|
||||
@@ -202,7 +190,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
const createdDocuments = await createDocumentRecords(
|
||||
validatedData.documents,
|
||||
knowledgeBaseId,
|
||||
requestId
|
||||
requestId,
|
||||
userId
|
||||
)
|
||||
|
||||
logger.info(
|
||||
@@ -261,10 +250,16 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
throw validationError
|
||||
}
|
||||
} else {
|
||||
// Handle single document creation
|
||||
try {
|
||||
const validatedData = CreateDocumentSchema.parse(body)
|
||||
|
||||
const newDocument = await createSingleDocument(validatedData, knowledgeBaseId, requestId)
|
||||
const newDocument = await createSingleDocument(
|
||||
validatedData,
|
||||
knowledgeBaseId,
|
||||
requestId,
|
||||
userId
|
||||
)
|
||||
|
||||
try {
|
||||
const { PlatformEvents } = await import('@/lib/core/telemetry')
|
||||
@@ -299,6 +294,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error creating document`, error)
|
||||
|
||||
// Check if it's a storage limit error
|
||||
const errorMessage = error instanceof Error ? error.message : 'Failed to create document'
|
||||
const isStorageLimitError =
|
||||
errorMessage.includes('Storage limit exceeded') || errorMessage.includes('storage limit')
|
||||
@@ -335,22 +331,16 @@ export async function PATCH(req: NextRequest, { params }: { params: Promise<{ id
|
||||
|
||||
try {
|
||||
const validatedData = BulkUpdateDocumentsSchema.parse(body)
|
||||
const { operation, documentIds, selectAll, enabledFilter } = validatedData
|
||||
const { operation, documentIds } = validatedData
|
||||
|
||||
try {
|
||||
let result
|
||||
if (selectAll) {
|
||||
result = await bulkDocumentOperationByFilter(
|
||||
knowledgeBaseId,
|
||||
operation,
|
||||
enabledFilter,
|
||||
requestId
|
||||
)
|
||||
} else if (documentIds && documentIds.length > 0) {
|
||||
result = await bulkDocumentOperation(knowledgeBaseId, operation, documentIds, requestId)
|
||||
} else {
|
||||
return NextResponse.json({ error: 'No documents specified' }, { status: 400 })
|
||||
}
|
||||
const result = await bulkDocumentOperation(
|
||||
knowledgeBaseId,
|
||||
operation,
|
||||
documentIds,
|
||||
requestId,
|
||||
session.user.id
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
|
||||
import { McpClient } from '@/lib/mcp/client'
|
||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||
import { resolveMcpConfigEnvVars } from '@/lib/mcp/resolve-config'
|
||||
import type { McpTransport } from '@/lib/mcp/types'
|
||||
import type { McpServerConfig, McpTransport } from '@/lib/mcp/types'
|
||||
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
||||
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
|
||||
|
||||
const logger = createLogger('McpServerTestAPI')
|
||||
|
||||
@@ -18,6 +19,30 @@ function isUrlBasedTransport(transport: McpTransport): boolean {
|
||||
return transport === 'streamable-http'
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve environment variables in strings
|
||||
*/
|
||||
function resolveEnvVars(value: string, envVars: Record<string, string>): string {
|
||||
const missingVars: string[] = []
|
||||
const resolvedValue = resolveEnvVarReferences(value, envVars, {
|
||||
allowEmbedded: true,
|
||||
resolveExactMatch: true,
|
||||
trimKeys: true,
|
||||
onMissing: 'keep',
|
||||
deep: false,
|
||||
missingKeys: missingVars,
|
||||
}) as string
|
||||
|
||||
if (missingVars.length > 0) {
|
||||
const uniqueMissing = Array.from(new Set(missingVars))
|
||||
uniqueMissing.forEach((envKey) => {
|
||||
logger.warn(`Environment variable "${envKey}" not found in MCP server test`)
|
||||
})
|
||||
}
|
||||
|
||||
return resolvedValue
|
||||
}
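A hedged usage sketch of this helper (the env var name and values are illustrative; unresolved references stay in place because of onMissing: 'keep'):

// With envVars = { MCP_TOKEN: 'abc123' }:
resolveEnvVars('Bearer {{MCP_TOKEN}}', envVars) // 'Bearer abc123'
resolveEnvVars('{{UNSET_VAR}}', envVars) // '{{UNSET_VAR}}' is kept and a warning is logged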
|
||||
interface TestConnectionRequest {
|
||||
name: string
|
||||
transport: McpTransport
|
||||
@@ -71,30 +96,39 @@ export const POST = withMcpAuth('write')(
|
||||
)
|
||||
}
|
||||
|
||||
// Build initial config for resolution
|
||||
const initialConfig = {
|
||||
let resolvedUrl = body.url
|
||||
let resolvedHeaders = body.headers || {}
|
||||
|
||||
try {
|
||||
const envVars = await getEffectiveDecryptedEnv(userId, workspaceId)
|
||||
|
||||
if (resolvedUrl) {
|
||||
resolvedUrl = resolveEnvVars(resolvedUrl, envVars)
|
||||
}
|
||||
|
||||
const resolvedHeadersObj: Record<string, string> = {}
|
||||
for (const [key, value] of Object.entries(resolvedHeaders)) {
|
||||
resolvedHeadersObj[key] = resolveEnvVars(value, envVars)
|
||||
}
|
||||
resolvedHeaders = resolvedHeadersObj
|
||||
} catch (envError) {
|
||||
logger.warn(
|
||||
`[${requestId}] Failed to resolve environment variables, using raw values:`,
|
||||
envError
|
||||
)
|
||||
}
|
||||
|
||||
const testConfig: McpServerConfig = {
|
||||
id: `test-${requestId}`,
|
||||
name: body.name,
|
||||
transport: body.transport,
|
||||
url: body.url,
|
||||
headers: body.headers || {},
|
||||
url: resolvedUrl,
|
||||
headers: resolvedHeaders,
|
||||
timeout: body.timeout || 10000,
|
||||
retries: 1, // Only one retry for tests
|
||||
enabled: true,
|
||||
}
|
||||
|
||||
// Resolve env vars using shared utility (non-strict mode for testing)
|
||||
const { config: testConfig, missingVars } = await resolveMcpConfigEnvVars(
|
||||
initialConfig,
|
||||
userId,
|
||||
workspaceId,
|
||||
{ strict: false }
|
||||
)
|
||||
|
||||
if (missingVars.length > 0) {
|
||||
logger.warn(`[${requestId}] Some environment variables not found:`, { missingVars })
|
||||
}
|
||||
|
||||
const testSecurityPolicy = {
|
||||
requireConsent: false,
|
||||
auditLevel: 'none' as const,
|
||||
|
||||
@@ -3,9 +3,7 @@ import { account } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
|
||||
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||
import type { StreamingExecution } from '@/executor/types'
|
||||
import { executeProviderRequest } from '@/providers'
|
||||
@@ -22,11 +20,6 @@ export async function POST(request: NextRequest) {
|
||||
const startTime = Date.now()
|
||||
|
||||
try {
|
||||
const auth = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Provider API request started`, {
|
||||
timestamp: new Date().toISOString(),
|
||||
userAgent: request.headers.get('User-Agent'),
|
||||
@@ -92,13 +85,6 @@ export async function POST(request: NextRequest) {
|
||||
verbosity,
|
||||
})
|
||||
|
||||
if (workspaceId) {
|
||||
const workspaceAccess = await checkWorkspaceAccess(workspaceId, auth.userId)
|
||||
if (!workspaceAccess.hasAccess) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
}
|
||||
|
||||
let finalApiKey: string | undefined = apiKey
|
||||
try {
|
||||
if (provider === 'vertex' && vertexCredential) {
|
||||
|
||||
apps/sim/app/api/table/[tableId]/route.ts (new file, 138 lines)
@@ -0,0 +1,138 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { deleteTable, type TableSchema } from '@/lib/table'
|
||||
import { accessError, checkAccess, normalizeColumn, verifyTableWorkspace } from '../utils'
|
||||
|
||||
const logger = createLogger('TableDetailAPI')
|
||||
|
||||
const GetTableSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
})
|
||||
|
||||
interface TableRouteParams {
|
||||
params: Promise<{ tableId: string }>
|
||||
}
|
||||
|
||||
/** GET /api/table/[tableId] - Retrieves a single table's details. */
|
||||
export async function GET(request: NextRequest, { params }: TableRouteParams) {
|
||||
const requestId = generateRequestId()
|
||||
const { tableId } = await params
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized table access attempt`)
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const validated = GetTableSchema.parse({
|
||||
workspaceId: searchParams.get('workspaceId'),
|
||||
})
|
||||
|
||||
const result = await checkAccess(tableId, authResult.userId, 'read')
|
||||
if (!result.ok) return accessError(result, requestId, tableId)
|
||||
|
||||
const { table } = result
|
||||
|
||||
const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
|
||||
if (!isValidWorkspace) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Retrieved table ${tableId} for user ${authResult.userId}`)
|
||||
|
||||
const schemaData = table.schema as TableSchema
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
table: {
|
||||
id: table.id,
|
||||
name: table.name,
|
||||
description: table.description,
|
||||
schema: {
|
||||
columns: schemaData.columns.map(normalizeColumn),
|
||||
},
|
||||
rowCount: table.rowCount,
|
||||
maxRows: table.maxRows,
|
||||
createdAt:
|
||||
table.createdAt instanceof Date
|
||||
? table.createdAt.toISOString()
|
||||
: String(table.createdAt),
|
||||
updatedAt:
|
||||
table.updatedAt instanceof Date
|
||||
? table.updatedAt.toISOString()
|
||||
: String(table.updatedAt),
|
||||
},
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error getting table:`, error)
|
||||
return NextResponse.json({ error: 'Failed to get table' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/** DELETE /api/table/[tableId] - Deletes a table and all its rows. */
|
||||
export async function DELETE(request: NextRequest, { params }: TableRouteParams) {
|
||||
const requestId = generateRequestId()
|
||||
const { tableId } = await params
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized table delete attempt`)
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const validated = GetTableSchema.parse({
|
||||
workspaceId: searchParams.get('workspaceId'),
|
||||
})
|
||||
|
||||
const result = await checkAccess(tableId, authResult.userId, 'write')
|
||||
if (!result.ok) return accessError(result, requestId, tableId)
|
||||
|
||||
const { table } = result
|
||||
|
||||
const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
|
||||
if (!isValidWorkspace) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
await deleteTable(tableId, requestId)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
message: 'Table deleted successfully',
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error deleting table:`, error)
|
||||
return NextResponse.json({ error: 'Failed to delete table' }, { status: 500 })
|
||||
}
|
||||
}
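Hedged client-side sketch of calling these two handlers (the path comes from the route; the table and workspace IDs are placeholders):

// Fetch a table's details; workspaceId is required as a query parameter.
const res = await fetch('/api/table/tbl_123?workspaceId=ws_456')
const { data } = await res.json() // data.table: { id, name, description, schema, rowCount, maxRows, ... }

// Delete the table and all of its rows.
await fetch('/api/table/tbl_123?workspaceId=ws_456', { method: 'DELETE' })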
apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts (new file, 276 lines)
@@ -0,0 +1,276 @@
|
||||
import { db } from '@sim/db'
|
||||
import { userTableRows } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import type { RowData, TableSchema } from '@/lib/table'
|
||||
import { validateRowData } from '@/lib/table'
|
||||
import { accessError, checkAccess, verifyTableWorkspace } from '../../../utils'
|
||||
|
||||
const logger = createLogger('TableRowAPI')
|
||||
|
||||
const GetRowSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
})
|
||||
|
||||
const UpdateRowSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
data: z.record(z.unknown(), { required_error: 'Row data is required' }),
|
||||
})
|
||||
|
||||
const DeleteRowSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
})
|
||||
|
||||
interface RowRouteParams {
|
||||
params: Promise<{ tableId: string; rowId: string }>
|
||||
}
|
||||
|
||||
/** GET /api/table/[tableId]/rows/[rowId] - Retrieves a single row. */
|
||||
export async function GET(request: NextRequest, { params }: RowRouteParams) {
|
||||
const requestId = generateRequestId()
|
||||
const { tableId, rowId } = await params
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const validated = GetRowSchema.parse({
|
||||
workspaceId: searchParams.get('workspaceId'),
|
||||
})
|
||||
|
||||
const result = await checkAccess(tableId, authResult.userId, 'read')
|
||||
if (!result.ok) return accessError(result, requestId, tableId)
|
||||
|
||||
const { table } = result
|
||||
|
||||
const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
|
||||
if (!isValidWorkspace) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
const [row] = await db
|
||||
.select({
|
||||
id: userTableRows.id,
|
||||
data: userTableRows.data,
|
||||
createdAt: userTableRows.createdAt,
|
||||
updatedAt: userTableRows.updatedAt,
|
||||
})
|
||||
.from(userTableRows)
|
||||
.where(
|
||||
and(
|
||||
eq(userTableRows.id, rowId),
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, validated.workspaceId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!row) {
|
||||
return NextResponse.json({ error: 'Row not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Retrieved row ${rowId} from table ${tableId}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
row: {
|
||||
id: row.id,
|
||||
data: row.data,
|
||||
createdAt: row.createdAt.toISOString(),
|
||||
updatedAt: row.updatedAt.toISOString(),
|
||||
},
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error getting row:`, error)
|
||||
return NextResponse.json({ error: 'Failed to get row' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/** PATCH /api/table/[tableId]/rows/[rowId] - Updates a single row (supports partial updates). */
|
||||
export async function PATCH(request: NextRequest, { params }: RowRouteParams) {
|
||||
const requestId = generateRequestId()
|
||||
const { tableId, rowId } = await params
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body: unknown = await request.json()
|
||||
const validated = UpdateRowSchema.parse(body)
|
||||
|
||||
const result = await checkAccess(tableId, authResult.userId, 'write')
|
||||
if (!result.ok) return accessError(result, requestId, tableId)
|
||||
|
||||
const { table } = result
|
||||
|
||||
const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
|
||||
if (!isValidWorkspace) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Fetch existing row to support partial updates
|
||||
const [existingRow] = await db
|
||||
.select({ data: userTableRows.data })
|
||||
.from(userTableRows)
|
||||
.where(
|
||||
and(
|
||||
eq(userTableRows.id, rowId),
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, validated.workspaceId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!existingRow) {
|
||||
return NextResponse.json({ error: 'Row not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Merge existing data with incoming partial data (incoming takes precedence)
|
||||
const mergedData = {
|
||||
...(existingRow.data as RowData),
|
||||
...(validated.data as RowData),
|
||||
}
|
||||
|
||||
const validation = await validateRowData({
|
||||
rowData: mergedData,
|
||||
schema: table.schema as TableSchema,
|
||||
tableId,
|
||||
excludeRowId: rowId,
|
||||
})
|
||||
if (!validation.valid) return validation.response
|
||||
|
||||
const now = new Date()
|
||||
|
||||
const [updatedRow] = await db
|
||||
.update(userTableRows)
|
||||
.set({
|
||||
data: mergedData,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(
|
||||
and(
|
||||
eq(userTableRows.id, rowId),
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, validated.workspaceId)
|
||||
)
|
||||
)
|
||||
.returning()
|
||||
|
||||
if (!updatedRow) {
|
||||
return NextResponse.json({ error: 'Row not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Updated row ${rowId} in table ${tableId}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
row: {
|
||||
id: updatedRow.id,
|
||||
data: updatedRow.data,
|
||||
createdAt: updatedRow.createdAt.toISOString(),
|
||||
updatedAt: updatedRow.updatedAt.toISOString(),
|
||||
},
|
||||
message: 'Row updated successfully',
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error updating row:`, error)
|
||||
return NextResponse.json({ error: 'Failed to update row' }, { status: 500 })
|
||||
}
|
||||
}
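Because the handler merges the incoming data over the stored row, a partial update only needs the changed columns. A hedged sketch (IDs and column names are placeholders):

// Stored row data before the call: { name: 'Acme', status: 'active', seats: 10 }
await fetch('/api/table/tbl_123/rows/row_789', {
  method: 'PATCH',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    workspaceId: 'ws_456',
    data: { status: 'churned' }, // only the changed column
  }),
})
// Stored row data after the merge: { name: 'Acme', status: 'churned', seats: 10 }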
|
||||
/** DELETE /api/table/[tableId]/rows/[rowId] - Deletes a single row. */
|
||||
export async function DELETE(request: NextRequest, { params }: RowRouteParams) {
|
||||
const requestId = generateRequestId()
|
||||
const { tableId, rowId } = await params
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body: unknown = await request.json()
|
||||
const validated = DeleteRowSchema.parse(body)
|
||||
|
||||
const result = await checkAccess(tableId, authResult.userId, 'write')
|
||||
if (!result.ok) return accessError(result, requestId, tableId)
|
||||
|
||||
const { table } = result
|
||||
|
||||
const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
|
||||
if (!isValidWorkspace) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
const [deletedRow] = await db
|
||||
.delete(userTableRows)
|
||||
.where(
|
||||
and(
|
||||
eq(userTableRows.id, rowId),
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, validated.workspaceId)
|
||||
)
|
||||
)
|
||||
.returning()
|
||||
|
||||
if (!deletedRow) {
|
||||
return NextResponse.json({ error: 'Row not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Deleted row ${rowId} from table ${tableId}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
message: 'Row deleted successfully',
|
||||
deletedCount: 1,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error deleting row:`, error)
|
||||
return NextResponse.json({ error: 'Failed to delete row' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
apps/sim/app/api/table/[tableId]/rows/route.ts (new file, 681 lines)
@@ -0,0 +1,681 @@
|
||||
import { db } from '@sim/db'
|
||||
import { userTableRows } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, sql } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import type { Filter, RowData, Sort, TableSchema } from '@/lib/table'
|
||||
import {
|
||||
checkUniqueConstraintsDb,
|
||||
getUniqueColumns,
|
||||
TABLE_LIMITS,
|
||||
USER_TABLE_ROWS_SQL_NAME,
|
||||
validateBatchRows,
|
||||
validateRowAgainstSchema,
|
||||
validateRowData,
|
||||
validateRowSize,
|
||||
} from '@/lib/table'
|
||||
import { buildFilterClause, buildSortClause } from '@/lib/table/sql'
|
||||
import { accessError, checkAccess } from '../../utils'
|
||||
|
||||
const logger = createLogger('TableRowsAPI')
|
||||
|
||||
const InsertRowSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
data: z.record(z.unknown(), { required_error: 'Row data is required' }),
|
||||
})
|
||||
|
||||
const BatchInsertRowsSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
rows: z
|
||||
.array(z.record(z.unknown()), { required_error: 'Rows array is required' })
|
||||
.min(1, 'At least one row is required')
|
||||
.max(1000, 'Cannot insert more than 1000 rows per batch'),
|
||||
})
|
||||
|
||||
const QueryRowsSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
filter: z.record(z.unknown()).optional(),
|
||||
sort: z.record(z.enum(['asc', 'desc'])).optional(),
|
||||
limit: z.coerce
|
||||
.number({ required_error: 'Limit must be a number' })
|
||||
.int('Limit must be an integer')
|
||||
.min(1, 'Limit must be at least 1')
|
||||
.max(TABLE_LIMITS.MAX_QUERY_LIMIT, `Limit cannot exceed ${TABLE_LIMITS.MAX_QUERY_LIMIT}`)
|
||||
.optional()
|
||||
.default(100),
|
||||
offset: z.coerce
|
||||
.number({ required_error: 'Offset must be a number' })
|
||||
.int('Offset must be an integer')
|
||||
.min(0, 'Offset must be 0 or greater')
|
||||
.optional()
|
||||
.default(0),
|
||||
})
|
||||
|
||||
const UpdateRowsByFilterSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
filter: z.record(z.unknown(), { required_error: 'Filter criteria is required' }),
|
||||
data: z.record(z.unknown(), { required_error: 'Update data is required' }),
|
||||
limit: z.coerce
|
||||
.number({ required_error: 'Limit must be a number' })
|
||||
.int('Limit must be an integer')
|
||||
.min(1, 'Limit must be at least 1')
|
||||
.max(1000, 'Cannot update more than 1000 rows per operation')
|
||||
.optional(),
|
||||
})
|
||||
|
||||
const DeleteRowsByFilterSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
filter: z.record(z.unknown(), { required_error: 'Filter criteria is required' }),
|
||||
limit: z.coerce
|
||||
.number({ required_error: 'Limit must be a number' })
|
||||
.int('Limit must be an integer')
|
||||
.min(1, 'Limit must be at least 1')
|
||||
.max(1000, 'Cannot delete more than 1000 rows per operation')
|
||||
.optional(),
|
||||
})
|
||||
|
||||
interface TableRowsRouteParams {
|
||||
params: Promise<{ tableId: string }>
|
||||
}
|
||||
|
||||
async function handleBatchInsert(
|
||||
requestId: string,
|
||||
tableId: string,
|
||||
body: z.infer<typeof BatchInsertRowsSchema>,
|
||||
userId: string
|
||||
): Promise<NextResponse> {
|
||||
const validated = BatchInsertRowsSchema.parse(body)
|
||||
|
||||
const accessResult = await checkAccess(tableId, userId, 'write')
|
||||
if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
|
||||
|
||||
const { table } = accessResult
|
||||
|
||||
if (validated.workspaceId !== table.workspaceId) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
const workspaceId = validated.workspaceId
|
||||
|
||||
const remainingCapacity = table.maxRows - table.rowCount
|
||||
if (remainingCapacity < validated.rows.length) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: `Insufficient capacity. Can only insert ${remainingCapacity} more rows (table has ${table.rowCount}/${table.maxRows} rows)`,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const validation = await validateBatchRows({
|
||||
rows: validated.rows as RowData[],
|
||||
schema: table.schema as TableSchema,
|
||||
tableId,
|
||||
})
|
||||
if (!validation.valid) return validation.response
|
||||
|
||||
const now = new Date()
|
||||
const rowsToInsert = validated.rows.map((data) => ({
|
||||
id: `row_${crypto.randomUUID().replace(/-/g, '')}`,
|
||||
tableId,
|
||||
workspaceId,
|
||||
data,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
createdBy: userId,
|
||||
}))
|
||||
|
||||
const insertedRows = await db.insert(userTableRows).values(rowsToInsert).returning()
|
||||
|
||||
logger.info(`[${requestId}] Batch inserted ${insertedRows.length} rows into table ${tableId}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
rows: insertedRows.map((r) => ({
|
||||
id: r.id,
|
||||
data: r.data,
|
||||
createdAt: r.createdAt.toISOString(),
|
||||
updatedAt: r.updatedAt.toISOString(),
|
||||
})),
|
||||
insertedCount: insertedRows.length,
|
||||
message: `Successfully inserted ${insertedRows.length} rows`,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/** POST /api/table/[tableId]/rows - Inserts row(s). Supports single or batch insert. */
|
||||
export async function POST(request: NextRequest, { params }: TableRowsRouteParams) {
|
||||
const requestId = generateRequestId()
|
||||
const { tableId } = await params
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body: unknown = await request.json()
|
||||
|
||||
if (
|
||||
typeof body === 'object' &&
|
||||
body !== null &&
|
||||
'rows' in body &&
|
||||
Array.isArray((body as Record<string, unknown>).rows)
|
||||
) {
|
||||
return handleBatchInsert(
|
||||
requestId,
|
||||
tableId,
|
||||
body as z.infer<typeof BatchInsertRowsSchema>,
|
||||
authResult.userId
|
||||
)
|
||||
}
|
||||
|
||||
const validated = InsertRowSchema.parse(body)
|
||||
|
||||
const accessResult = await checkAccess(tableId, authResult.userId, 'write')
|
||||
if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
|
||||
|
||||
const { table } = accessResult
|
||||
|
||||
if (validated.workspaceId !== table.workspaceId) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
const workspaceId = validated.workspaceId
|
||||
const rowData = validated.data as RowData
|
||||
|
||||
const validation = await validateRowData({
|
||||
rowData,
|
||||
schema: table.schema as TableSchema,
|
||||
tableId,
|
||||
})
|
||||
if (!validation.valid) return validation.response
|
||||
|
||||
if (table.rowCount >= table.maxRows) {
|
||||
return NextResponse.json(
|
||||
{ error: `Table row limit reached (${table.maxRows} rows max)` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const rowId = `row_${crypto.randomUUID().replace(/-/g, '')}`
|
||||
const now = new Date()
|
||||
|
||||
const [row] = await db
|
||||
.insert(userTableRows)
|
||||
.values({
|
||||
id: rowId,
|
||||
tableId,
|
||||
workspaceId,
|
||||
data: validated.data,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
createdBy: authResult.userId,
|
||||
})
|
||||
.returning()
|
||||
|
||||
logger.info(`[${requestId}] Inserted row ${rowId} into table ${tableId}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
row: {
|
||||
id: row.id,
|
||||
data: row.data,
|
||||
createdAt: row.createdAt.toISOString(),
|
||||
updatedAt: row.updatedAt.toISOString(),
|
||||
},
|
||||
message: 'Row inserted successfully',
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error inserting row:`, error)
|
||||
return NextResponse.json({ error: 'Failed to insert row' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/** GET /api/table/[tableId]/rows - Queries rows with filtering, sorting, and pagination. */
|
||||
export async function GET(request: NextRequest, { params }: TableRowsRouteParams) {
|
||||
const requestId = generateRequestId()
|
||||
const { tableId } = await params
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const workspaceId = searchParams.get('workspaceId')
|
||||
const filterParam = searchParams.get('filter')
|
||||
const sortParam = searchParams.get('sort')
|
||||
const limit = searchParams.get('limit')
|
||||
const offset = searchParams.get('offset')
|
||||
|
||||
let filter: Record<string, unknown> | undefined
|
||||
let sort: Sort | undefined
|
||||
|
||||
try {
|
||||
if (filterParam) {
|
||||
filter = JSON.parse(filterParam) as Record<string, unknown>
|
||||
}
|
||||
if (sortParam) {
|
||||
sort = JSON.parse(sortParam) as Sort
|
||||
}
|
||||
} catch {
|
||||
return NextResponse.json({ error: 'Invalid filter or sort JSON' }, { status: 400 })
|
||||
}
|
||||
|
||||
const validated = QueryRowsSchema.parse({
|
||||
workspaceId,
|
||||
filter,
|
||||
sort,
|
||||
limit,
|
||||
offset,
|
||||
})
|
||||
|
||||
const accessResult = await checkAccess(tableId, authResult.userId, 'read')
|
||||
if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
|
||||
|
||||
const { table } = accessResult
|
||||
|
||||
if (validated.workspaceId !== table.workspaceId) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
const baseConditions = [
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, validated.workspaceId),
|
||||
]
|
||||
|
||||
if (validated.filter) {
|
||||
const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME)
|
||||
if (filterClause) {
|
||||
baseConditions.push(filterClause)
|
||||
}
|
||||
}
|
||||
|
||||
let query = db
|
||||
.select({
|
||||
id: userTableRows.id,
|
||||
data: userTableRows.data,
|
||||
createdAt: userTableRows.createdAt,
|
||||
updatedAt: userTableRows.updatedAt,
|
||||
})
|
||||
.from(userTableRows)
|
||||
.where(and(...baseConditions))
|
||||
|
||||
if (validated.sort) {
|
||||
const schema = table.schema as TableSchema
|
||||
const sortClause = buildSortClause(validated.sort, USER_TABLE_ROWS_SQL_NAME, schema.columns)
|
||||
if (sortClause) {
|
||||
query = query.orderBy(sortClause) as typeof query
|
||||
}
|
||||
} else {
|
||||
query = query.orderBy(userTableRows.createdAt) as typeof query
|
||||
}
|
||||
|
||||
const countQuery = db
|
||||
.select({ count: sql<number>`count(*)` })
|
||||
.from(userTableRows)
|
||||
.where(and(...baseConditions))
|
||||
|
||||
const [{ count: totalCount }] = await countQuery
|
||||
|
||||
const rows = await query.limit(validated.limit).offset(validated.offset)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Queried ${rows.length} rows from table ${tableId} (total: ${totalCount})`
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
rows: rows.map((r) => ({
|
||||
id: r.id,
|
||||
data: r.data,
|
||||
createdAt: r.createdAt.toISOString(),
|
||||
updatedAt: r.updatedAt.toISOString(),
|
||||
})),
|
||||
rowCount: rows.length,
|
||||
totalCount: Number(totalCount),
|
||||
limit: validated.limit,
|
||||
offset: validated.offset,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error querying rows:`, error)
|
||||
return NextResponse.json({ error: 'Failed to query rows' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/** PUT /api/table/[tableId]/rows - Updates rows matching filter criteria. */
|
||||
export async function PUT(request: NextRequest, { params }: TableRowsRouteParams) {
|
||||
const requestId = generateRequestId()
|
||||
const { tableId } = await params
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body: unknown = await request.json()
|
||||
const validated = UpdateRowsByFilterSchema.parse(body)
|
||||
|
||||
const accessResult = await checkAccess(tableId, authResult.userId, 'write')
|
||||
if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
|
||||
|
||||
const { table } = accessResult
|
||||
|
||||
if (validated.workspaceId !== table.workspaceId) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
const updateData = validated.data as RowData
|
||||
|
||||
const sizeValidation = validateRowSize(updateData)
|
||||
if (!sizeValidation.valid) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid row data', details: sizeValidation.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const baseConditions = [
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, validated.workspaceId),
|
||||
]
|
||||
|
||||
const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME)
|
||||
if (filterClause) {
|
||||
baseConditions.push(filterClause)
|
||||
}
|
||||
|
||||
let matchingRowsQuery = db
|
||||
.select({
|
||||
id: userTableRows.id,
|
||||
data: userTableRows.data,
|
||||
})
|
||||
.from(userTableRows)
|
||||
.where(and(...baseConditions))
|
||||
|
||||
if (validated.limit) {
|
||||
matchingRowsQuery = matchingRowsQuery.limit(validated.limit) as typeof matchingRowsQuery
|
||||
}
|
||||
|
||||
const matchingRows = await matchingRowsQuery
|
||||
|
||||
if (matchingRows.length === 0) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: true,
|
||||
data: {
|
||||
message: 'No rows matched the filter criteria',
|
||||
updatedCount: 0,
|
||||
},
|
||||
},
|
||||
{ status: 200 }
|
||||
)
|
||||
}
|
||||
|
||||
if (matchingRows.length > TABLE_LIMITS.MAX_BULK_OPERATION_SIZE) {
|
||||
logger.warn(`[${requestId}] Updating ${matchingRows.length} rows. This may take some time.`)
|
||||
}
|
||||
|
||||
for (const row of matchingRows) {
|
||||
const existingData = row.data as RowData
|
||||
const mergedData = { ...existingData, ...updateData }
|
||||
const rowValidation = validateRowAgainstSchema(mergedData, table.schema as TableSchema)
|
||||
if (!rowValidation.valid) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Updated data does not match schema',
|
||||
details: rowValidation.errors,
|
||||
affectedRowId: row.id,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const uniqueColumns = getUniqueColumns(table.schema as TableSchema)
|
||||
if (uniqueColumns.length > 0) {
|
||||
// If updating multiple rows, check that updateData doesn't set any unique column
|
||||
// (would cause all rows to have the same value, violating uniqueness)
|
||||
if (matchingRows.length > 1) {
|
||||
const uniqueColumnsInUpdate = uniqueColumns.filter((col) => col.name in updateData)
|
||||
if (uniqueColumnsInUpdate.length > 0) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Cannot set unique column values when updating multiple rows',
|
||||
details: [
|
||||
`Columns with unique constraint: ${uniqueColumnsInUpdate.map((c) => c.name).join(', ')}. ` +
|
||||
`Updating ${matchingRows.length} rows with the same value would violate uniqueness.`,
|
||||
],
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Check unique constraints against database for each row
|
||||
for (const row of matchingRows) {
|
||||
const existingData = row.data as RowData
|
||||
const mergedData = { ...existingData, ...updateData }
|
||||
const uniqueValidation = await checkUniqueConstraintsDb(
|
||||
tableId,
|
||||
mergedData,
|
||||
table.schema as TableSchema,
|
||||
row.id
|
||||
)
|
||||
|
||||
if (!uniqueValidation.valid) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Unique constraint violation',
|
||||
details: uniqueValidation.errors,
|
||||
affectedRowId: row.id,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const now = new Date()
|
||||
|
||||
await db.transaction(async (trx) => {
|
||||
let totalUpdated = 0
|
||||
|
||||
for (let i = 0; i < matchingRows.length; i += TABLE_LIMITS.UPDATE_BATCH_SIZE) {
|
||||
const batch = matchingRows.slice(i, i + TABLE_LIMITS.UPDATE_BATCH_SIZE)
|
||||
const updatePromises = batch.map((row) => {
|
||||
const existingData = row.data as RowData
|
||||
return trx
|
||||
.update(userTableRows)
|
||||
.set({
|
||||
data: { ...existingData, ...updateData },
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(userTableRows.id, row.id))
|
||||
})
|
||||
await Promise.all(updatePromises)
|
||||
totalUpdated += batch.length
|
||||
logger.info(
|
||||
`[${requestId}] Updated batch ${Math.floor(i / TABLE_LIMITS.UPDATE_BATCH_SIZE) + 1} (${totalUpdated}/${matchingRows.length} rows)`
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
logger.info(`[${requestId}] Updated ${matchingRows.length} rows in table ${tableId}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
message: 'Rows updated successfully',
|
||||
updatedCount: matchingRows.length,
|
||||
updatedRowIds: matchingRows.map((r) => r.id),
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error updating rows by filter:`, error)
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
const detailedError = `Failed to update rows: ${errorMessage}`
|
||||
|
||||
return NextResponse.json({ error: detailedError }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/** DELETE /api/table/[tableId]/rows - Deletes rows matching filter criteria. */
|
||||
export async function DELETE(request: NextRequest, { params }: TableRowsRouteParams) {
|
||||
const requestId = generateRequestId()
|
||||
const { tableId } = await params
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body: unknown = await request.json()
|
||||
const validated = DeleteRowsByFilterSchema.parse(body)
|
||||
|
||||
const accessResult = await checkAccess(tableId, authResult.userId, 'write')
|
||||
if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
|
||||
|
||||
const { table } = accessResult
|
||||
|
||||
if (validated.workspaceId !== table.workspaceId) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
const baseConditions = [
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, validated.workspaceId),
|
||||
]
|
||||
|
||||
const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME)
|
||||
if (filterClause) {
|
||||
baseConditions.push(filterClause)
|
||||
}
|
||||
|
||||
let matchingRowsQuery = db
|
||||
.select({ id: userTableRows.id })
|
||||
.from(userTableRows)
|
||||
.where(and(...baseConditions))
|
||||
|
||||
if (validated.limit) {
|
||||
matchingRowsQuery = matchingRowsQuery.limit(validated.limit) as typeof matchingRowsQuery
|
||||
}
|
||||
|
||||
const matchingRows = await matchingRowsQuery
|
||||
|
||||
if (matchingRows.length === 0) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: true,
|
||||
data: {
|
||||
message: 'No rows matched the filter criteria',
|
||||
deletedCount: 0,
|
||||
},
|
||||
},
|
||||
{ status: 200 }
|
||||
)
|
||||
}
|
||||
|
||||
if (matchingRows.length > TABLE_LIMITS.DELETE_BATCH_SIZE) {
|
||||
logger.warn(`[${requestId}] Deleting ${matchingRows.length} rows. This may take some time.`)
|
||||
}
|
||||
|
||||
const rowIds = matchingRows.map((r) => r.id)
|
||||
|
||||
await db.transaction(async (trx) => {
|
||||
let totalDeleted = 0
|
||||
|
||||
for (let i = 0; i < rowIds.length; i += TABLE_LIMITS.DELETE_BATCH_SIZE) {
|
||||
const batch = rowIds.slice(i, i + TABLE_LIMITS.DELETE_BATCH_SIZE)
|
||||
await trx.delete(userTableRows).where(
|
||||
and(
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, validated.workspaceId),
|
||||
sql`${userTableRows.id} = ANY(ARRAY[${sql.join(
|
||||
batch.map((id) => sql`${id}`),
|
||||
sql`, `
|
||||
)}])`
|
||||
)
|
||||
)
|
||||
totalDeleted += batch.length
|
||||
logger.info(
|
||||
`[${requestId}] Deleted batch ${Math.floor(i / TABLE_LIMITS.DELETE_BATCH_SIZE) + 1} (${totalDeleted}/${rowIds.length} rows)`
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
logger.info(`[${requestId}] Deleted ${matchingRows.length} rows from table ${tableId}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
message: 'Rows deleted successfully',
|
||||
deletedCount: matchingRows.length,
|
||||
deletedRowIds: rowIds,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error deleting rows by filter:`, error)
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
const detailedError = `Failed to delete rows: ${errorMessage}`
|
||||
|
||||
return NextResponse.json({ error: detailedError }, { status: 500 })
|
||||
}
|
||||
}
|
||||
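A short usage sketch for the collection endpoints above. The host, IDs, and auth header are assumptions; the exact filter grammar is defined by buildFilterClause and is not shown in this diff, so the equality-style filter object below is illustrative only.

// Usage sketch only — host, IDs, auth header, and filter shape are assumptions.
const rowsUrl = 'https://example.com/api/table/tbl_123/rows'
const headers = { 'Content-Type': 'application/json', 'X-API-Key': '<key>' }

// Batch insert: a body containing a `rows` array is routed to handleBatchInsert.
await fetch(rowsUrl, {
  method: 'POST',
  headers,
  body: JSON.stringify({
    workspaceId: 'ws_1',
    rows: [{ email: 'a@example.com' }, { email: 'b@example.com' }],
  }),
})

// Query: filter and sort are JSON-encoded query params; limit/offset are coerced numbers.
// Sort keys must be schema columns (buildSortClause validates them against schema.columns).
const qs = new URLSearchParams({
  workspaceId: 'ws_1',
  filter: JSON.stringify({ email: 'a@example.com' }),
  sort: JSON.stringify({ email: 'asc' }),
  limit: '50',
  offset: '0',
})
const res = await fetch(`${rowsUrl}?${qs}`, { headers })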
apps/sim/app/api/table/[tableId]/rows/upsert/route.ts (new file, 182 lines)
@@ -0,0 +1,182 @@
import { db } from '@sim/db'
import { userTableRows } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, or, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import type { RowData, TableSchema } from '@/lib/table'
import { getUniqueColumns, validateRowData } from '@/lib/table'
import { accessError, checkAccess, verifyTableWorkspace } from '../../../utils'

const logger = createLogger('TableUpsertAPI')

const UpsertRowSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
  data: z.record(z.unknown(), { required_error: 'Row data is required' }),
})

interface UpsertRouteParams {
  params: Promise<{ tableId: string }>
}

/** POST /api/table/[tableId]/rows/upsert - Inserts or updates based on unique columns. */
export async function POST(request: NextRequest, { params }: UpsertRouteParams) {
  const requestId = generateRequestId()
  const { tableId } = await params

  try {
    const authResult = await checkHybridAuth(request)
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }

    const body: unknown = await request.json()
    const validated = UpsertRowSchema.parse(body)

    const result = await checkAccess(tableId, authResult.userId, 'write')
    if (!result.ok) return accessError(result, requestId, tableId)

    const { table } = result

    const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
    if (!isValidWorkspace) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }

    const schema = table.schema as TableSchema
    const rowData = validated.data as RowData

    const validation = await validateRowData({
      rowData,
      schema,
      tableId,
      checkUnique: false,
    })
    if (!validation.valid) return validation.response

    const uniqueColumns = getUniqueColumns(schema)

    if (uniqueColumns.length === 0) {
      return NextResponse.json(
        {
          error:
            'Upsert requires at least one unique column in the schema. Please add a unique constraint to a column or use insert instead.',
        },
        { status: 400 }
      )
    }

    const uniqueFilters = uniqueColumns.map((col) => {
      const value = rowData[col.name]
      if (value === undefined || value === null) {
        return null
      }
      return sql`${userTableRows.data}->>${col.name} = ${String(value)}`
    })

    const validUniqueFilters = uniqueFilters.filter((f): f is Exclude<typeof f, null> => f !== null)

    if (validUniqueFilters.length === 0) {
      return NextResponse.json(
        {
          error: `Upsert requires values for at least one unique field: ${uniqueColumns.map((c) => c.name).join(', ')}`,
        },
        { status: 400 }
      )
    }

    const [existingRow] = await db
      .select()
      .from(userTableRows)
      .where(
        and(
          eq(userTableRows.tableId, tableId),
          eq(userTableRows.workspaceId, validated.workspaceId),
          or(...validUniqueFilters)
        )
      )
      .limit(1)

    const now = new Date()

    if (!existingRow && table.rowCount >= table.maxRows) {
      return NextResponse.json(
        { error: `Table row limit reached (${table.maxRows} rows max)` },
        { status: 400 }
      )
    }

    const upsertResult = await db.transaction(async (trx) => {
      if (existingRow) {
        const [updatedRow] = await trx
          .update(userTableRows)
          .set({
            data: validated.data,
            updatedAt: now,
          })
          .where(eq(userTableRows.id, existingRow.id))
          .returning()

        return {
          row: updatedRow,
          operation: 'update' as const,
        }
      }

      const [insertedRow] = await trx
        .insert(userTableRows)
        .values({
          id: `row_${crypto.randomUUID().replace(/-/g, '')}`,
          tableId,
          workspaceId: validated.workspaceId,
          data: validated.data,
          createdAt: now,
          updatedAt: now,
          createdBy: authResult.userId,
        })
        .returning()

      return {
        row: insertedRow,
        operation: 'insert' as const,
      }
    })

    logger.info(
      `[${requestId}] Upserted (${upsertResult.operation}) row ${upsertResult.row.id} in table ${tableId}`
    )

    return NextResponse.json({
      success: true,
      data: {
        row: {
          id: upsertResult.row.id,
          data: upsertResult.row.data,
          createdAt: upsertResult.row.createdAt.toISOString(),
          updatedAt: upsertResult.row.updatedAt.toISOString(),
        },
        operation: upsertResult.operation,
        message: `Row ${upsertResult.operation === 'update' ? 'updated' : 'inserted'} successfully`,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error upserting row:`, error)

    const errorMessage = error instanceof Error ? error.message : String(error)
    const detailedError = `Failed to upsert row: ${errorMessage}`

    return NextResponse.json({ error: detailedError }, { status: 500 })
  }
}
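A usage sketch for the upsert endpoint above. Host, IDs, and the `email` column are assumptions; the table's schema must declare at least one unique column (here assumed to be `email`) for the match to resolve.

// Usage sketch only — host, IDs, auth header, and column names are assumptions.
await fetch('https://example.com/api/table/tbl_123/rows/upsert', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json', 'X-API-Key': '<key>' },
  body: JSON.stringify({
    workspaceId: 'ws_1',
    data: { email: 'a@example.com', status: 'active' },
  }),
})
// The response's data.operation is 'update' when an existing row matched a unique
// column value, otherwise 'insert'.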
apps/sim/app/api/table/route.ts (new file, 293 lines)
@@ -0,0 +1,293 @@
import { db } from '@sim/db'
import { permissions, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import {
  canCreateTable,
  createTable,
  getWorkspaceTableLimits,
  listTables,
  TABLE_LIMITS,
  type TableSchema,
} from '@/lib/table'
import { normalizeColumn } from './utils'

const logger = createLogger('TableAPI')

const ColumnSchema = z.object({
  name: z
    .string()
    .min(1, 'Column name is required')
    .max(
      TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH,
      `Column name must be ${TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH} characters or less`
    )
    .regex(
      /^[a-z_][a-z0-9_]*$/i,
      'Column name must start with a letter or underscore and contain only alphanumeric characters and underscores'
    ),
  type: z.enum(['string', 'number', 'boolean', 'date', 'json'], {
    errorMap: () => ({
      message: 'Column type must be one of: string, number, boolean, date, json',
    }),
  }),
  required: z.boolean().optional().default(false),
  unique: z.boolean().optional().default(false),
})

const CreateTableSchema = z.object({
  name: z
    .string()
    .min(1, 'Table name is required')
    .max(
      TABLE_LIMITS.MAX_TABLE_NAME_LENGTH,
      `Table name must be ${TABLE_LIMITS.MAX_TABLE_NAME_LENGTH} characters or less`
    )
    .regex(
      /^[a-z_][a-z0-9_]*$/i,
      'Table name must start with a letter or underscore and contain only alphanumeric characters and underscores'
    ),
  description: z
    .string()
    .max(
      TABLE_LIMITS.MAX_DESCRIPTION_LENGTH,
      `Description must be ${TABLE_LIMITS.MAX_DESCRIPTION_LENGTH} characters or less`
    )
    .optional(),
  schema: z.object({
    columns: z
      .array(ColumnSchema)
      .min(1, 'Table must have at least one column')
      .max(
        TABLE_LIMITS.MAX_COLUMNS_PER_TABLE,
        `Table cannot have more than ${TABLE_LIMITS.MAX_COLUMNS_PER_TABLE} columns`
      ),
  }),
  workspaceId: z.string().min(1, 'Workspace ID is required'),
})

const ListTablesSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
})

interface WorkspaceAccessResult {
  hasAccess: boolean
  canWrite: boolean
}

async function checkWorkspaceAccess(
  workspaceId: string,
  userId: string
): Promise<WorkspaceAccessResult> {
  const [workspaceData] = await db
    .select({
      id: workspace.id,
      ownerId: workspace.ownerId,
    })
    .from(workspace)
    .where(eq(workspace.id, workspaceId))
    .limit(1)

  if (!workspaceData) {
    return { hasAccess: false, canWrite: false }
  }

  if (workspaceData.ownerId === userId) {
    return { hasAccess: true, canWrite: true }
  }

  const [permission] = await db
    .select({
      permissionType: permissions.permissionType,
    })
    .from(permissions)
    .where(
      and(
        eq(permissions.userId, userId),
        eq(permissions.entityType, 'workspace'),
        eq(permissions.entityId, workspaceId)
      )
    )
    .limit(1)

  if (!permission) {
    return { hasAccess: false, canWrite: false }
  }

  const canWrite = permission.permissionType === 'admin' || permission.permissionType === 'write'

  return {
    hasAccess: true,
    canWrite,
  }
}

/** POST /api/table - Creates a new user-defined table. */
export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request)
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }

    const body: unknown = await request.json()
    const params = CreateTableSchema.parse(body)

    const { hasAccess, canWrite } = await checkWorkspaceAccess(
      params.workspaceId,
      authResult.userId
    )

    if (!hasAccess || !canWrite) {
      return NextResponse.json({ error: 'Access denied' }, { status: 403 })
    }

    // Check billing plan limits
    const existingTables = await listTables(params.workspaceId)
    const { canCreate, maxTables } = await canCreateTable(params.workspaceId, existingTables.length)

    if (!canCreate) {
      return NextResponse.json(
        {
          error: `Workspace has reached the maximum table limit (${maxTables}) for your plan. Please upgrade to create more tables.`,
        },
        { status: 403 }
      )
    }

    // Get plan-based row limits
    const planLimits = await getWorkspaceTableLimits(params.workspaceId)
    const maxRowsPerTable = planLimits.maxRowsPerTable

    const normalizedSchema: TableSchema = {
      columns: params.schema.columns.map(normalizeColumn),
    }

    const table = await createTable(
      {
        name: params.name,
        description: params.description,
        schema: normalizedSchema,
        workspaceId: params.workspaceId,
        userId: authResult.userId,
        maxRows: maxRowsPerTable,
      },
      requestId
    )

    return NextResponse.json({
      success: true,
      data: {
        table: {
          id: table.id,
          name: table.name,
          description: table.description,
          schema: table.schema,
          rowCount: table.rowCount,
          maxRows: table.maxRows,
          createdAt:
            table.createdAt instanceof Date
              ? table.createdAt.toISOString()
              : String(table.createdAt),
          updatedAt:
            table.updatedAt instanceof Date
              ? table.updatedAt.toISOString()
              : String(table.updatedAt),
        },
        message: 'Table created successfully',
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }

    if (error instanceof Error) {
      if (
        error.message.includes('Invalid table name') ||
        error.message.includes('Invalid schema') ||
        error.message.includes('already exists') ||
        error.message.includes('maximum table limit')
      ) {
        return NextResponse.json({ error: error.message }, { status: 400 })
      }
    }

    logger.error(`[${requestId}] Error creating table:`, error)
    return NextResponse.json({ error: 'Failed to create table' }, { status: 500 })
  }
}

/** GET /api/table - Lists all tables in a workspace. */
export async function GET(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request)
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }

    const { searchParams } = new URL(request.url)
    const workspaceId = searchParams.get('workspaceId')

    const validation = ListTablesSchema.safeParse({ workspaceId })
    if (!validation.success) {
      return NextResponse.json(
        { error: 'Validation error', details: validation.error.errors },
        { status: 400 }
      )
    }

    const params = validation.data

    const { hasAccess } = await checkWorkspaceAccess(params.workspaceId, authResult.userId)

    if (!hasAccess) {
      return NextResponse.json({ error: 'Access denied' }, { status: 403 })
    }

    const tables = await listTables(params.workspaceId)

    logger.info(`[${requestId}] Listed ${tables.length} tables in workspace ${params.workspaceId}`)

    return NextResponse.json({
      success: true,
      data: {
        tables: tables.map((t) => {
          const schemaData = t.schema as TableSchema
          return {
            ...t,
            schema: {
              columns: schemaData.columns.map(normalizeColumn),
            },
            createdAt:
              t.createdAt instanceof Date ? t.createdAt.toISOString() : String(t.createdAt),
            updatedAt:
              t.updatedAt instanceof Date ? t.updatedAt.toISOString() : String(t.updatedAt),
          }
        }),
        totalCount: tables.length,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error listing tables:`, error)
    return NextResponse.json({ error: 'Failed to list tables' }, { status: 500 })
  }
}
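A usage sketch for POST /api/table above. Host, auth header, and the example table/column names are assumptions; the payload shape follows CreateTableSchema.

// Usage sketch only — host, auth header, and names are placeholders.
await fetch('https://example.com/api/table', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json', 'X-API-Key': '<key>' },
  body: JSON.stringify({
    workspaceId: 'ws_1',
    name: 'contacts',
    description: 'CRM contacts',
    schema: {
      columns: [
        { name: 'email', type: 'string', required: true, unique: true },
        { name: 'age', type: 'number' },
      ],
    },
  }),
})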
apps/sim/app/api/table/utils.ts (new file, 188 lines)
@@ -0,0 +1,188 @@
import { createLogger } from '@sim/logger'
import { NextResponse } from 'next/server'
import type { ColumnDefinition, TableDefinition } from '@/lib/table'
import { getTableById } from '@/lib/table'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'

const logger = createLogger('TableUtils')

export interface TableAccessResult {
  hasAccess: true
  table: TableDefinition
}

export interface TableAccessDenied {
  hasAccess: false
  notFound?: boolean
  reason?: string
}

export type TableAccessCheck = TableAccessResult | TableAccessDenied

export type AccessResult = { ok: true; table: TableDefinition } | { ok: false; status: 404 | 403 }

export interface ApiErrorResponse {
  error: string
  details?: unknown
}

/**
 * Check if a user has read access to a table.
 * Read access is granted if:
 * 1. User created the table, OR
 * 2. User has any permission on the table's workspace (read, write, or admin)
 *
 * Follows the same pattern as Knowledge Base access checks.
 */
export async function checkTableAccess(tableId: string, userId: string): Promise<TableAccessCheck> {
  const table = await getTableById(tableId)

  if (!table) {
    return { hasAccess: false, notFound: true }
  }

  // Case 1: User created the table
  if (table.createdBy === userId) {
    return { hasAccess: true, table }
  }

  // Case 2: Table belongs to a workspace the user has permissions for
  const userPermission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId)
  if (userPermission !== null) {
    return { hasAccess: true, table }
  }

  return { hasAccess: false, reason: 'User does not have access to this table' }
}

/**
 * Check if a user has write access to a table.
 * Write access is granted if:
 * 1. User created the table, OR
 * 2. User has write or admin permissions on the table's workspace
 *
 * Follows the same pattern as Knowledge Base write access checks.
 */
export async function checkTableWriteAccess(
  tableId: string,
  userId: string
): Promise<TableAccessCheck> {
  const table = await getTableById(tableId)

  if (!table) {
    return { hasAccess: false, notFound: true }
  }

  // Case 1: User created the table
  if (table.createdBy === userId) {
    return { hasAccess: true, table }
  }

  // Case 2: Table belongs to a workspace and user has write/admin permissions
  const userPermission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId)
  if (userPermission === 'write' || userPermission === 'admin') {
    return { hasAccess: true, table }
  }

  return { hasAccess: false, reason: 'User does not have write access to this table' }
}

/**
 * @deprecated Use checkTableAccess or checkTableWriteAccess instead.
 * Legacy access check function for backwards compatibility.
 */
export async function checkAccess(
  tableId: string,
  userId: string,
  level: 'read' | 'write' | 'admin' = 'read'
): Promise<AccessResult> {
  const table = await getTableById(tableId)

  if (!table) {
    return { ok: false, status: 404 }
  }

  if (table.createdBy === userId) {
    return { ok: true, table }
  }

  const permission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId)
  const hasAccess =
    permission !== null &&
    (level === 'read' ||
      (level === 'write' && (permission === 'write' || permission === 'admin')) ||
      (level === 'admin' && permission === 'admin'))

  return hasAccess ? { ok: true, table } : { ok: false, status: 403 }
}

export function accessError(
  result: { ok: false; status: 404 | 403 },
  requestId: string,
  context?: string
): NextResponse {
  const message = result.status === 404 ? 'Table not found' : 'Access denied'
  logger.warn(`[${requestId}] ${message}${context ? `: ${context}` : ''}`)
  return NextResponse.json({ error: message }, { status: result.status })
}

/**
 * Converts a TableAccessDenied result to an appropriate HTTP response.
 * Use with checkTableAccess or checkTableWriteAccess.
 */
export function tableAccessError(
  result: TableAccessDenied,
  requestId: string,
  context?: string
): NextResponse {
  const status = result.notFound ? 404 : 403
  const message = result.notFound ? 'Table not found' : (result.reason ?? 'Access denied')
  logger.warn(`[${requestId}] ${message}${context ? `: ${context}` : ''}`)
  return NextResponse.json({ error: message }, { status })
}

export async function verifyTableWorkspace(tableId: string, workspaceId: string): Promise<boolean> {
  const table = await getTableById(tableId)
  return table?.workspaceId === workspaceId
}

export function errorResponse(
  message: string,
  status: number,
  details?: unknown
): NextResponse<ApiErrorResponse> {
  const body: ApiErrorResponse = { error: message }
  if (details !== undefined) {
    body.details = details
  }
  return NextResponse.json(body, { status })
}

export function badRequestResponse(message: string, details?: unknown) {
  return errorResponse(message, 400, details)
}

export function unauthorizedResponse(message = 'Authentication required') {
  return errorResponse(message, 401)
}

export function forbiddenResponse(message = 'Access denied') {
  return errorResponse(message, 403)
}

export function notFoundResponse(message = 'Resource not found') {
  return errorResponse(message, 404)
}

export function serverErrorResponse(message = 'Internal server error') {
  return errorResponse(message, 500)
}

export function normalizeColumn(col: ColumnDefinition): ColumnDefinition {
  return {
    name: col.name,
    type: col.type,
    required: col.required ?? false,
    unique: col.unique ?? false,
  }
}
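A sketch of how the newer helpers in utils.ts are meant to compose inside a handler. This is a hypothetical handler body, not code from the diff: the routes above still call the deprecated checkAccess/accessError pair, and the import path shown here is illustrative (the routes use relative imports like '../../utils').

// Hypothetical handler body showing the intended checkTableWriteAccess flow.
import { checkTableWriteAccess, tableAccessError } from './utils' // path assumed

async function exampleHandler(tableId: string, userId: string, requestId: string) {
  const access = await checkTableWriteAccess(tableId, userId)
  if (!access.hasAccess) {
    // Responds 404 when the table does not exist, 403 otherwise.
    return tableAccessError(access, requestId, tableId)
  }
  const { table } = access
  // ...proceed using `table` with write access verified
}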
@@ -3,7 +3,6 @@ import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createA2AClient } from '@/lib/a2a/utils'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { validateExternalUrl } from '@/lib/core/security/input-validation'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
@@ -40,18 +39,6 @@ export async function POST(request: NextRequest) {
|
||||
const body = await request.json()
|
||||
const validatedData = A2ASetPushNotificationSchema.parse(body)
|
||||
|
||||
const urlValidation = validateExternalUrl(validatedData.webhookUrl, 'Webhook URL')
|
||||
if (!urlValidation.isValid) {
|
||||
logger.warn(`[${requestId}] Invalid webhook URL`, { error: urlValidation.error })
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: urlValidation.error,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] A2A set push notification request`, {
|
||||
agentUrl: validatedData.agentUrl,
|
||||
taskId: validatedData.taskId,
|
||||
|
||||
@@ -181,7 +181,7 @@ describe('Custom Tools API Routes', () => {
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
|
||||
checkHybridAuth: vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'user-123',
|
||||
authType: 'session',
|
||||
@@ -254,7 +254,7 @@ describe('Custom Tools API Routes', () => {
|
||||
)
|
||||
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
|
||||
checkHybridAuth: vi.fn().mockResolvedValue({
|
||||
success: false,
|
||||
error: 'Unauthorized',
|
||||
}),
|
||||
@@ -304,7 +304,7 @@ describe('Custom Tools API Routes', () => {
|
||||
describe('POST /api/tools/custom', () => {
|
||||
it('should reject unauthorized requests', async () => {
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
|
||||
checkHybridAuth: vi.fn().mockResolvedValue({
|
||||
success: false,
|
||||
error: 'Unauthorized',
|
||||
}),
|
||||
@@ -390,7 +390,7 @@ describe('Custom Tools API Routes', () => {
|
||||
|
||||
it('should prevent unauthorized deletion of user-scoped tool', async () => {
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
|
||||
checkHybridAuth: vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'user-456',
|
||||
authType: 'session',
|
||||
@@ -413,7 +413,7 @@ describe('Custom Tools API Routes', () => {
|
||||
|
||||
it('should reject unauthorized requests', async () => {
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
|
||||
checkHybridAuth: vi.fn().mockResolvedValue({
|
||||
success: false,
|
||||
error: 'Unauthorized',
|
||||
}),
|
||||
|
||||
@@ -4,7 +4,7 @@ import { createLogger } from '@sim/logger'
|
||||
import { and, desc, eq, isNull, or } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { upsertCustomTools } from '@/lib/workflows/custom-tools/operations'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
@@ -42,8 +42,8 @@ export async function GET(request: NextRequest) {
|
||||
const workflowId = searchParams.get('workflowId')
|
||||
|
||||
try {
|
||||
// Use session/internal auth to support session and internal JWT (no API key access)
|
||||
const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
|
||||
// Use hybrid auth to support session, API key, and internal JWT
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized custom tools access attempt`)
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
@@ -69,8 +69,8 @@ export async function GET(request: NextRequest) {
|
||||
}
|
||||
|
||||
// Check workspace permissions
|
||||
// For internal JWT with workflowId: checkSessionOrInternalAuth already resolved userId from workflow owner
|
||||
// For session: verify user has access to the workspace
|
||||
// For internal JWT with workflowId: checkHybridAuth already resolved userId from workflow owner
|
||||
// For session/API key: verify user has access to the workspace
|
||||
// For legacy (no workspaceId): skip workspace check, rely on userId match
|
||||
if (resolvedWorkspaceId && !(authResult.authType === 'internal_jwt' && workflowId)) {
|
||||
const userPermission = await getUserEntityPermissions(
|
||||
@@ -116,8 +116,8 @@ export async function POST(req: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
// Use session/internal auth (no API key access)
|
||||
const authResult = await checkSessionOrInternalAuth(req, { requireWorkflowId: false })
|
||||
// Use hybrid auth (though this endpoint is only called from UI)
|
||||
const authResult = await checkHybridAuth(req, { requireWorkflowId: false })
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized custom tools update attempt`)
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
@@ -193,8 +193,8 @@ export async function DELETE(request: NextRequest) {
|
||||
}
|
||||
|
||||
try {
|
||||
// Use session/internal auth (no API key access)
|
||||
const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
|
||||
// Use hybrid auth (though this endpoint is only called from UI)
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized custom tool deletion attempt`)
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { validateNumericId } from '@/lib/core/security/input-validation'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||
@@ -22,7 +22,7 @@ export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized Discord send attempt: ${authResult.error}`)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
@@ -21,7 +21,7 @@ export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized Gmail add label attempt: ${authResult.error}`)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
@@ -19,7 +19,7 @@ export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized Gmail archive attempt: ${authResult.error}`)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
@@ -19,7 +19,7 @@ export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized Gmail delete attempt: ${authResult.error}`)
|
||||
|
||||
@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
@@ -35,7 +35,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Gmail draft attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'
@@ -19,7 +19,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Gmail mark read attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'
@@ -19,7 +19,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Gmail mark unread attempt: ${authResult.error}`)
@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'
@@ -21,7 +21,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Gmail move attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'

@@ -21,7 +21,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Gmail remove label attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
@@ -35,7 +35,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Gmail send attempt: ${authResult.error}`)
@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'
@@ -19,7 +19,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Gmail unarchive attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
@@ -56,7 +56,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Google Drive upload attempt: ${authResult.error}`)
@@ -1,6 +1,6 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { validateImageUrl } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'

@@ -15,7 +15,7 @@ export async function GET(request: NextRequest) {
const imageUrl = url.searchParams.get('url')
const requestId = generateRequestId()

const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
logger.error(`[${requestId}] Authentication failed for image proxy:`, authResult.error)
return new NextResponse('Unauthorized', { status: 401 })

@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { Resend } from 'resend'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'
@@ -22,7 +22,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized mail send attempt: ${authResult.error}`)
@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'
@@ -18,7 +18,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Teams chat delete attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
@@ -23,7 +23,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Teams channel write attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
@@ -22,7 +22,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Teams chat write attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
@@ -30,7 +30,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success || !authResult.userId) {
logger.warn(`[${requestId}] Unauthorized Mistral parse attempt`, {
@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { buildDeleteQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'

const logger = createLogger('MySQLDeleteAPI')
@@ -22,12 +21,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

try {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MySQL delete attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const body = await request.json()
const params = DeleteSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createMySQLConnection, executeQuery, validateQuery } from '@/app/api/tools/mysql/utils'

const logger = createLogger('MySQLExecuteAPI')
@@ -21,12 +20,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

try {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MySQL execute attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const body = await request.json()
const params = ExecuteSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { buildInsertQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'

const logger = createLogger('MySQLInsertAPI')
@@ -43,12 +42,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

try {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MySQL insert attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const body = await request.json()
const params = InsertSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createMySQLConnection, executeIntrospect } from '@/app/api/tools/mysql/utils'

const logger = createLogger('MySQLIntrospectAPI')
@@ -20,12 +19,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

try {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MySQL introspect attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const body = await request.json()
const params = IntrospectSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createMySQLConnection, executeQuery, validateQuery } from '@/app/api/tools/mysql/utils'

const logger = createLogger('MySQLQueryAPI')
@@ -21,12 +20,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

try {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MySQL query attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const body = await request.json()
const params = QuerySchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { buildUpdateQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'

const logger = createLogger('MySQLUpdateAPI')
@@ -41,12 +40,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

try {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MySQL update attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const body = await request.json()
const params = UpdateSchema.parse(body)
@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import * as XLSX from 'xlsx'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import {
@@ -39,7 +39,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized OneDrive upload attempt: ${authResult.error}`)
@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'
@@ -18,7 +18,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Outlook copy attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'
@@ -17,7 +17,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Outlook delete attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
@@ -25,7 +25,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Outlook draft attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'
@@ -17,7 +17,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Outlook mark read attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'
@@ -17,7 +17,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Outlook mark unread attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'
@@ -18,7 +18,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Outlook move attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
@@ -27,7 +27,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Outlook send attempt: ${authResult.error}`)
@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createPostgresConnection, executeDelete } from '@/app/api/tools/postgresql/utils'

const logger = createLogger('PostgreSQLDeleteAPI')
@@ -22,12 +21,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

try {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized PostgreSQL delete attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const body = await request.json()
const params = DeleteSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
createPostgresConnection,
executeQuery,
@@ -25,12 +24,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

try {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized PostgreSQL execute attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const body = await request.json()
const params = ExecuteSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createPostgresConnection, executeInsert } from '@/app/api/tools/postgresql/utils'

const logger = createLogger('PostgreSQLInsertAPI')
@@ -43,12 +42,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

try {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized PostgreSQL insert attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const body = await request.json()

const params = InsertSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createPostgresConnection, executeIntrospect } from '@/app/api/tools/postgresql/utils'

const logger = createLogger('PostgreSQLIntrospectAPI')
@@ -21,12 +20,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

try {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized PostgreSQL introspect attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const body = await request.json()
const params = IntrospectSchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createPostgresConnection, executeQuery } from '@/app/api/tools/postgresql/utils'

const logger = createLogger('PostgreSQLQueryAPI')
@@ -21,12 +20,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

try {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized PostgreSQL query attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const body = await request.json()
const params = QuerySchema.parse(body)

@@ -2,7 +2,6 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createPostgresConnection, executeUpdate } from '@/app/api/tools/postgresql/utils'

const logger = createLogger('PostgreSQLUpdateAPI')
@@ -41,12 +40,6 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)

try {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized PostgreSQL update attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}

const body = await request.json()
const params = UpdateSchema.parse(body)
@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
@@ -31,7 +31,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success || !authResult.userId) {
logger.warn(`[${requestId}] Unauthorized Pulse parse attempt`, {

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
@@ -27,7 +27,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success || !authResult.userId) {
logger.warn(`[${requestId}] Unauthorized Reducto parse attempt`, {
@@ -2,7 +2,7 @@ import { CopyObjectCommand, type ObjectCannedACL, S3Client } from '@aws-sdk/clie
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'
@@ -24,7 +24,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized S3 copy object attempt: ${authResult.error}`)

@@ -2,7 +2,7 @@ import { DeleteObjectCommand, S3Client } from '@aws-sdk/client-s3'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'
@@ -21,7 +21,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized S3 delete object attempt: ${authResult.error}`)

@@ -2,7 +2,7 @@ import { ListObjectsV2Command, S3Client } from '@aws-sdk/client-s3'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'
@@ -23,7 +23,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized S3 list objects attempt: ${authResult.error}`)

@@ -2,7 +2,7 @@ import { type ObjectCannedACL, PutObjectCommand, S3Client } from '@aws-sdk/clien
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
@@ -27,7 +27,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized S3 put object attempt: ${authResult.error}`)
@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { SEARCH_TOOL_COST } from '@/lib/billing/constants'
import { env } from '@/lib/core/config/env'
import { executeTool } from '@/tools'
@@ -22,7 +22,7 @@ export async function POST(request: NextRequest) {
const { searchParams: urlParams } = new URL(request.url)
const workflowId = urlParams.get('workflowId') || undefined

const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success || !authResult.userId) {
const errorMessage = workflowId ? 'Workflow not found' : authResult.error || 'Unauthorized'
@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import type { SFTPWrapper } from 'ssh2'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import {
createSftpConnection,
@@ -72,7 +72,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized SFTP delete attempt: ${authResult.error}`)

@@ -2,7 +2,7 @@ import path from 'path'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { createSftpConnection, getSftp, isPathSafe, sanitizePath } from '@/app/api/tools/sftp/utils'

@@ -25,7 +25,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized SFTP download attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import {
createSftpConnection,
@@ -31,7 +31,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized SFTP list attempt: ${authResult.error}`)

@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import type { SFTPWrapper } from 'ssh2'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import {
createSftpConnection,
@@ -60,7 +60,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized SFTP mkdir attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
@@ -44,7 +44,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized SFTP upload attempt: ${authResult.error}`)
@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
@@ -23,7 +23,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized SharePoint upload attempt: ${authResult.error}`)
@@ -1,6 +1,6 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'

export const dynamic = 'force-dynamic'

@@ -13,7 +13,7 @@ const SlackAddReactionSchema = z.object({

export async function POST(request: NextRequest) {
try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
return NextResponse.json(

@@ -1,6 +1,6 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'

export const dynamic = 'force-dynamic'

@@ -12,7 +12,7 @@ const SlackDeleteMessageSchema = z.object({

export async function POST(request: NextRequest) {
try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
return NextResponse.json(

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { openDMChannel } from '../utils'

@@ -31,7 +31,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Slack read messages attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { sendSlackMessage } from '../utils'

@@ -26,7 +26,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Slack send attempt: ${authResult.error}`)

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'
@@ -19,7 +19,7 @@ export async function POST(request: NextRequest) {
const requestId = generateRequestId()

try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Slack update message attempt: ${authResult.error}`)
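The hunks above apply the same mechanical change to every tool route: the handler authenticates with checkHybridAuth instead of checkInternalAuth, keeping the same { requireWorkflowId: false } option, while the MySQL and PostgreSQL routes drop their per-route auth block entirely (the shrinking line counts in their hunk headers reflect that removal). A minimal sketch of the post-change handler shape, assembled only from lines visible in this diff; the route name, logger label, and schema below are placeholders rather than code from the repository:

import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

// Placeholder names; each real route defines its own logger label and schema.
const logger = createLogger('ExampleToolAPI')
const ExampleSchema = z.object({ credential: z.string() })

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    // Same call shape the diffs show replacing checkInternalAuth.
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized example tool attempt: ${authResult.error}`)
      return NextResponse.json({ error: authResult.error || 'Unauthorized' }, { status: 401 })
    }

    const params = ExampleSchema.parse(await request.json())
    // Tool-specific work would go here.
    return NextResponse.json({ success: true, params })
  } catch (error) {
    logger.error(`[${requestId}] Example tool request failed`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}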
Some files were not shown because too many files have changed in this diff.