Mirror of https://github.com/simstudioai/sim.git, synced 2026-01-22 05:18:08 -05:00.

Compare commits (123 commits, listed by SHA1):

b03d1f5d58, be757a4f1e, 1938818027, 2818b745d1, 2d49de76ea, 1f682eb343, 8d43947eb5, 107679bf41, a8e413a999, f05f5bbc6d, 87f8fcdbf2, 6e8dc771fe, d0c3c6aec7, 8574d66aac, e79e9e7367, a8bb0db660, 4b6de03a62, 37b50cbce6, 7ca628db13, 118e4f65f0, 292cd39cfb, ea77790484, 895591514a, 0e1133fc42, 4357230a9d, e7f45166af, c662a31ac8, 51d1b958e2, 3d81c1cc14, 94c6795efc, 86c5e1b4ff, cca1772ae1, e4dd14df7a, 448b8f056c, abb671e61b, f90c9c7593, 2e624c20b5, 7093209bce, 897891ee1e, 42aa794713, ea72ab5aa9, 5173320bb5, 26d96624af, 271375df9b, a940dd6351, e69500726b, c94bb5acda, fef2d2cc82, 44909964b7, 1a13762617, cfffd050a2, d00997c5ea, 466559578e, 0a6312dbac, e503408825, ed543a71f9, 7f894ec023, 57fbd2aa1c, 80270ce7b2, fdc3af994c, 5a69d16e65, c3afbaebce, 793c888808, ffad20efc5, b08ce03409, c9373c7b3e, cbb93c65b6, 96a3fe59ff, df3e869f22, b3ca0c947c, cfbc8d7211, 15bef489f2, 4422a69a17, 8f9cf93231, 22f89cf67d, dfa018f2d4, e287388b03, 4d176c0717, c155d8ac6c, 48250f5ed8, fc6dbcf066, a537ca7ebe, c1eef30578, 6605c887ed, a919816bff, 8a8589e18d, ed807bebf2, 48ecb19af7, 9a3d5631f2, 0872314fbf, 7e4fc32d82, 4316f45175, e80660f218, 5dddb03eac, 6386e6b437, af82820a28, 4372841797, 5e8c843241, 7bf3d73ee6, 7ffc11a738, be578e2ed7, f415e5edc4, 13a6e6c3fa, f5ab7f21ae, bfb6fffe38, 4fbec0a43f, 585f5e365b, 3792bdd252, eb5d1f3e5b, 54ab82c8dd, f895bf469b, dd3209af06, b6ba3b50a7, b304233062, 57e4b49bd6, e12dd204ed, 3d9d9cbc54, 0f4ec962ad, 4827866f9a, 3e697d9ed9, 4431a1a484, 4d1a9a3f22, eb07a080fb
@@ -4696,6 +4696,26 @@ export function BedrockIcon(props: SVGProps<SVGSVGElement>) {
  )
}

export function TableIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
      xmlns='http://www.w3.org/2000/svg'
      viewBox='0 0 24 24'
      fill='none'
      stroke='currentColor'
      strokeWidth={2}
      strokeLinecap='round'
      strokeLinejoin='round'
      {...props}
    >
      <rect width='18' height='18' x='3' y='3' rx='2' />
      <path d='M3 9h18' />
      <path d='M3 15h18' />
      <path d='M9 3v18' />
      <path d='M15 3v18' />
    </svg>
  )
}
export function ReductoIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
@@ -108,6 +108,7 @@ import {
  StagehandIcon,
  StripeIcon,
  SupabaseIcon,
  TableIcon,
  TavilyIcon,
  TelegramIcon,
  TextractIcon,

@@ -236,6 +237,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
  stripe: StripeIcon,
  stt: STTIcon,
  supabase: SupabaseIcon,
  table: TableIcon,
  tavily: TavilyIcon,
  telegram: TelegramIcon,
  textract: TextractIcon,

@@ -104,6 +104,7 @@
  "stripe",
  "stt",
  "supabase",
  "table",
  "tavily",
  "telegram",
  "textract",
apps/docs/content/docs/en/tools/table.mdx (new file, 351 lines)
@@ -0,0 +1,351 @@
---
title: Table
description: User-defined data tables for storing and querying structured data
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="table"
  color="#10B981"
/>

Tables allow you to create and manage custom data tables directly within Sim. Store, query, and manipulate structured data within your workflows without needing external database integrations.

**Why Use Tables?**
- **No external setup**: Create tables instantly without configuring external databases
- **Workflow-native**: Data persists across workflow executions and is accessible from any workflow in your workspace
- **Flexible schema**: Define columns with types (string, number, boolean, date, json) and constraints (required, unique)
- **Powerful querying**: Filter, sort, and paginate data using MongoDB-style operators
- **Agent-friendly**: Tables can be used as tools by AI agents for dynamic data storage and retrieval

**Key Features:**
- Create tables with custom schemas
- Insert, update, upsert, and delete rows
- Query with filters and sorting
- Batch operations for bulk inserts
- Bulk updates and deletes by filter
- Up to 10,000 rows per table, 100 tables per workspace

## Creating Tables

Tables are created from the **Tables** section in the sidebar. Each table requires:
- **Name**: Alphanumeric with underscores (e.g., `customer_leads`)
- **Description**: Optional description of the table's purpose
- **Schema**: Define columns with name, type, and optional constraints

### Column Types

| Type | Description | Example Values |
|------|-------------|----------------|
| `string` | Text data | `"John Doe"`, `"active"` |
| `number` | Numeric data | `42`, `99.99` |
| `boolean` | True/false values | `true`, `false` |
| `date` | Date/time values | `"2024-01-15T10:30:00Z"` |
| `json` | Complex nested data | `{"address": {"city": "NYC"}}` |

### Column Constraints

- **Required**: Column must have a value (cannot be null)
- **Unique**: Values must be unique across all rows (enables upsert matching)

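To make the schema concepts concrete, here is a minimal sketch of what the `customer_leads` example could look like as a column list. The exact field names used for constraints (`required`, `unique`) are assumptions for illustration, not the exact internal shape Sim stores; `table_get_schema` returns the authoritative column definitions.

```ts
// Illustrative only: a possible schema for the `customer_leads` example.
// Constraint field names (`required`, `unique`) are assumed for the sketch.
interface ColumnDef {
  name: string
  type: 'string' | 'number' | 'boolean' | 'date' | 'json'
  required?: boolean
  unique?: boolean
}

const customerLeadsSchema: { columns: ColumnDef[] } = {
  columns: [
    { name: 'email', type: 'string', required: true, unique: true }, // unique enables upsert matching
    { name: 'name', type: 'string', required: true },
    { name: 'score', type: 'number' },
    { name: 'active', type: 'boolean' },
    { name: 'metadata', type: 'json' },
  ],
}
```
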
## Usage Instructions

Create and manage custom data tables. Store, query, and manipulate structured data within workflows.

## Tools

### `table_query_rows`

Query rows from a table with filtering, sorting, and pagination

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | No | Filter conditions using MongoDB-style operators |
| `sort` | object | No | Sort order as \{column: "asc"\|"desc"\} |
| `limit` | number | No | Maximum rows to return \(default: 100, max: 1000\) |
| `offset` | number | No | Number of rows to skip \(default: 0\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether query succeeded |
| `rows` | array | Query result rows |
| `rowCount` | number | Number of rows returned |
| `totalCount` | number | Total rows matching filter |
| `limit` | number | Limit used in query |
| `offset` | number | Offset used in query |

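The same query tool is backed by `GET /api/table/[tableId]/rows` added in this PR. The sketch below shows one way to call it directly over HTTP; it assumes the request is already authenticated in a form `checkHybridAuth` accepts, and the table and workspace IDs are placeholders.

```ts
// Sketch: querying rows via GET /api/table/[tableId]/rows.
// filter and sort are passed as JSON strings in the query string.
async function queryActiveAdults(tableId: string, workspaceId: string) {
  const params = new URLSearchParams({
    workspaceId,
    filter: JSON.stringify({ status: 'active', age: { $gte: 18 } }),
    sort: JSON.stringify({ createdAt: 'desc' }),
    limit: '50',
    offset: '0',
  })

  const res = await fetch(`/api/table/${tableId}/rows?${params}`)
  if (!res.ok) throw new Error(`Query failed: ${res.status}`)

  // Response shape mirrors the Output table above:
  // { success, data: { rows, rowCount, totalCount, limit, offset } }
  const { data } = await res.json()
  return data.rows as Array<{ id: string; data: Record<string, unknown> }>
}
```
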
### `table_insert_row`

Insert a new row into a table

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `data` | object | Yes | Row data as JSON object matching the table schema |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was inserted |
| `row` | object | Inserted row data including generated ID |
| `message` | string | Status message |

### `table_upsert_row`

Insert or update a row based on unique column constraints. If a row with a matching unique field exists, update it; otherwise insert a new row.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `data` | object | Yes | Row data to insert or update |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was upserted |
| `row` | object | Upserted row data |
| `operation` | string | Operation performed: "insert" or "update" |
| `message` | string | Status message |

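Upsert maps to `POST /api/table/[tableId]/rows/upsert` in this PR. A hedged sketch of calling it, assuming the `email` column carries a unique constraint and authentication is already handled; identifiers are placeholders.

```ts
// Sketch: upserting a lead keyed on the unique `email` column.
async function upsertLead(tableId: string, workspaceId: string) {
  const res = await fetch(`/api/table/${tableId}/rows/upsert`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      workspaceId,
      data: { email: 'jane@example.com', name: 'Jane Doe', score: 42 },
    }),
  })
  const { data } = await res.json()
  // Per the Output table above, `operation` reports whether the unique
  // match caused an update or a brand-new insert.
  return data as { row: { id: string }; operation: 'insert' | 'update' }
}
```
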
### `table_batch_insert_rows`

Insert multiple rows at once (up to 1000 rows per batch)

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rows` | array | Yes | Array of row data objects to insert |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether batch insert succeeded |
| `rows` | array | Array of inserted rows with IDs |
| `insertedCount` | number | Number of rows inserted |
| `message` | string | Status message |

### `table_update_row`

Update a specific row by its ID

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to update |
| `data` | object | Yes | Data to update \(partial update supported\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was updated |
| `row` | object | Updated row data |
| `message` | string | Status message |

### `table_update_rows_by_filter`

Update multiple rows matching a filter condition

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | Yes | Filter to match rows for update |
| `data` | object | Yes | Data to apply to matching rows |
| `limit` | number | No | Maximum rows to update \(default: 1000\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether update succeeded |
| `updatedCount` | number | Number of rows updated |
| `updatedRowIds` | array | IDs of updated rows |
| `message` | string | Status message |

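Filter-based updates correspond to `PUT /api/table/[tableId]/rows` in this PR. One detail from the route implementation worth knowing: when more than one row matches, the update data may not set a column with a unique constraint, since every matched row would receive the same value. A minimal sketch, assuming auth is in place and IDs are placeholders:

```ts
// Sketch: bulk update via PUT /api/table/[tableId]/rows.
async function deactivateStaleLeads(tableId: string, workspaceId: string) {
  const res = await fetch(`/api/table/${tableId}/rows`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      workspaceId,
      filter: { status: 'pending', score: { $lt: 10 } },
      data: { status: 'inactive' }, // must not touch a unique column for multi-row matches
      limit: 500,
    }),
  })
  const { data } = await res.json()
  return data as { updatedCount: number; updatedRowIds: string[] }
}
```
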
### `table_delete_row`

Delete a specific row by its ID

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to delete |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was deleted |
| `deletedCount` | number | Number of rows deleted \(1 or 0\) |
| `message` | string | Status message |

### `table_delete_rows_by_filter`

Delete multiple rows matching a filter condition

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | Yes | Filter to match rows for deletion |
| `limit` | number | No | Maximum rows to delete \(default: 1000\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether delete succeeded |
| `deletedCount` | number | Number of rows deleted |
| `deletedRowIds` | array | IDs of deleted rows |
| `message` | string | Status message |

### `table_get_row`

Get a single row by its ID

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to retrieve |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was found |
| `row` | object | Row data |
| `message` | string | Status message |

### `table_get_schema`

Get the schema definition for a table

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether schema was retrieved |
| `name` | string | Table name |
| `columns` | array | Array of column definitions |
| `message` | string | Status message |

## Filter Operators

Filters use MongoDB-style operators for flexible querying:

| Operator | Description | Example |
|----------|-------------|---------|
| `$eq` | Equals | `{"status": {"$eq": "active"}}` or `{"status": "active"}` |
| `$ne` | Not equals | `{"status": {"$ne": "deleted"}}` |
| `$gt` | Greater than | `{"age": {"$gt": 18}}` |
| `$gte` | Greater than or equal | `{"score": {"$gte": 80}}` |
| `$lt` | Less than | `{"price": {"$lt": 100}}` |
| `$lte` | Less than or equal | `{"quantity": {"$lte": 10}}` |
| `$in` | In array | `{"status": {"$in": ["active", "pending"]}}` |
| `$nin` | Not in array | `{"type": {"$nin": ["spam", "blocked"]}}` |
| `$contains` | String contains | `{"email": {"$contains": "@gmail.com"}}` |

### Combining Filters

Multiple field conditions are combined with AND logic:

```json
{
  "status": "active",
  "age": {"$gte": 18}
}
```

Use `$or` for OR logic:

```json
{
  "$or": [
    {"status": "active"},
    {"status": "pending"}
  ]
}
```

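To make the operator semantics concrete, here is a small in-memory model of how such a filter could be evaluated against a row. This is an illustrative reimplementation only; the real API compiles filters to SQL via `buildFilterClause`, and its exact edge-case behavior may differ.

```ts
// Illustrative model of the MongoDB-style filter semantics above:
// implicit AND across fields, `$or` holding alternative sub-filters.
type Filter = Record<string, unknown>

function matchesCondition(value: unknown, cond: unknown): boolean {
  if (cond === null || typeof cond !== 'object') return value === cond // shorthand for $eq
  return Object.entries(cond as Record<string, unknown>).every(([op, expected]) => {
    switch (op) {
      case '$eq': return value === expected
      case '$ne': return value !== expected
      case '$gt': return (value as number) > (expected as number)
      case '$gte': return (value as number) >= (expected as number)
      case '$lt': return (value as number) < (expected as number)
      case '$lte': return (value as number) <= (expected as number)
      case '$in': return (expected as unknown[]).includes(value)
      case '$nin': return !(expected as unknown[]).includes(value)
      case '$contains': return String(value).includes(String(expected))
      default: return false
    }
  })
}

function matchesFilter(row: Record<string, unknown>, filter: Filter): boolean {
  const { $or, ...fields } = filter as { $or?: Filter[] } & Record<string, unknown>
  const fieldsMatch = Object.entries(fields).every(([key, cond]) => matchesCondition(row[key], cond))
  const orMatch = !$or || $or.some((sub) => matchesFilter(row, sub))
  return fieldsMatch && orMatch
}
```

For example, `matchesFilter({ status: 'pending', age: 20 }, { age: { $gte: 18 }, $or: [{ status: 'active' }, { status: 'pending' }] })` evaluates to true.
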
## Sort Specification

Specify sort order with column names and direction:

```json
{
  "createdAt": "desc"
}
```

Multi-column sorting:

```json
{
  "priority": "desc",
  "name": "asc"
}
```

## Built-in Columns

Every row automatically includes:

| Column | Type | Description |
|--------|------|-------------|
| `id` | string | Unique row identifier |
| `createdAt` | date | When the row was created |
| `updatedAt` | date | When the row was last modified |

These can be used in filters and sorting.

## Limits

| Resource | Limit |
|----------|-------|
| Tables per workspace | 100 |
| Rows per table | 10,000 |
| Columns per table | 50 |
| Max row size | 100KB |
| String value length | 10,000 characters |
| Query limit | 1,000 rows |
| Batch insert size | 1,000 rows |
| Bulk update/delete | 1,000 rows |

## Notes

- Category: `blocks`
- Type: `table`
- Tables are scoped to workspaces and accessible from any workflow within that workspace
- Data persists across workflow executions
- Use unique constraints to enable upsert functionality
- The visual filter/sort builder provides an easy way to construct queries without writing JSON
apps/sim/app/api/table/[tableId]/route.ts (new file, 138 lines)
@@ -0,0 +1,138 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { deleteTable, type TableSchema } from '@/lib/table'
import { accessError, checkAccess, normalizeColumn, verifyTableWorkspace } from '../utils'

const logger = createLogger('TableDetailAPI')

const GetTableSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
})

interface TableRouteParams {
  params: Promise<{ tableId: string }>
}

/** GET /api/table/[tableId] - Retrieves a single table's details. */
export async function GET(request: NextRequest, { params }: TableRouteParams) {
  const requestId = generateRequestId()
  const { tableId } = await params

  try {
    const authResult = await checkHybridAuth(request)
    if (!authResult.success || !authResult.userId) {
      logger.warn(`[${requestId}] Unauthorized table access attempt`)
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }

    const { searchParams } = new URL(request.url)
    const validated = GetTableSchema.parse({
      workspaceId: searchParams.get('workspaceId'),
    })

    const result = await checkAccess(tableId, authResult.userId, 'read')
    if (!result.ok) return accessError(result, requestId, tableId)

    const { table } = result

    const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
    if (!isValidWorkspace) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }

    logger.info(`[${requestId}] Retrieved table ${tableId} for user ${authResult.userId}`)

    const schemaData = table.schema as TableSchema

    return NextResponse.json({
      success: true,
      data: {
        table: {
          id: table.id,
          name: table.name,
          description: table.description,
          schema: {
            columns: schemaData.columns.map(normalizeColumn),
          },
          rowCount: table.rowCount,
          maxRows: table.maxRows,
          createdAt:
            table.createdAt instanceof Date
              ? table.createdAt.toISOString()
              : String(table.createdAt),
          updatedAt:
            table.updatedAt instanceof Date
              ? table.updatedAt.toISOString()
              : String(table.updatedAt),
        },
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error getting table:`, error)
    return NextResponse.json({ error: 'Failed to get table' }, { status: 500 })
  }
}

/** DELETE /api/table/[tableId] - Deletes a table and all its rows. */
export async function DELETE(request: NextRequest, { params }: TableRouteParams) {
  const requestId = generateRequestId()
  const { tableId } = await params

  try {
    const authResult = await checkHybridAuth(request)
    if (!authResult.success || !authResult.userId) {
      logger.warn(`[${requestId}] Unauthorized table delete attempt`)
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }

    const { searchParams } = new URL(request.url)
    const validated = GetTableSchema.parse({
      workspaceId: searchParams.get('workspaceId'),
    })

    const result = await checkAccess(tableId, authResult.userId, 'write')
    if (!result.ok) return accessError(result, requestId, tableId)

    const { table } = result

    const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
    if (!isValidWorkspace) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }

    await deleteTable(tableId, requestId)

    return NextResponse.json({
      success: true,
      data: {
        message: 'Table deleted successfully',
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error deleting table:`, error)
    return NextResponse.json({ error: 'Failed to delete table' }, { status: 500 })
  }
}
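For reference, a hedged sketch of consuming the GET handler above from client code; it assumes the caller is already authenticated in a form `checkHybridAuth` accepts, and the IDs are placeholders.

```ts
// Sketch: reading a table's details via GET /api/table/[tableId].
async function getTableDetails(tableId: string, workspaceId: string) {
  const res = await fetch(`/api/table/${tableId}?workspaceId=${encodeURIComponent(workspaceId)}`)
  if (!res.ok) throw new Error(`Failed to load table: ${res.status}`)
  const { data } = await res.json()
  // data.table.schema.columns has been passed through normalizeColumn;
  // rowCount and maxRows report current usage against the table's limit.
  return data.table
}
```
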
apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts (new file, 276 lines)
@@ -0,0 +1,276 @@
|
||||
import { db } from '@sim/db'
|
||||
import { userTableRows } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import type { RowData, TableSchema } from '@/lib/table'
|
||||
import { validateRowData } from '@/lib/table'
|
||||
import { accessError, checkAccess, verifyTableWorkspace } from '../../../utils'
|
||||
|
||||
const logger = createLogger('TableRowAPI')
|
||||
|
||||
const GetRowSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
})
|
||||
|
||||
const UpdateRowSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
data: z.record(z.unknown(), { required_error: 'Row data is required' }),
|
||||
})
|
||||
|
||||
const DeleteRowSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
})
|
||||
|
||||
interface RowRouteParams {
|
||||
params: Promise<{ tableId: string; rowId: string }>
|
||||
}
|
||||
|
||||
/** GET /api/table/[tableId]/rows/[rowId] - Retrieves a single row. */
|
||||
export async function GET(request: NextRequest, { params }: RowRouteParams) {
|
||||
const requestId = generateRequestId()
|
||||
const { tableId, rowId } = await params
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const validated = GetRowSchema.parse({
|
||||
workspaceId: searchParams.get('workspaceId'),
|
||||
})
|
||||
|
||||
const result = await checkAccess(tableId, authResult.userId, 'read')
|
||||
if (!result.ok) return accessError(result, requestId, tableId)
|
||||
|
||||
const { table } = result
|
||||
|
||||
const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
|
||||
if (!isValidWorkspace) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
const [row] = await db
|
||||
.select({
|
||||
id: userTableRows.id,
|
||||
data: userTableRows.data,
|
||||
createdAt: userTableRows.createdAt,
|
||||
updatedAt: userTableRows.updatedAt,
|
||||
})
|
||||
.from(userTableRows)
|
||||
.where(
|
||||
and(
|
||||
eq(userTableRows.id, rowId),
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, validated.workspaceId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!row) {
|
||||
return NextResponse.json({ error: 'Row not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Retrieved row ${rowId} from table ${tableId}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
row: {
|
||||
id: row.id,
|
||||
data: row.data,
|
||||
createdAt: row.createdAt.toISOString(),
|
||||
updatedAt: row.updatedAt.toISOString(),
|
||||
},
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error getting row:`, error)
|
||||
return NextResponse.json({ error: 'Failed to get row' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/** PATCH /api/table/[tableId]/rows/[rowId] - Updates a single row (supports partial updates). */
|
||||
export async function PATCH(request: NextRequest, { params }: RowRouteParams) {
|
||||
const requestId = generateRequestId()
|
||||
const { tableId, rowId } = await params
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body: unknown = await request.json()
|
||||
const validated = UpdateRowSchema.parse(body)
|
||||
|
||||
const result = await checkAccess(tableId, authResult.userId, 'write')
|
||||
if (!result.ok) return accessError(result, requestId, tableId)
|
||||
|
||||
const { table } = result
|
||||
|
||||
const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
|
||||
if (!isValidWorkspace) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Fetch existing row to support partial updates
|
||||
const [existingRow] = await db
|
||||
.select({ data: userTableRows.data })
|
||||
.from(userTableRows)
|
||||
.where(
|
||||
and(
|
||||
eq(userTableRows.id, rowId),
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, validated.workspaceId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!existingRow) {
|
||||
return NextResponse.json({ error: 'Row not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Merge existing data with incoming partial data (incoming takes precedence)
|
||||
const mergedData = {
|
||||
...(existingRow.data as RowData),
|
||||
...(validated.data as RowData),
|
||||
}
|
||||
|
||||
const validation = await validateRowData({
|
||||
rowData: mergedData,
|
||||
schema: table.schema as TableSchema,
|
||||
tableId,
|
||||
excludeRowId: rowId,
|
||||
})
|
||||
if (!validation.valid) return validation.response
|
||||
|
||||
const now = new Date()
|
||||
|
||||
const [updatedRow] = await db
|
||||
.update(userTableRows)
|
||||
.set({
|
||||
data: mergedData,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(
|
||||
and(
|
||||
eq(userTableRows.id, rowId),
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, validated.workspaceId)
|
||||
)
|
||||
)
|
||||
.returning()
|
||||
|
||||
if (!updatedRow) {
|
||||
return NextResponse.json({ error: 'Row not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Updated row ${rowId} in table ${tableId}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
row: {
|
||||
id: updatedRow.id,
|
||||
data: updatedRow.data,
|
||||
createdAt: updatedRow.createdAt.toISOString(),
|
||||
updatedAt: updatedRow.updatedAt.toISOString(),
|
||||
},
|
||||
message: 'Row updated successfully',
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error updating row:`, error)
|
||||
return NextResponse.json({ error: 'Failed to update row' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/** DELETE /api/table/[tableId]/rows/[rowId] - Deletes a single row. */
|
||||
export async function DELETE(request: NextRequest, { params }: RowRouteParams) {
|
||||
const requestId = generateRequestId()
|
||||
const { tableId, rowId } = await params
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body: unknown = await request.json()
|
||||
const validated = DeleteRowSchema.parse(body)
|
||||
|
||||
const result = await checkAccess(tableId, authResult.userId, 'write')
|
||||
if (!result.ok) return accessError(result, requestId, tableId)
|
||||
|
||||
const { table } = result
|
||||
|
||||
const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
|
||||
if (!isValidWorkspace) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
const [deletedRow] = await db
|
||||
.delete(userTableRows)
|
||||
.where(
|
||||
and(
|
||||
eq(userTableRows.id, rowId),
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, validated.workspaceId)
|
||||
)
|
||||
)
|
||||
.returning()
|
||||
|
||||
if (!deletedRow) {
|
||||
return NextResponse.json({ error: 'Row not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Deleted row ${rowId} from table ${tableId}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
message: 'Row deleted successfully',
|
||||
deletedCount: 1,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error deleting row:`, error)
|
||||
return NextResponse.json({ error: 'Failed to delete row' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
apps/sim/app/api/table/[tableId]/rows/route.ts (new file, 681 lines)
@@ -0,0 +1,681 @@
|
||||
import { db } from '@sim/db'
|
||||
import { userTableRows } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, sql } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import type { Filter, RowData, Sort, TableSchema } from '@/lib/table'
|
||||
import {
|
||||
checkUniqueConstraintsDb,
|
||||
getUniqueColumns,
|
||||
TABLE_LIMITS,
|
||||
USER_TABLE_ROWS_SQL_NAME,
|
||||
validateBatchRows,
|
||||
validateRowAgainstSchema,
|
||||
validateRowData,
|
||||
validateRowSize,
|
||||
} from '@/lib/table'
|
||||
import { buildFilterClause, buildSortClause } from '@/lib/table/sql'
|
||||
import { accessError, checkAccess } from '../../utils'
|
||||
|
||||
const logger = createLogger('TableRowsAPI')
|
||||
|
||||
const InsertRowSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
data: z.record(z.unknown(), { required_error: 'Row data is required' }),
|
||||
})
|
||||
|
||||
const BatchInsertRowsSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
rows: z
|
||||
.array(z.record(z.unknown()), { required_error: 'Rows array is required' })
|
||||
.min(1, 'At least one row is required')
|
||||
.max(1000, 'Cannot insert more than 1000 rows per batch'),
|
||||
})
|
||||
|
||||
const QueryRowsSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
filter: z.record(z.unknown()).optional(),
|
||||
sort: z.record(z.enum(['asc', 'desc'])).optional(),
|
||||
limit: z.coerce
|
||||
.number({ required_error: 'Limit must be a number' })
|
||||
.int('Limit must be an integer')
|
||||
.min(1, 'Limit must be at least 1')
|
||||
.max(TABLE_LIMITS.MAX_QUERY_LIMIT, `Limit cannot exceed ${TABLE_LIMITS.MAX_QUERY_LIMIT}`)
|
||||
.optional()
|
||||
.default(100),
|
||||
offset: z.coerce
|
||||
.number({ required_error: 'Offset must be a number' })
|
||||
.int('Offset must be an integer')
|
||||
.min(0, 'Offset must be 0 or greater')
|
||||
.optional()
|
||||
.default(0),
|
||||
})
|
||||
|
||||
const UpdateRowsByFilterSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
filter: z.record(z.unknown(), { required_error: 'Filter criteria is required' }),
|
||||
data: z.record(z.unknown(), { required_error: 'Update data is required' }),
|
||||
limit: z.coerce
|
||||
.number({ required_error: 'Limit must be a number' })
|
||||
.int('Limit must be an integer')
|
||||
.min(1, 'Limit must be at least 1')
|
||||
.max(1000, 'Cannot update more than 1000 rows per operation')
|
||||
.optional(),
|
||||
})
|
||||
|
||||
const DeleteRowsByFilterSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
filter: z.record(z.unknown(), { required_error: 'Filter criteria is required' }),
|
||||
limit: z.coerce
|
||||
.number({ required_error: 'Limit must be a number' })
|
||||
.int('Limit must be an integer')
|
||||
.min(1, 'Limit must be at least 1')
|
||||
.max(1000, 'Cannot delete more than 1000 rows per operation')
|
||||
.optional(),
|
||||
})
|
||||
|
||||
interface TableRowsRouteParams {
|
||||
params: Promise<{ tableId: string }>
|
||||
}
|
||||
|
||||
async function handleBatchInsert(
|
||||
requestId: string,
|
||||
tableId: string,
|
||||
body: z.infer<typeof BatchInsertRowsSchema>,
|
||||
userId: string
|
||||
): Promise<NextResponse> {
|
||||
const validated = BatchInsertRowsSchema.parse(body)
|
||||
|
||||
const accessResult = await checkAccess(tableId, userId, 'write')
|
||||
if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
|
||||
|
||||
const { table } = accessResult
|
||||
|
||||
if (validated.workspaceId !== table.workspaceId) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
const workspaceId = validated.workspaceId
|
||||
|
||||
const remainingCapacity = table.maxRows - table.rowCount
|
||||
if (remainingCapacity < validated.rows.length) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: `Insufficient capacity. Can only insert ${remainingCapacity} more rows (table has ${table.rowCount}/${table.maxRows} rows)`,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const validation = await validateBatchRows({
|
||||
rows: validated.rows as RowData[],
|
||||
schema: table.schema as TableSchema,
|
||||
tableId,
|
||||
})
|
||||
if (!validation.valid) return validation.response
|
||||
|
||||
const now = new Date()
|
||||
const rowsToInsert = validated.rows.map((data) => ({
|
||||
id: `row_${crypto.randomUUID().replace(/-/g, '')}`,
|
||||
tableId,
|
||||
workspaceId,
|
||||
data,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
createdBy: userId,
|
||||
}))
|
||||
|
||||
const insertedRows = await db.insert(userTableRows).values(rowsToInsert).returning()
|
||||
|
||||
logger.info(`[${requestId}] Batch inserted ${insertedRows.length} rows into table ${tableId}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
rows: insertedRows.map((r) => ({
|
||||
id: r.id,
|
||||
data: r.data,
|
||||
createdAt: r.createdAt.toISOString(),
|
||||
updatedAt: r.updatedAt.toISOString(),
|
||||
})),
|
||||
insertedCount: insertedRows.length,
|
||||
message: `Successfully inserted ${insertedRows.length} rows`,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/** POST /api/table/[tableId]/rows - Inserts row(s). Supports single or batch insert. */
|
||||
export async function POST(request: NextRequest, { params }: TableRowsRouteParams) {
|
||||
const requestId = generateRequestId()
|
||||
const { tableId } = await params
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body: unknown = await request.json()
|
||||
|
||||
if (
|
||||
typeof body === 'object' &&
|
||||
body !== null &&
|
||||
'rows' in body &&
|
||||
Array.isArray((body as Record<string, unknown>).rows)
|
||||
) {
|
||||
return handleBatchInsert(
|
||||
requestId,
|
||||
tableId,
|
||||
body as z.infer<typeof BatchInsertRowsSchema>,
|
||||
authResult.userId
|
||||
)
|
||||
}
|
||||
|
||||
const validated = InsertRowSchema.parse(body)
|
||||
|
||||
const accessResult = await checkAccess(tableId, authResult.userId, 'write')
|
||||
if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
|
||||
|
||||
const { table } = accessResult
|
||||
|
||||
if (validated.workspaceId !== table.workspaceId) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
const workspaceId = validated.workspaceId
|
||||
const rowData = validated.data as RowData
|
||||
|
||||
const validation = await validateRowData({
|
||||
rowData,
|
||||
schema: table.schema as TableSchema,
|
||||
tableId,
|
||||
})
|
||||
if (!validation.valid) return validation.response
|
||||
|
||||
if (table.rowCount >= table.maxRows) {
|
||||
return NextResponse.json(
|
||||
{ error: `Table row limit reached (${table.maxRows} rows max)` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const rowId = `row_${crypto.randomUUID().replace(/-/g, '')}`
|
||||
const now = new Date()
|
||||
|
||||
const [row] = await db
|
||||
.insert(userTableRows)
|
||||
.values({
|
||||
id: rowId,
|
||||
tableId,
|
||||
workspaceId,
|
||||
data: validated.data,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
createdBy: authResult.userId,
|
||||
})
|
||||
.returning()
|
||||
|
||||
logger.info(`[${requestId}] Inserted row ${rowId} into table ${tableId}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
row: {
|
||||
id: row.id,
|
||||
data: row.data,
|
||||
createdAt: row.createdAt.toISOString(),
|
||||
updatedAt: row.updatedAt.toISOString(),
|
||||
},
|
||||
message: 'Row inserted successfully',
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error inserting row:`, error)
|
||||
return NextResponse.json({ error: 'Failed to insert row' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/** GET /api/table/[tableId]/rows - Queries rows with filtering, sorting, and pagination. */
|
||||
export async function GET(request: NextRequest, { params }: TableRowsRouteParams) {
|
||||
const requestId = generateRequestId()
|
||||
const { tableId } = await params
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const workspaceId = searchParams.get('workspaceId')
|
||||
const filterParam = searchParams.get('filter')
|
||||
const sortParam = searchParams.get('sort')
|
||||
const limit = searchParams.get('limit')
|
||||
const offset = searchParams.get('offset')
|
||||
|
||||
let filter: Record<string, unknown> | undefined
|
||||
let sort: Sort | undefined
|
||||
|
||||
try {
|
||||
if (filterParam) {
|
||||
filter = JSON.parse(filterParam) as Record<string, unknown>
|
||||
}
|
||||
if (sortParam) {
|
||||
sort = JSON.parse(sortParam) as Sort
|
||||
}
|
||||
} catch {
|
||||
return NextResponse.json({ error: 'Invalid filter or sort JSON' }, { status: 400 })
|
||||
}
|
||||
|
||||
const validated = QueryRowsSchema.parse({
|
||||
workspaceId,
|
||||
filter,
|
||||
sort,
|
||||
limit,
|
||||
offset,
|
||||
})
|
||||
|
||||
const accessResult = await checkAccess(tableId, authResult.userId, 'read')
|
||||
if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
|
||||
|
||||
const { table } = accessResult
|
||||
|
||||
if (validated.workspaceId !== table.workspaceId) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
const baseConditions = [
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, validated.workspaceId),
|
||||
]
|
||||
|
||||
if (validated.filter) {
|
||||
const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME)
|
||||
if (filterClause) {
|
||||
baseConditions.push(filterClause)
|
||||
}
|
||||
}
|
||||
|
||||
let query = db
|
||||
.select({
|
||||
id: userTableRows.id,
|
||||
data: userTableRows.data,
|
||||
createdAt: userTableRows.createdAt,
|
||||
updatedAt: userTableRows.updatedAt,
|
||||
})
|
||||
.from(userTableRows)
|
||||
.where(and(...baseConditions))
|
||||
|
||||
if (validated.sort) {
|
||||
const schema = table.schema as TableSchema
|
||||
const sortClause = buildSortClause(validated.sort, USER_TABLE_ROWS_SQL_NAME, schema.columns)
|
||||
if (sortClause) {
|
||||
query = query.orderBy(sortClause) as typeof query
|
||||
}
|
||||
} else {
|
||||
query = query.orderBy(userTableRows.createdAt) as typeof query
|
||||
}
|
||||
|
||||
const countQuery = db
|
||||
.select({ count: sql<number>`count(*)` })
|
||||
.from(userTableRows)
|
||||
.where(and(...baseConditions))
|
||||
|
||||
const [{ count: totalCount }] = await countQuery
|
||||
|
||||
const rows = await query.limit(validated.limit).offset(validated.offset)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Queried ${rows.length} rows from table ${tableId} (total: ${totalCount})`
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
rows: rows.map((r) => ({
|
||||
id: r.id,
|
||||
data: r.data,
|
||||
createdAt: r.createdAt.toISOString(),
|
||||
updatedAt: r.updatedAt.toISOString(),
|
||||
})),
|
||||
rowCount: rows.length,
|
||||
totalCount: Number(totalCount),
|
||||
limit: validated.limit,
|
||||
offset: validated.offset,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error querying rows:`, error)
|
||||
return NextResponse.json({ error: 'Failed to query rows' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/** PUT /api/table/[tableId]/rows - Updates rows matching filter criteria. */
|
||||
export async function PUT(request: NextRequest, { params }: TableRowsRouteParams) {
|
||||
const requestId = generateRequestId()
|
||||
const { tableId } = await params
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body: unknown = await request.json()
|
||||
const validated = UpdateRowsByFilterSchema.parse(body)
|
||||
|
||||
const accessResult = await checkAccess(tableId, authResult.userId, 'write')
|
||||
if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
|
||||
|
||||
const { table } = accessResult
|
||||
|
||||
if (validated.workspaceId !== table.workspaceId) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
const updateData = validated.data as RowData
|
||||
|
||||
const sizeValidation = validateRowSize(updateData)
|
||||
if (!sizeValidation.valid) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid row data', details: sizeValidation.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const baseConditions = [
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, validated.workspaceId),
|
||||
]
|
||||
|
||||
const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME)
|
||||
if (filterClause) {
|
||||
baseConditions.push(filterClause)
|
||||
}
|
||||
|
||||
let matchingRowsQuery = db
|
||||
.select({
|
||||
id: userTableRows.id,
|
||||
data: userTableRows.data,
|
||||
})
|
||||
.from(userTableRows)
|
||||
.where(and(...baseConditions))
|
||||
|
||||
if (validated.limit) {
|
||||
matchingRowsQuery = matchingRowsQuery.limit(validated.limit) as typeof matchingRowsQuery
|
||||
}
|
||||
|
||||
const matchingRows = await matchingRowsQuery
|
||||
|
||||
if (matchingRows.length === 0) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: true,
|
||||
data: {
|
||||
message: 'No rows matched the filter criteria',
|
||||
updatedCount: 0,
|
||||
},
|
||||
},
|
||||
{ status: 200 }
|
||||
)
|
||||
}
|
||||
|
||||
if (matchingRows.length > TABLE_LIMITS.MAX_BULK_OPERATION_SIZE) {
|
||||
logger.warn(`[${requestId}] Updating ${matchingRows.length} rows. This may take some time.`)
|
||||
}
|
||||
|
||||
for (const row of matchingRows) {
|
||||
const existingData = row.data as RowData
|
||||
const mergedData = { ...existingData, ...updateData }
|
||||
const rowValidation = validateRowAgainstSchema(mergedData, table.schema as TableSchema)
|
||||
if (!rowValidation.valid) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Updated data does not match schema',
|
||||
details: rowValidation.errors,
|
||||
affectedRowId: row.id,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const uniqueColumns = getUniqueColumns(table.schema as TableSchema)
|
||||
if (uniqueColumns.length > 0) {
|
||||
// If updating multiple rows, check that updateData doesn't set any unique column
|
||||
// (would cause all rows to have the same value, violating uniqueness)
|
||||
if (matchingRows.length > 1) {
|
||||
const uniqueColumnsInUpdate = uniqueColumns.filter((col) => col.name in updateData)
|
||||
if (uniqueColumnsInUpdate.length > 0) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Cannot set unique column values when updating multiple rows',
|
||||
details: [
|
||||
`Columns with unique constraint: ${uniqueColumnsInUpdate.map((c) => c.name).join(', ')}. ` +
|
||||
`Updating ${matchingRows.length} rows with the same value would violate uniqueness.`,
|
||||
],
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Check unique constraints against database for each row
|
||||
for (const row of matchingRows) {
|
||||
const existingData = row.data as RowData
|
||||
const mergedData = { ...existingData, ...updateData }
|
||||
const uniqueValidation = await checkUniqueConstraintsDb(
|
||||
tableId,
|
||||
mergedData,
|
||||
table.schema as TableSchema,
|
||||
row.id
|
||||
)
|
||||
|
||||
if (!uniqueValidation.valid) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Unique constraint violation',
|
||||
details: uniqueValidation.errors,
|
||||
affectedRowId: row.id,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const now = new Date()
|
||||
|
||||
await db.transaction(async (trx) => {
|
||||
let totalUpdated = 0
|
||||
|
||||
for (let i = 0; i < matchingRows.length; i += TABLE_LIMITS.UPDATE_BATCH_SIZE) {
|
||||
const batch = matchingRows.slice(i, i + TABLE_LIMITS.UPDATE_BATCH_SIZE)
|
||||
const updatePromises = batch.map((row) => {
|
||||
const existingData = row.data as RowData
|
||||
return trx
|
||||
.update(userTableRows)
|
||||
.set({
|
||||
data: { ...existingData, ...updateData },
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(userTableRows.id, row.id))
|
||||
})
|
||||
await Promise.all(updatePromises)
|
||||
totalUpdated += batch.length
|
||||
logger.info(
|
||||
`[${requestId}] Updated batch ${Math.floor(i / TABLE_LIMITS.UPDATE_BATCH_SIZE) + 1} (${totalUpdated}/${matchingRows.length} rows)`
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
logger.info(`[${requestId}] Updated ${matchingRows.length} rows in table ${tableId}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
message: 'Rows updated successfully',
|
||||
updatedCount: matchingRows.length,
|
||||
updatedRowIds: matchingRows.map((r) => r.id),
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error updating rows by filter:`, error)
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
const detailedError = `Failed to update rows: ${errorMessage}`
|
||||
|
||||
return NextResponse.json({ error: detailedError }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/** DELETE /api/table/[tableId]/rows - Deletes rows matching filter criteria. */
|
||||
export async function DELETE(request: NextRequest, { params }: TableRowsRouteParams) {
|
||||
const requestId = generateRequestId()
|
||||
const { tableId } = await params
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body: unknown = await request.json()
|
||||
const validated = DeleteRowsByFilterSchema.parse(body)
|
||||
|
||||
const accessResult = await checkAccess(tableId, authResult.userId, 'write')
|
||||
if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
|
||||
|
||||
const { table } = accessResult
|
||||
|
||||
if (validated.workspaceId !== table.workspaceId) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
const baseConditions = [
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, validated.workspaceId),
|
||||
]
|
||||
|
||||
const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME)
|
||||
if (filterClause) {
|
||||
baseConditions.push(filterClause)
|
||||
}
|
||||
|
||||
let matchingRowsQuery = db
|
||||
.select({ id: userTableRows.id })
|
||||
.from(userTableRows)
|
||||
.where(and(...baseConditions))
|
||||
|
||||
if (validated.limit) {
|
||||
matchingRowsQuery = matchingRowsQuery.limit(validated.limit) as typeof matchingRowsQuery
|
||||
}
|
||||
|
||||
const matchingRows = await matchingRowsQuery
|
||||
|
||||
if (matchingRows.length === 0) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: true,
|
||||
data: {
|
||||
message: 'No rows matched the filter criteria',
|
||||
deletedCount: 0,
|
||||
},
|
||||
},
|
||||
{ status: 200 }
|
||||
)
|
||||
}
|
||||
|
||||
if (matchingRows.length > TABLE_LIMITS.DELETE_BATCH_SIZE) {
|
||||
logger.warn(`[${requestId}] Deleting ${matchingRows.length} rows. This may take some time.`)
|
||||
}
|
||||
|
||||
const rowIds = matchingRows.map((r) => r.id)
|
||||
|
||||
await db.transaction(async (trx) => {
|
||||
let totalDeleted = 0
|
||||
|
||||
for (let i = 0; i < rowIds.length; i += TABLE_LIMITS.DELETE_BATCH_SIZE) {
|
||||
const batch = rowIds.slice(i, i + TABLE_LIMITS.DELETE_BATCH_SIZE)
|
||||
await trx.delete(userTableRows).where(
|
||||
and(
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, validated.workspaceId),
|
||||
sql`${userTableRows.id} = ANY(ARRAY[${sql.join(
|
||||
batch.map((id) => sql`${id}`),
|
||||
sql`, `
|
||||
)}])`
|
||||
)
|
||||
)
|
||||
totalDeleted += batch.length
|
||||
logger.info(
|
||||
`[${requestId}] Deleted batch ${Math.floor(i / TABLE_LIMITS.DELETE_BATCH_SIZE) + 1} (${totalDeleted}/${rowIds.length} rows)`
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
logger.info(`[${requestId}] Deleted ${matchingRows.length} rows from table ${tableId}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
message: 'Rows deleted successfully',
|
||||
deletedCount: matchingRows.length,
|
||||
deletedRowIds: rowIds,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error deleting rows by filter:`, error)
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
const detailedError = `Failed to delete rows: ${errorMessage}`
|
||||
|
||||
return NextResponse.json({ error: detailedError }, { status: 500 })
|
||||
}
|
||||
}
|
||||
apps/sim/app/api/table/[tableId]/rows/upsert/route.ts (new file, 182 lines)
@@ -0,0 +1,182 @@
|
||||
import { db } from '@sim/db'
|
||||
import { userTableRows } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, or, sql } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import type { RowData, TableSchema } from '@/lib/table'
|
||||
import { getUniqueColumns, validateRowData } from '@/lib/table'
|
||||
import { accessError, checkAccess, verifyTableWorkspace } from '../../../utils'
|
||||
|
||||
const logger = createLogger('TableUpsertAPI')
|
||||
|
||||
const UpsertRowSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
data: z.record(z.unknown(), { required_error: 'Row data is required' }),
|
||||
})
|
||||
|
||||
interface UpsertRouteParams {
|
||||
params: Promise<{ tableId: string }>
|
||||
}
|
||||
|
||||
/** POST /api/table/[tableId]/rows/upsert - Inserts or updates based on unique columns. */
|
||||
export async function POST(request: NextRequest, { params }: UpsertRouteParams) {
|
||||
const requestId = generateRequestId()
|
||||
const { tableId } = await params
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body: unknown = await request.json()
|
||||
const validated = UpsertRowSchema.parse(body)
|
||||
|
||||
const result = await checkAccess(tableId, authResult.userId, 'write')
|
||||
if (!result.ok) return accessError(result, requestId, tableId)
|
||||
|
||||
const { table } = result
|
||||
|
||||
const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
|
||||
if (!isValidWorkspace) {
|
||||
logger.warn(
|
||||
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
|
||||
}
|
||||
|
||||
const schema = table.schema as TableSchema
|
||||
const rowData = validated.data as RowData
|
||||
|
||||
const validation = await validateRowData({
|
||||
rowData,
|
||||
schema,
|
||||
tableId,
|
||||
checkUnique: false,
|
||||
})
|
||||
if (!validation.valid) return validation.response
|
||||
|
||||
const uniqueColumns = getUniqueColumns(schema)
|
||||
|
||||
if (uniqueColumns.length === 0) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error:
|
||||
'Upsert requires at least one unique column in the schema. Please add a unique constraint to a column or use insert instead.',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const uniqueFilters = uniqueColumns.map((col) => {
|
||||
const value = rowData[col.name]
|
||||
if (value === undefined || value === null) {
|
||||
return null
|
||||
}
|
||||
return sql`${userTableRows.data}->>${col.name} = ${String(value)}`
|
||||
})
|
||||
|
||||
const validUniqueFilters = uniqueFilters.filter((f): f is Exclude<typeof f, null> => f !== null)
|
||||
|
||||
if (validUniqueFilters.length === 0) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: `Upsert requires values for at least one unique field: ${uniqueColumns.map((c) => c.name).join(', ')}`,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const [existingRow] = await db
|
||||
.select()
|
||||
.from(userTableRows)
|
||||
.where(
|
||||
and(
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, validated.workspaceId),
|
||||
or(...validUniqueFilters)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
const now = new Date()
|
||||
|
||||
if (!existingRow && table.rowCount >= table.maxRows) {
|
||||
return NextResponse.json(
|
||||
{ error: `Table row limit reached (${table.maxRows} rows max)` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const upsertResult = await db.transaction(async (trx) => {
|
||||
if (existingRow) {
|
||||
const [updatedRow] = await trx
|
||||
.update(userTableRows)
|
||||
.set({
|
||||
data: validated.data,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(userTableRows.id, existingRow.id))
|
||||
.returning()
|
||||
|
||||
return {
|
||||
row: updatedRow,
|
||||
operation: 'update' as const,
|
||||
}
|
||||
}
|
||||
|
||||
const [insertedRow] = await trx
|
||||
.insert(userTableRows)
|
||||
.values({
|
||||
id: `row_${crypto.randomUUID().replace(/-/g, '')}`,
|
||||
tableId,
|
||||
workspaceId: validated.workspaceId,
|
||||
data: validated.data,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
createdBy: authResult.userId,
|
||||
})
|
||||
.returning()
|
||||
|
||||
return {
|
||||
row: insertedRow,
|
||||
operation: 'insert' as const,
|
||||
}
|
||||
})
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Upserted (${upsertResult.operation}) row ${upsertResult.row.id} in table ${tableId}`
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
row: {
|
||||
id: upsertResult.row.id,
|
||||
data: upsertResult.row.data,
|
||||
createdAt: upsertResult.row.createdAt.toISOString(),
|
||||
updatedAt: upsertResult.row.updatedAt.toISOString(),
|
||||
},
|
||||
operation: upsertResult.operation,
|
||||
message: `Row ${upsertResult.operation === 'update' ? 'updated' : 'inserted'} successfully`,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error upserting row:`, error)
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
const detailedError = `Failed to upsert row: ${errorMessage}`
|
||||
|
||||
return NextResponse.json({ error: detailedError }, { status: 500 })
|
||||
}
|
||||
}
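A minimal sketch of how a client might call this upsert endpoint, assuming it is served from the apps/sim/app/api/table/[tableId]/rows/upsert/route.ts path above; the relative URL and auth transport (session cookie or API key via checkHybridAuth) are assumptions. Matching happens server-side against the schema's unique columns with the JSONB ->> operator, so the request only carries workspaceId and the row data.

// Hypothetical client-side call; field names mirror UpsertRowSchema above.
async function upsertRow(tableId: string, workspaceId: string, data: Record<string, unknown>) {
  const res = await fetch(`/api/table/${tableId}/rows/upsert`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ workspaceId, data }),
  })
  const result = await res.json()
  if (!res.ok) throw new Error(result.error ?? 'Upsert failed')
  // result.data.operation is 'insert' or 'update'; result.data.row holds the stored row.
  return result.data
}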
293
apps/sim/app/api/table/route.ts
Normal file
@@ -0,0 +1,293 @@
|
||||
import { db } from '@sim/db'
|
||||
import { permissions, workspace } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import {
|
||||
canCreateTable,
|
||||
createTable,
|
||||
getWorkspaceTableLimits,
|
||||
listTables,
|
||||
TABLE_LIMITS,
|
||||
type TableSchema,
|
||||
} from '@/lib/table'
|
||||
import { normalizeColumn } from './utils'
|
||||
|
||||
const logger = createLogger('TableAPI')
|
||||
|
||||
const ColumnSchema = z.object({
|
||||
name: z
|
||||
.string()
|
||||
.min(1, 'Column name is required')
|
||||
.max(
|
||||
TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH,
|
||||
`Column name must be ${TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH} characters or less`
|
||||
)
|
||||
.regex(
|
||||
/^[a-z_][a-z0-9_]*$/i,
|
||||
'Column name must start with a letter or underscore and contain only alphanumeric characters and underscores'
|
||||
),
|
||||
type: z.enum(['string', 'number', 'boolean', 'date', 'json'], {
|
||||
errorMap: () => ({
|
||||
message: 'Column type must be one of: string, number, boolean, date, json',
|
||||
}),
|
||||
}),
|
||||
required: z.boolean().optional().default(false),
|
||||
unique: z.boolean().optional().default(false),
|
||||
})
|
||||
|
||||
const CreateTableSchema = z.object({
|
||||
name: z
|
||||
.string()
|
||||
.min(1, 'Table name is required')
|
||||
.max(
|
||||
TABLE_LIMITS.MAX_TABLE_NAME_LENGTH,
|
||||
`Table name must be ${TABLE_LIMITS.MAX_TABLE_NAME_LENGTH} characters or less`
|
||||
)
|
||||
.regex(
|
||||
/^[a-z_][a-z0-9_]*$/i,
|
||||
'Table name must start with a letter or underscore and contain only alphanumeric characters and underscores'
|
||||
),
|
||||
description: z
|
||||
.string()
|
||||
.max(
|
||||
TABLE_LIMITS.MAX_DESCRIPTION_LENGTH,
|
||||
`Description must be ${TABLE_LIMITS.MAX_DESCRIPTION_LENGTH} characters or less`
|
||||
)
|
||||
.optional(),
|
||||
schema: z.object({
|
||||
columns: z
|
||||
.array(ColumnSchema)
|
||||
.min(1, 'Table must have at least one column')
|
||||
.max(
|
||||
TABLE_LIMITS.MAX_COLUMNS_PER_TABLE,
|
||||
`Table cannot have more than ${TABLE_LIMITS.MAX_COLUMNS_PER_TABLE} columns`
|
||||
),
|
||||
}),
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
})
|
||||
|
||||
const ListTablesSchema = z.object({
|
||||
workspaceId: z.string().min(1, 'Workspace ID is required'),
|
||||
})
|
||||
|
||||
interface WorkspaceAccessResult {
|
||||
hasAccess: boolean
|
||||
canWrite: boolean
|
||||
}
|
||||
|
||||
async function checkWorkspaceAccess(
|
||||
workspaceId: string,
|
||||
userId: string
|
||||
): Promise<WorkspaceAccessResult> {
|
||||
const [workspaceData] = await db
|
||||
.select({
|
||||
id: workspace.id,
|
||||
ownerId: workspace.ownerId,
|
||||
})
|
||||
.from(workspace)
|
||||
.where(eq(workspace.id, workspaceId))
|
||||
.limit(1)
|
||||
|
||||
if (!workspaceData) {
|
||||
return { hasAccess: false, canWrite: false }
|
||||
}
|
||||
|
||||
if (workspaceData.ownerId === userId) {
|
||||
return { hasAccess: true, canWrite: true }
|
||||
}
|
||||
|
||||
const [permission] = await db
|
||||
.select({
|
||||
permissionType: permissions.permissionType,
|
||||
})
|
||||
.from(permissions)
|
||||
.where(
|
||||
and(
|
||||
eq(permissions.userId, userId),
|
||||
eq(permissions.entityType, 'workspace'),
|
||||
eq(permissions.entityId, workspaceId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!permission) {
|
||||
return { hasAccess: false, canWrite: false }
|
||||
}
|
||||
|
||||
const canWrite = permission.permissionType === 'admin' || permission.permissionType === 'write'
|
||||
|
||||
return {
|
||||
hasAccess: true,
|
||||
canWrite,
|
||||
}
|
||||
}
|
||||
|
||||
/** POST /api/table - Creates a new user-defined table. */
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body: unknown = await request.json()
|
||||
const params = CreateTableSchema.parse(body)
|
||||
|
||||
const { hasAccess, canWrite } = await checkWorkspaceAccess(
|
||||
params.workspaceId,
|
||||
authResult.userId
|
||||
)
|
||||
|
||||
if (!hasAccess || !canWrite) {
|
||||
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Check billing plan limits
|
||||
const existingTables = await listTables(params.workspaceId)
|
||||
const { canCreate, maxTables } = await canCreateTable(params.workspaceId, existingTables.length)
|
||||
|
||||
if (!canCreate) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: `Workspace has reached the maximum table limit (${maxTables}) for your plan. Please upgrade to create more tables.`,
|
||||
},
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
// Get plan-based row limits
|
||||
const planLimits = await getWorkspaceTableLimits(params.workspaceId)
|
||||
const maxRowsPerTable = planLimits.maxRowsPerTable
|
||||
|
||||
const normalizedSchema: TableSchema = {
|
||||
columns: params.schema.columns.map(normalizeColumn),
|
||||
}
|
||||
|
||||
const table = await createTable(
|
||||
{
|
||||
name: params.name,
|
||||
description: params.description,
|
||||
schema: normalizedSchema,
|
||||
workspaceId: params.workspaceId,
|
||||
userId: authResult.userId,
|
||||
maxRows: maxRowsPerTable,
|
||||
},
|
||||
requestId
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
table: {
|
||||
id: table.id,
|
||||
name: table.name,
|
||||
description: table.description,
|
||||
schema: table.schema,
|
||||
rowCount: table.rowCount,
|
||||
maxRows: table.maxRows,
|
||||
createdAt:
|
||||
table.createdAt instanceof Date
|
||||
? table.createdAt.toISOString()
|
||||
: String(table.createdAt),
|
||||
updatedAt:
|
||||
table.updatedAt instanceof Date
|
||||
? table.updatedAt.toISOString()
|
||||
: String(table.updatedAt),
|
||||
},
|
||||
message: 'Table created successfully',
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
if (error instanceof Error) {
|
||||
if (
|
||||
error.message.includes('Invalid table name') ||
|
||||
error.message.includes('Invalid schema') ||
|
||||
error.message.includes('already exists') ||
|
||||
error.message.includes('maximum table limit')
|
||||
) {
|
||||
return NextResponse.json({ error: error.message }, { status: 400 })
|
||||
}
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error creating table:`, error)
|
||||
return NextResponse.json({ error: 'Failed to create table' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/** GET /api/table - Lists all tables in a workspace. */
|
||||
export async function GET(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const workspaceId = searchParams.get('workspaceId')
|
||||
|
||||
const validation = ListTablesSchema.safeParse({ workspaceId })
|
||||
if (!validation.success) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: validation.error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const params = validation.data
|
||||
|
||||
const { hasAccess } = await checkWorkspaceAccess(params.workspaceId, authResult.userId)
|
||||
|
||||
if (!hasAccess) {
|
||||
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
|
||||
}
|
||||
|
||||
const tables = await listTables(params.workspaceId)
|
||||
|
||||
logger.info(`[${requestId}] Listed ${tables.length} tables in workspace ${params.workspaceId}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
tables: tables.map((t) => {
|
||||
const schemaData = t.schema as TableSchema
|
||||
return {
|
||||
...t,
|
||||
schema: {
|
||||
columns: schemaData.columns.map(normalizeColumn),
|
||||
},
|
||||
createdAt:
|
||||
t.createdAt instanceof Date ? t.createdAt.toISOString() : String(t.createdAt),
|
||||
updatedAt:
|
||||
t.updatedAt instanceof Date ? t.updatedAt.toISOString() : String(t.updatedAt),
|
||||
}
|
||||
}),
|
||||
totalCount: tables.length,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Validation error', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error listing tables:`, error)
|
||||
return NextResponse.json({ error: 'Failed to list tables' }, { status: 500 })
|
||||
}
|
||||
}
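For reference, a hedged sketch of how the two handlers above might be exercised from a client. The request shapes follow CreateTableSchema and ListTablesSchema; the relative URLs, the example table name, and the column choices are assumptions for illustration.

// Hypothetical usage of POST /api/table and GET /api/table.
async function createContactsTable(workspaceId: string) {
  const res = await fetch('/api/table', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      workspaceId,
      name: 'contacts',
      description: 'CRM contacts',
      schema: {
        columns: [
          { name: 'email', type: 'string', required: true, unique: true },
          { name: 'age', type: 'number' },
        ],
      },
    }),
  })
  const result = await res.json()
  if (!res.ok) throw new Error(result.error ?? 'Failed to create table')
  return result.data.table
}

async function listWorkspaceTables(workspaceId: string) {
  const res = await fetch(`/api/table?workspaceId=${encodeURIComponent(workspaceId)}`)
  const result = await res.json()
  if (!res.ok) throw new Error(result.error ?? 'Failed to list tables')
  return result.data.tables
}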
188
apps/sim/app/api/table/utils.ts
Normal file
@@ -0,0 +1,188 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { NextResponse } from 'next/server'
|
||||
import type { ColumnDefinition, TableDefinition } from '@/lib/table'
|
||||
import { getTableById } from '@/lib/table'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('TableUtils')
|
||||
|
||||
export interface TableAccessResult {
|
||||
hasAccess: true
|
||||
table: TableDefinition
|
||||
}
|
||||
|
||||
export interface TableAccessDenied {
|
||||
hasAccess: false
|
||||
notFound?: boolean
|
||||
reason?: string
|
||||
}
|
||||
|
||||
export type TableAccessCheck = TableAccessResult | TableAccessDenied
|
||||
|
||||
export type AccessResult = { ok: true; table: TableDefinition } | { ok: false; status: 404 | 403 }
|
||||
|
||||
export interface ApiErrorResponse {
|
||||
error: string
|
||||
details?: unknown
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a user has read access to a table.
|
||||
* Read access is granted if:
|
||||
* 1. User created the table, OR
|
||||
* 2. User has any permission on the table's workspace (read, write, or admin)
|
||||
*
|
||||
* Follows the same pattern as Knowledge Base access checks.
|
||||
*/
|
||||
export async function checkTableAccess(tableId: string, userId: string): Promise<TableAccessCheck> {
|
||||
const table = await getTableById(tableId)
|
||||
|
||||
if (!table) {
|
||||
return { hasAccess: false, notFound: true }
|
||||
}
|
||||
|
||||
// Case 1: User created the table
|
||||
if (table.createdBy === userId) {
|
||||
return { hasAccess: true, table }
|
||||
}
|
||||
|
||||
// Case 2: Table belongs to a workspace the user has permissions for
|
||||
const userPermission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId)
|
||||
if (userPermission !== null) {
|
||||
return { hasAccess: true, table }
|
||||
}
|
||||
|
||||
return { hasAccess: false, reason: 'User does not have access to this table' }
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a user has write access to a table.
|
||||
* Write access is granted if:
|
||||
* 1. User created the table, OR
|
||||
* 2. User has write or admin permissions on the table's workspace
|
||||
*
|
||||
* Follows the same pattern as Knowledge Base write access checks.
|
||||
*/
|
||||
export async function checkTableWriteAccess(
|
||||
tableId: string,
|
||||
userId: string
|
||||
): Promise<TableAccessCheck> {
|
||||
const table = await getTableById(tableId)
|
||||
|
||||
if (!table) {
|
||||
return { hasAccess: false, notFound: true }
|
||||
}
|
||||
|
||||
// Case 1: User created the table
|
||||
if (table.createdBy === userId) {
|
||||
return { hasAccess: true, table }
|
||||
}
|
||||
|
||||
// Case 2: Table belongs to a workspace and user has write/admin permissions
|
||||
const userPermission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId)
|
||||
if (userPermission === 'write' || userPermission === 'admin') {
|
||||
return { hasAccess: true, table }
|
||||
}
|
||||
|
||||
return { hasAccess: false, reason: 'User does not have write access to this table' }
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Use checkTableAccess or checkTableWriteAccess instead.
|
||||
* Legacy access check function for backwards compatibility.
|
||||
*/
|
||||
export async function checkAccess(
|
||||
tableId: string,
|
||||
userId: string,
|
||||
level: 'read' | 'write' | 'admin' = 'read'
|
||||
): Promise<AccessResult> {
|
||||
const table = await getTableById(tableId)
|
||||
|
||||
if (!table) {
|
||||
return { ok: false, status: 404 }
|
||||
}
|
||||
|
||||
if (table.createdBy === userId) {
|
||||
return { ok: true, table }
|
||||
}
|
||||
|
||||
const permission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId)
|
||||
const hasAccess =
|
||||
permission !== null &&
|
||||
(level === 'read' ||
|
||||
(level === 'write' && (permission === 'write' || permission === 'admin')) ||
|
||||
(level === 'admin' && permission === 'admin'))
|
||||
|
||||
return hasAccess ? { ok: true, table } : { ok: false, status: 403 }
|
||||
}
|
||||
|
||||
export function accessError(
|
||||
result: { ok: false; status: 404 | 403 },
|
||||
requestId: string,
|
||||
context?: string
|
||||
): NextResponse {
|
||||
const message = result.status === 404 ? 'Table not found' : 'Access denied'
|
||||
logger.warn(`[${requestId}] ${message}${context ? `: ${context}` : ''}`)
|
||||
return NextResponse.json({ error: message }, { status: result.status })
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a TableAccessDenied result to an appropriate HTTP response.
|
||||
* Use with checkTableAccess or checkTableWriteAccess.
|
||||
*/
|
||||
export function tableAccessError(
|
||||
result: TableAccessDenied,
|
||||
requestId: string,
|
||||
context?: string
|
||||
): NextResponse {
|
||||
const status = result.notFound ? 404 : 403
|
||||
const message = result.notFound ? 'Table not found' : (result.reason ?? 'Access denied')
|
||||
logger.warn(`[${requestId}] ${message}${context ? `: ${context}` : ''}`)
|
||||
return NextResponse.json({ error: message }, { status })
|
||||
}
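A minimal sketch of how a route handler could pair checkTableWriteAccess with tableAccessError. The handler name and body are placeholders; only the access-check pattern mirrors the helpers defined above.

// Hypothetical handler using the newer access helpers (pattern only, not an existing route).
async function handleWrite(tableId: string, userId: string, requestId: string) {
  const access = await checkTableWriteAccess(tableId, userId)
  if (!access.hasAccess) {
    // Maps notFound to 404, otherwise 403 with the reason, and logs with the request id.
    return tableAccessError(access, requestId, tableId)
  }
  const { table } = access
  // ... perform the write against table.id / table.workspaceId here ...
  return NextResponse.json({ success: true, data: { tableId: table.id } })
}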
export async function verifyTableWorkspace(tableId: string, workspaceId: string): Promise<boolean> {
|
||||
const table = await getTableById(tableId)
|
||||
return table?.workspaceId === workspaceId
|
||||
}
|
||||
|
||||
export function errorResponse(
|
||||
message: string,
|
||||
status: number,
|
||||
details?: unknown
|
||||
): NextResponse<ApiErrorResponse> {
|
||||
const body: ApiErrorResponse = { error: message }
|
||||
if (details !== undefined) {
|
||||
body.details = details
|
||||
}
|
||||
return NextResponse.json(body, { status })
|
||||
}
|
||||
|
||||
export function badRequestResponse(message: string, details?: unknown) {
|
||||
return errorResponse(message, 400, details)
|
||||
}
|
||||
|
||||
export function unauthorizedResponse(message = 'Authentication required') {
|
||||
return errorResponse(message, 401)
|
||||
}
|
||||
|
||||
export function forbiddenResponse(message = 'Access denied') {
|
||||
return errorResponse(message, 403)
|
||||
}
|
||||
|
||||
export function notFoundResponse(message = 'Resource not found') {
|
||||
return errorResponse(message, 404)
|
||||
}
|
||||
|
||||
export function serverErrorResponse(message = 'Internal server error') {
|
||||
return errorResponse(message, 500)
|
||||
}
|
||||
|
||||
export function normalizeColumn(col: ColumnDefinition): ColumnDefinition {
|
||||
return {
|
||||
name: col.name,
|
||||
type: col.type,
|
||||
required: col.required ?? false,
|
||||
unique: col.unique ?? false,
|
||||
}
|
||||
}
|
||||
@@ -11,6 +11,7 @@ import { checkAndBillOverageThreshold } from '@/lib/billing/threshold-billing'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { getCostMultiplier, isBillingEnabled } from '@/lib/core/config/feature-flags'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { enrichTableSchema } from '@/lib/table/llm/wand'
|
||||
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
|
||||
import { getModelPricing } from '@/providers/utils'
|
||||
|
||||
@@ -60,6 +61,7 @@ interface RequestBody {
|
||||
history?: ChatMessage[]
|
||||
workflowId?: string
|
||||
generationType?: string
|
||||
wandContext?: Record<string, unknown>
|
||||
}
|
||||
|
||||
function safeStringify(value: unknown): string {
|
||||
@@ -70,6 +72,38 @@ function safeStringify(value: unknown): string {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Wand enricher function type.
|
||||
* Enrichers add context to the system prompt based on generationType.
|
||||
*/
|
||||
type WandEnricher = (
|
||||
workspaceId: string | null,
|
||||
context: Record<string, unknown>
|
||||
) => Promise<string | null>
|
||||
|
||||
/**
|
||||
* Registry of wand enrichers by generationType.
|
||||
* Each enricher returns additional context to append to the system prompt.
|
||||
*/
|
||||
const wandEnrichers: Partial<Record<string, WandEnricher>> = {
|
||||
timestamp: async () => {
|
||||
const now = new Date()
|
||||
return `Current date and time context for reference:
|
||||
- Current UTC timestamp: ${now.toISOString()}
|
||||
- Current Unix timestamp (seconds): ${Math.floor(now.getTime() / 1000)}
|
||||
- Current Unix timestamp (milliseconds): ${now.getTime()}
|
||||
- Current date (UTC): ${now.toISOString().split('T')[0]}
|
||||
- Current year: ${now.getUTCFullYear()}
|
||||
- Current month: ${now.getUTCMonth() + 1}
|
||||
- Current day of month: ${now.getUTCDate()}
|
||||
- Current day of week: ${['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'][now.getUTCDay()]}
|
||||
|
||||
Use this context to calculate relative dates like "yesterday", "last week", "beginning of this month", etc.`
|
||||
},
|
||||
|
||||
'table-schema': enrichTableSchema,
|
||||
}
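Registering another enricher only requires a new entry keyed by its generationType. The sketch below is hypothetical (no such enricher exists in this change) and is meant only to show the WandEnricher signature in use.

// Hypothetical enricher: surface the workspace id to the model when one is available.
const workspaceHintEnricher: WandEnricher = async (workspaceId, context) => {
  if (!workspaceId) return null // returning null appends nothing to the system prompt
  const hint = typeof context.hint === 'string' ? context.hint : ''
  return `The current workspace id is ${workspaceId}. ${hint}`.trim()
}
// It would be wired up alongside the built-in entries, e.g.
// const wandEnrichers = { timestamp, 'table-schema': enrichTableSchema, 'workspace-hint': workspaceHintEnricher }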
async function updateUserStatsForWand(
|
||||
userId: string,
|
||||
usage: {
|
||||
@@ -159,7 +193,15 @@ export async function POST(req: NextRequest) {
|
||||
try {
|
||||
const body = (await req.json()) as RequestBody
|
||||
|
||||
const { prompt, systemPrompt, stream = false, history = [], workflowId, generationType } = body
|
||||
const {
|
||||
prompt,
|
||||
systemPrompt,
|
||||
stream = false,
|
||||
history = [],
|
||||
workflowId,
|
||||
generationType,
|
||||
wandContext = {},
|
||||
} = body
|
||||
|
||||
if (!prompt) {
|
||||
logger.warn(`[${requestId}] Invalid request: Missing prompt.`)
|
||||
@@ -227,20 +269,15 @@ export async function POST(req: NextRequest) {
|
||||
systemPrompt ||
|
||||
'You are a helpful AI assistant. Generate content exactly as requested by the user.'
|
||||
|
||||
if (generationType === 'timestamp') {
|
||||
const now = new Date()
|
||||
const currentTimeContext = `\n\nCurrent date and time context for reference:
|
||||
- Current UTC timestamp: ${now.toISOString()}
|
||||
- Current Unix timestamp (seconds): ${Math.floor(now.getTime() / 1000)}
|
||||
- Current Unix timestamp (milliseconds): ${now.getTime()}
|
||||
- Current date (UTC): ${now.toISOString().split('T')[0]}
|
||||
- Current year: ${now.getUTCFullYear()}
|
||||
- Current month: ${now.getUTCMonth() + 1}
|
||||
- Current day of month: ${now.getUTCDate()}
|
||||
- Current day of week: ${['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'][now.getUTCDay()]}
|
||||
|
||||
Use this context to calculate relative dates like "yesterday", "last week", "beginning of this month", etc.`
|
||||
finalSystemPrompt += currentTimeContext
|
||||
// Apply enricher if one exists for this generationType
|
||||
if (generationType) {
|
||||
const enricher = wandEnrichers[generationType]
|
||||
if (enricher) {
|
||||
const enrichment = await enricher(workspaceId, wandContext)
|
||||
if (enrichment) {
|
||||
finalSystemPrompt += `\n\n${enrichment}`
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (generationType === 'json-object') {
|
||||
|
||||
@@ -555,7 +555,7 @@ export function DocumentTagsModal({
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
variant='tertiary'
|
||||
variant={canSaveTag ? 'tertiary' : 'default'}
|
||||
onClick={saveDocumentTag}
|
||||
className='flex-1'
|
||||
disabled={!canSaveTag}
|
||||
|
||||
@@ -300,7 +300,7 @@ export function EditChunkModal({
|
||||
</Button>
|
||||
{userPermissions.canEdit && (
|
||||
<Button
|
||||
variant='tertiary'
|
||||
variant={hasUnsavedChanges ? 'tertiary' : 'default'}
|
||||
onClick={handleSaveContent}
|
||||
type='button'
|
||||
disabled={!isFormValid || isSaving || !hasUnsavedChanges || isNavigating}
|
||||
|
||||
@@ -39,6 +39,9 @@ export function RenameDocumentModal({
|
||||
const [isSubmitting, setIsSubmitting] = useState(false)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
|
||||
// Check if name has changed from initial value
|
||||
const hasChanges = name.trim() !== initialName.trim()
|
||||
|
||||
useEffect(() => {
|
||||
if (open) {
|
||||
setName(initialName)
|
||||
@@ -123,7 +126,11 @@ export function RenameDocumentModal({
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button variant='tertiary' type='submit' disabled={isSubmitting || !name?.trim()}>
|
||||
<Button
|
||||
variant={hasChanges ? 'tertiary' : 'default'}
|
||||
type='submit'
|
||||
disabled={isSubmitting || !name?.trim() || !hasChanges}
|
||||
>
|
||||
{isSubmitting ? 'Renaming...' : 'Rename'}
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useState } from 'react'
|
||||
import { useCallback, useRef, useState } from 'react'
|
||||
import { useParams, useRouter } from 'next/navigation'
|
||||
import { Badge, DocumentAttachment, Tooltip } from '@/components/emcn'
|
||||
import { Badge, Button, DocumentAttachment, Tooltip } from '@/components/emcn'
|
||||
import { BaseTagsModal } from '@/app/workspace/[workspaceId]/knowledge/[id]/components'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
|
||||
@@ -143,6 +143,7 @@ export function BaseCard({
|
||||
const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false)
|
||||
const [isTagsModalOpen, setIsTagsModalOpen] = useState(false)
|
||||
const [isDeleting, setIsDeleting] = useState(false)
|
||||
const menuButtonRef = useRef<HTMLButtonElement>(null)
|
||||
|
||||
const searchParams = new URLSearchParams({
|
||||
kbName: title,
|
||||
@@ -151,6 +152,23 @@ export function BaseCard({
|
||||
|
||||
const shortId = id ? `kb-${id.slice(0, 8)}` : ''
|
||||
|
||||
const handleMenuButtonClick = useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
e.stopPropagation()
|
||||
if (menuButtonRef.current) {
|
||||
const rect = menuButtonRef.current.getBoundingClientRect()
|
||||
const syntheticEvent = {
|
||||
preventDefault: () => {},
|
||||
stopPropagation: () => {},
|
||||
clientX: rect.right,
|
||||
clientY: rect.bottom,
|
||||
} as React.MouseEvent
|
||||
handleContextMenu(syntheticEvent)
|
||||
}
|
||||
},
|
||||
[handleContextMenu]
|
||||
)
|
||||
|
||||
const handleClick = useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
if (isContextMenuOpen) {
|
||||
@@ -223,9 +241,24 @@ export function BaseCard({
|
||||
<h3 className='min-w-0 flex-1 truncate font-medium text-[14px] text-[var(--text-primary)]'>
|
||||
{title}
|
||||
</h3>
|
||||
{shortId && (
|
||||
<Badge className='flex-shrink-0 rounded-[4px] text-[12px]'>{shortId}</Badge>
|
||||
)}
|
||||
<div className='flex items-center gap-[4px]'>
|
||||
{shortId && (
|
||||
<Badge className='flex-shrink-0 rounded-[4px] text-[12px]'>{shortId}</Badge>
|
||||
)}
|
||||
<Button
|
||||
ref={menuButtonRef}
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
className='h-[20px] w-[20px] flex-shrink-0 p-0 text-[var(--text-tertiary)]'
|
||||
onClick={handleMenuButtonClick}
|
||||
>
|
||||
<svg className='h-[14px] w-[14px]' viewBox='0 0 16 16' fill='currentColor'>
|
||||
<circle cx='3' cy='8' r='1.5' />
|
||||
<circle cx='8' cy='8' r='1.5' />
|
||||
<circle cx='13' cy='8' r='1.5' />
|
||||
</svg>
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className='flex flex-1 flex-col gap-[8px]'>
|
||||
|
||||
@@ -70,6 +70,12 @@ export function EditKnowledgeBaseModal({
|
||||
})
|
||||
|
||||
const nameValue = watch('name')
|
||||
const descriptionValue = watch('description')
|
||||
|
||||
// Check if form values have changed from initial values
|
||||
const hasChanges =
|
||||
nameValue?.trim() !== initialName.trim() ||
|
||||
(descriptionValue?.trim() || '') !== (initialDescription?.trim() || '')
|
||||
|
||||
useEffect(() => {
|
||||
if (open) {
|
||||
@@ -159,9 +165,9 @@ export function EditKnowledgeBaseModal({
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
variant='tertiary'
|
||||
variant={hasChanges ? 'tertiary' : 'default'}
|
||||
type='submit'
|
||||
disabled={isSubmitting || !nameValue?.trim()}
|
||||
disabled={isSubmitting || !nameValue?.trim() || !hasChanges}
|
||||
>
|
||||
{isSubmitting ? 'Saving...' : 'Save'}
|
||||
</Button>
|
||||
|
||||
@@ -265,12 +265,11 @@ export function Knowledge() {
|
||||
</div>
|
||||
</div>
|
||||
) : error ? (
|
||||
<div className='col-span-full flex h-64 items-center justify-center rounded-lg border border-muted-foreground/25 bg-muted/20'>
|
||||
<div className='text-center'>
|
||||
<p className='font-medium text-[var(--text-secondary)] text-sm'>
|
||||
Error loading knowledge bases
|
||||
</p>
|
||||
<p className='mt-1 text-[var(--text-muted)] text-xs'>{error}</p>
|
||||
<div className='col-span-full flex h-64 items-center justify-center'>
|
||||
<div className='text-[var(--text-error)]'>
|
||||
<span className='text-[13px]'>
|
||||
Error: {typeof error === 'string' ? error : 'Failed to load knowledge bases'}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
|
||||
@@ -0,0 +1,72 @@
|
||||
import { Plus } from 'lucide-react'
|
||||
import { Button, TableCell, TableRow } from '@/components/emcn'
|
||||
import { Skeleton } from '@/components/ui/skeleton'
|
||||
import type { ColumnDefinition } from '@/lib/table'
|
||||
|
||||
interface LoadingRowsProps {
|
||||
columns: ColumnDefinition[]
|
||||
}
|
||||
|
||||
export function LoadingRows({ columns }: LoadingRowsProps) {
|
||||
return (
|
||||
<>
|
||||
{Array.from({ length: 25 }).map((_, rowIndex) => (
|
||||
<TableRow key={rowIndex}>
|
||||
<TableCell>
|
||||
<Skeleton className='h-[14px] w-[14px]' />
|
||||
</TableCell>
|
||||
{columns.map((col, colIndex) => {
|
||||
const baseWidth =
|
||||
col.type === 'json'
|
||||
? 200
|
||||
: col.type === 'string'
|
||||
? 160
|
||||
: col.type === 'number'
|
||||
? 80
|
||||
: col.type === 'boolean'
|
||||
? 50
|
||||
: col.type === 'date'
|
||||
? 100
|
||||
: 120
|
||||
const variation = ((rowIndex + colIndex) % 3) * 20
|
||||
const width = baseWidth + variation
|
||||
|
||||
return (
|
||||
<TableCell key={col.name}>
|
||||
<Skeleton className='h-[16px]' style={{ width: `${width}px` }} />
|
||||
</TableCell>
|
||||
)
|
||||
})}
|
||||
</TableRow>
|
||||
))}
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
interface EmptyRowsProps {
|
||||
columnCount: number
|
||||
hasFilter: boolean
|
||||
onAddRow: () => void
|
||||
}
|
||||
|
||||
export function EmptyRows({ columnCount, hasFilter, onAddRow }: EmptyRowsProps) {
|
||||
return (
|
||||
<TableRow>
|
||||
<TableCell colSpan={columnCount + 1} className='h-[160px]'>
|
||||
<div className='flex h-full w-full items-center justify-center'>
|
||||
<div className='flex flex-col items-center gap-[12px]'>
|
||||
<span className='text-[13px] text-[var(--text-tertiary)]'>
|
||||
{hasFilter ? 'No rows match your filter' : 'No data'}
|
||||
</span>
|
||||
{!hasFilter && (
|
||||
<Button variant='default' size='sm' onClick={onAddRow}>
|
||||
<Plus className='mr-[4px] h-[12px] w-[12px]' />
|
||||
Add first row
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</TableCell>
|
||||
</TableRow>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,99 @@
|
||||
import type { ColumnDefinition } from '@/lib/table'
|
||||
import { STRING_TRUNCATE_LENGTH } from '../lib/constants'
|
||||
import type { CellViewerData } from '../lib/types'
|
||||
|
||||
interface CellRendererProps {
|
||||
value: unknown
|
||||
column: ColumnDefinition
|
||||
onCellClick: (columnName: string, value: unknown, type: CellViewerData['type']) => void
|
||||
}
|
||||
|
||||
export function CellRenderer({ value, column, onCellClick }: CellRendererProps) {
|
||||
const isNull = value === null || value === undefined
|
||||
|
||||
if (isNull) {
|
||||
return <span className='text-[var(--text-muted)] italic'>—</span>
|
||||
}
|
||||
|
||||
if (column.type === 'json') {
|
||||
const jsonStr = JSON.stringify(value)
|
||||
return (
|
||||
<button
|
||||
type='button'
|
||||
className='block max-w-[300px] cursor-pointer select-none truncate rounded-[4px] border border-[var(--border-1)] px-[6px] py-[2px] text-left font-mono text-[11px] text-[var(--text-secondary)] transition-colors hover:border-[var(--text-muted)] hover:text-[var(--text-primary)]'
|
||||
onClick={(e) => {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
onCellClick(column.name, value, 'json')
|
||||
}}
|
||||
title='Click to view full JSON'
|
||||
>
|
||||
{jsonStr}
|
||||
</button>
|
||||
)
|
||||
}
|
||||
|
||||
if (column.type === 'boolean') {
|
||||
const boolValue = Boolean(value)
|
||||
return (
|
||||
<span className={boolValue ? 'text-green-500' : 'text-[var(--text-tertiary)]'}>
|
||||
{boolValue ? 'true' : 'false'}
|
||||
</span>
|
||||
)
|
||||
}
|
||||
|
||||
if (column.type === 'number') {
|
||||
return (
|
||||
<span className='font-mono text-[12px] text-[var(--text-secondary)]'>{String(value)}</span>
|
||||
)
|
||||
}
|
||||
|
||||
if (column.type === 'date') {
|
||||
try {
|
||||
const date = new Date(String(value))
|
||||
const formatted = date.toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
})
|
||||
return (
|
||||
<button
|
||||
type='button'
|
||||
className='cursor-pointer select-none text-left text-[12px] text-[var(--text-secondary)] underline decoration-[var(--border-1)] decoration-dotted underline-offset-2 transition-colors hover:text-[var(--text-primary)] hover:decoration-[var(--text-muted)]'
|
||||
onClick={(e) => {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
onCellClick(column.name, value, 'date')
|
||||
}}
|
||||
title='Click to view ISO format'
|
||||
>
|
||||
{formatted}
|
||||
</button>
|
||||
)
|
||||
} catch {
|
||||
return <span className='text-[var(--text-primary)]'>{String(value)}</span>
|
||||
}
|
||||
}
|
||||
|
||||
const strValue = String(value)
|
||||
if (strValue.length > STRING_TRUNCATE_LENGTH) {
|
||||
return (
|
||||
<button
|
||||
type='button'
|
||||
className='block max-w-[300px] cursor-pointer select-none truncate text-left text-[var(--text-primary)] underline decoration-[var(--border-1)] decoration-dotted underline-offset-2 transition-colors hover:decoration-[var(--text-muted)]'
|
||||
onClick={(e) => {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
onCellClick(column.name, value, 'text')
|
||||
}}
|
||||
title='Click to view full text'
|
||||
>
|
||||
{strValue}
|
||||
</button>
|
||||
)
|
||||
}
|
||||
|
||||
return <span className='text-[var(--text-primary)]'>{strValue}</span>
|
||||
}
|
||||
@@ -0,0 +1,84 @@
|
||||
import { Copy, X } from 'lucide-react'
|
||||
import { Badge, Button, Modal, ModalBody, ModalContent } from '@/components/emcn'
|
||||
import type { CellViewerData } from '../lib/types'
|
||||
|
||||
interface CellViewerModalProps {
|
||||
cellViewer: CellViewerData | null
|
||||
onClose: () => void
|
||||
onCopy: () => void
|
||||
copied: boolean
|
||||
}
|
||||
|
||||
export function CellViewerModal({ cellViewer, onClose, onCopy, copied }: CellViewerModalProps) {
|
||||
if (!cellViewer) return null
|
||||
|
||||
return (
|
||||
<Modal open={!!cellViewer} onOpenChange={(open) => !open && onClose()}>
|
||||
<ModalContent className='w-[640px] duration-100'>
|
||||
<div className='flex items-center justify-between gap-[8px] px-[16px] py-[10px]'>
|
||||
<div className='flex min-w-0 items-center gap-[8px]'>
|
||||
<span className='truncate font-medium text-[14px] text-[var(--text-primary)]'>
|
||||
{cellViewer.columnName}
|
||||
</span>
|
||||
<Badge
|
||||
variant={
|
||||
cellViewer.type === 'json' ? 'blue' : cellViewer.type === 'date' ? 'purple' : 'gray'
|
||||
}
|
||||
size='sm'
|
||||
>
|
||||
{cellViewer.type === 'json' ? 'JSON' : cellViewer.type === 'date' ? 'Date' : 'Text'}
|
||||
</Badge>
|
||||
</div>
|
||||
<div className='flex shrink-0 items-center gap-[8px]'>
|
||||
<Button variant={copied ? 'tertiary' : 'default'} size='sm' onClick={onCopy}>
|
||||
<Copy className='mr-[4px] h-[12px] w-[12px]' />
|
||||
{copied ? 'Copied!' : 'Copy'}
|
||||
</Button>
|
||||
<Button variant='ghost' size='sm' onClick={onClose}>
|
||||
<X className='h-[14px] w-[14px]' />
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
<ModalBody className='p-0'>
|
||||
{cellViewer.type === 'json' ? (
|
||||
<pre className='m-[16px] max-h-[450px] overflow-auto rounded-[6px] border border-[var(--border)] bg-[var(--surface-4)] p-[16px] font-mono text-[12px] text-[var(--text-primary)] leading-[1.6]'>
|
||||
{JSON.stringify(cellViewer.value, null, 2)}
|
||||
</pre>
|
||||
) : cellViewer.type === 'date' ? (
|
||||
<div className='m-[16px] space-y-[12px]'>
|
||||
<div className='rounded-[6px] border border-[var(--border)] bg-[var(--surface-4)] p-[16px]'>
|
||||
<div className='mb-[6px] font-medium text-[11px] text-[var(--text-tertiary)] uppercase tracking-wide'>
|
||||
Formatted
|
||||
</div>
|
||||
<div className='text-[14px] text-[var(--text-primary)]'>
|
||||
{new Date(String(cellViewer.value)).toLocaleDateString('en-US', {
|
||||
weekday: 'long',
|
||||
year: 'numeric',
|
||||
month: 'long',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
second: '2-digit',
|
||||
timeZoneName: 'short',
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
<div className='rounded-[6px] border border-[var(--border)] bg-[var(--surface-4)] p-[16px]'>
|
||||
<div className='mb-[6px] font-medium text-[11px] text-[var(--text-tertiary)] uppercase tracking-wide'>
|
||||
ISO Format
|
||||
</div>
|
||||
<div className='font-mono text-[13px] text-[var(--text-secondary)]'>
|
||||
{String(cellViewer.value)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<div className='m-[16px] max-h-[450px] overflow-auto whitespace-pre-wrap break-words rounded-[6px] border border-[var(--border)] bg-[var(--surface-4)] p-[16px] text-[13px] text-[var(--text-primary)] leading-[1.7]'>
|
||||
{String(cellViewer.value)}
|
||||
</div>
|
||||
)}
|
||||
</ModalBody>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,49 @@
|
||||
import { Edit, Trash2 } from 'lucide-react'
|
||||
import {
|
||||
Popover,
|
||||
PopoverAnchor,
|
||||
PopoverContent,
|
||||
PopoverDivider,
|
||||
PopoverItem,
|
||||
} from '@/components/emcn'
|
||||
import type { ContextMenuState } from '../lib/types'
|
||||
|
||||
interface ContextMenuProps {
|
||||
contextMenu: ContextMenuState
|
||||
onClose: () => void
|
||||
onEdit: () => void
|
||||
onDelete: () => void
|
||||
}
|
||||
|
||||
export function ContextMenu({ contextMenu, onClose, onEdit, onDelete }: ContextMenuProps) {
|
||||
return (
|
||||
<Popover
|
||||
open={contextMenu.isOpen}
|
||||
onOpenChange={(open) => !open && onClose()}
|
||||
variant='secondary'
|
||||
size='sm'
|
||||
colorScheme='inverted'
|
||||
>
|
||||
<PopoverAnchor
|
||||
style={{
|
||||
position: 'fixed',
|
||||
left: `${contextMenu.position.x}px`,
|
||||
top: `${contextMenu.position.y}px`,
|
||||
width: '1px',
|
||||
height: '1px',
|
||||
}}
|
||||
/>
|
||||
<PopoverContent align='start' side='bottom' sideOffset={4}>
|
||||
<PopoverItem onClick={onEdit}>
|
||||
<Edit className='mr-[8px] h-[12px] w-[12px]' />
|
||||
Edit row
|
||||
</PopoverItem>
|
||||
<PopoverDivider />
|
||||
<PopoverItem onClick={onDelete} className='text-[var(--text-error)]'>
|
||||
<Trash2 className='mr-[8px] h-[12px] w-[12px]' />
|
||||
Delete row
|
||||
</PopoverItem>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,207 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useRef, useState } from 'react'
|
||||
import { ChevronRight } from 'lucide-react'
|
||||
import { Checkbox, Input, Textarea } from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import type { ColumnDefinition } from '@/lib/table'
|
||||
|
||||
interface EditableCellProps {
|
||||
value: unknown
|
||||
column: ColumnDefinition
|
||||
onChange: (value: unknown) => void
|
||||
isEditing?: boolean
|
||||
isNew?: boolean
|
||||
}
|
||||
|
||||
function formatValueForDisplay(value: unknown, type: string): string {
|
||||
if (value === null || value === undefined) return 'NULL'
|
||||
if (type === 'json') {
|
||||
return typeof value === 'string' ? value : JSON.stringify(value)
|
||||
}
|
||||
if (type === 'boolean') {
|
||||
return value ? 'TRUE' : 'FALSE'
|
||||
}
|
||||
if (type === 'date' && value) {
|
||||
try {
|
||||
const date = new Date(String(value))
|
||||
return date.toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
})
|
||||
} catch {
|
||||
return String(value)
|
||||
}
|
||||
}
|
||||
return String(value)
|
||||
}
|
||||
|
||||
function formatValueForInput(value: unknown, type: string): string {
|
||||
if (value === null || value === undefined) return ''
|
||||
if (type === 'json') {
|
||||
return typeof value === 'string' ? value : JSON.stringify(value, null, 2)
|
||||
}
|
||||
if (type === 'date' && value) {
|
||||
try {
|
||||
const date = new Date(String(value))
|
||||
return date.toISOString().split('T')[0]
|
||||
} catch {
|
||||
return String(value)
|
||||
}
|
||||
}
|
||||
return String(value)
|
||||
}
|
||||
|
||||
export function EditableCell({
|
||||
value,
|
||||
column,
|
||||
onChange,
|
||||
isEditing = false,
|
||||
isNew = false,
|
||||
}: EditableCellProps) {
|
||||
const [localValue, setLocalValue] = useState<unknown>(value)
|
||||
const [isActive, setIsActive] = useState(false)
|
||||
const inputRef = useRef<HTMLInputElement | HTMLTextAreaElement>(null)
|
||||
|
||||
useEffect(() => {
|
||||
setLocalValue(value)
|
||||
}, [value])
|
||||
|
||||
useEffect(() => {
|
||||
if (isActive && inputRef.current) {
|
||||
inputRef.current.focus()
|
||||
}
|
||||
}, [isActive])
|
||||
|
||||
const handleFocus = useCallback(() => {
|
||||
setIsActive(true)
|
||||
}, [])
|
||||
|
||||
const handleBlur = useCallback(() => {
|
||||
setIsActive(false)
|
||||
if (localValue !== value) {
|
||||
onChange(localValue)
|
||||
}
|
||||
}, [localValue, value, onChange])
|
||||
|
||||
const handleChange = useCallback((newValue: unknown) => {
|
||||
setLocalValue(newValue)
|
||||
}, [])
|
||||
|
||||
const handleKeyDown = useCallback(
|
||||
(e: React.KeyboardEvent) => {
|
||||
if (e.key === 'Enter' && !e.shiftKey && column.type !== 'json') {
|
||||
e.preventDefault()
|
||||
;(e.target as HTMLElement).blur()
|
||||
}
|
||||
if (e.key === 'Escape') {
|
||||
setLocalValue(value)
|
||||
;(e.target as HTMLElement).blur()
|
||||
}
|
||||
},
|
||||
[value, column.type]
|
||||
)
|
||||
|
||||
const isNull = value === null || value === undefined
|
||||
|
||||
// Boolean type - always show checkbox
|
||||
if (column.type === 'boolean') {
|
||||
return (
|
||||
<div className='flex items-center'>
|
||||
<Checkbox
|
||||
size='sm'
|
||||
checked={Boolean(localValue)}
|
||||
onCheckedChange={(checked) => {
|
||||
const newValue = checked === true
|
||||
setLocalValue(newValue)
|
||||
onChange(newValue)
|
||||
}}
|
||||
/>
|
||||
<span
|
||||
className={cn(
|
||||
'ml-[8px] text-[12px]',
|
||||
localValue ? 'text-green-500' : 'text-[var(--text-tertiary)]'
|
||||
)}
|
||||
>
|
||||
{localValue ? 'TRUE' : 'FALSE'}
|
||||
</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// JSON type - use textarea
|
||||
if (column.type === 'json') {
|
||||
if (isActive || isNew) {
|
||||
return (
|
||||
<Textarea
|
||||
ref={inputRef as React.RefObject<HTMLTextAreaElement>}
|
||||
value={formatValueForInput(localValue, column.type)}
|
||||
onChange={(e) => handleChange(e.target.value)}
|
||||
onFocus={handleFocus}
|
||||
onBlur={handleBlur}
|
||||
onKeyDown={handleKeyDown}
|
||||
className='h-[60px] min-w-[200px] resize-none font-mono text-[11px]'
|
||||
placeholder='{"key": "value"}'
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<button
|
||||
type='button'
|
||||
onClick={handleFocus}
|
||||
className={cn(
|
||||
'group flex max-w-[300px] cursor-pointer items-center truncate text-left font-mono text-[11px] transition-colors',
|
||||
isNull
|
||||
? 'text-[var(--text-muted)] italic'
|
||||
: 'text-[var(--text-secondary)] hover:text-[var(--text-primary)]'
|
||||
)}
|
||||
>
|
||||
<span className='truncate'>{formatValueForDisplay(value, column.type)}</span>
|
||||
<ChevronRight className='ml-[4px] h-[10px] w-[10px] opacity-0 group-hover:opacity-100' />
|
||||
</button>
|
||||
)
|
||||
}
|
||||
|
||||
// Active/editing state for other types
|
||||
if (isActive || isNew) {
|
||||
return (
|
||||
<Input
|
||||
ref={inputRef as React.RefObject<HTMLInputElement>}
|
||||
type={column.type === 'number' ? 'number' : column.type === 'date' ? 'date' : 'text'}
|
||||
value={formatValueForInput(localValue, column.type)}
|
||||
onChange={(e) => handleChange(e.target.value)}
|
||||
onFocus={handleFocus}
|
||||
onBlur={handleBlur}
|
||||
onKeyDown={handleKeyDown}
|
||||
className={cn(
|
||||
'h-[28px] min-w-[120px] text-[12px]',
|
||||
column.type === 'number' && 'font-mono'
|
||||
)}
|
||||
placeholder={isNull ? 'NULL' : ''}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
// Display state
|
||||
return (
|
||||
<button
|
||||
type='button'
|
||||
onClick={handleFocus}
|
||||
className={cn(
|
||||
'group flex max-w-[300px] cursor-pointer items-center truncate text-left text-[13px] transition-colors',
|
||||
isNull
|
||||
? 'text-[var(--text-muted)] italic'
|
||||
: column.type === 'number'
|
||||
? 'font-mono text-[12px] text-[var(--text-secondary)]'
|
||||
: 'text-[var(--text-primary)]'
|
||||
)}
|
||||
>
|
||||
<span className='truncate'>{formatValueForDisplay(value, column.type)}</span>
|
||||
<ChevronRight className='ml-[4px] h-[10px] w-[10px] opacity-0 group-hover:opacity-100' />
|
||||
</button>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,41 @@
|
||||
'use client'
|
||||
|
||||
import { X } from 'lucide-react'
|
||||
import { Button, TableCell, TableRow } from '@/components/emcn'
|
||||
import type { ColumnDefinition } from '@/lib/table'
|
||||
import type { TempRow } from '../hooks/use-inline-editing'
|
||||
import { EditableCell } from './editable-cell'
|
||||
|
||||
interface EditableRowProps {
|
||||
row: TempRow
|
||||
columns: ColumnDefinition[]
|
||||
onUpdateCell: (tempId: string, column: string, value: unknown) => void
|
||||
onRemove: (tempId: string) => void
|
||||
}
|
||||
|
||||
export function EditableRow({ row, columns, onUpdateCell, onRemove }: EditableRowProps) {
|
||||
return (
|
||||
<TableRow className='bg-amber-500/20 hover:bg-amber-500/30'>
|
||||
<TableCell className='w-[40px]'>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={() => onRemove(row.tempId)}
|
||||
className='h-[20px] w-[20px] p-0 text-[var(--text-tertiary)] hover:text-[var(--text-primary)]'
|
||||
>
|
||||
<X className='h-[12px] w-[12px]' />
|
||||
</Button>
|
||||
</TableCell>
|
||||
{columns.map((column) => (
|
||||
<TableCell key={column.name}>
|
||||
<EditableCell
|
||||
value={row.data[column.name]}
|
||||
column={column}
|
||||
onChange={(value) => onUpdateCell(row.tempId, column.name, value)}
|
||||
isNew
|
||||
/>
|
||||
</TableCell>
|
||||
))}
|
||||
</TableRow>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,169 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { Plus, X } from 'lucide-react'
|
||||
import { Button, Combobox, Input } from '@/components/emcn'
|
||||
import type { FilterRule } from '@/lib/table/query-builder/constants'
|
||||
import { filterRulesToFilter } from '@/lib/table/query-builder/converters'
|
||||
import { useFilterBuilder } from '@/lib/table/query-builder/use-query-builder'
|
||||
import type { ColumnDefinition } from '@/lib/table/types'
|
||||
import type { QueryOptions } from '../lib/types'
|
||||
|
||||
type Column = Pick<ColumnDefinition, 'name' | 'type'>
|
||||
|
||||
interface FilterPanelProps {
|
||||
columns: Column[]
|
||||
isVisible: boolean
|
||||
onApply: (options: QueryOptions) => void
|
||||
onClose: () => void
|
||||
isLoading?: boolean
|
||||
}
|
||||
|
||||
// Operators that don't need a value input
|
||||
const NO_VALUE_OPERATORS = ['is_null', 'is_not_null']
|
||||
|
||||
// Options for the first filter row
|
||||
const WHERE_OPTIONS = [{ value: 'where', label: 'where' }]
|
||||
|
||||
export function FilterPanel({
|
||||
columns,
|
||||
isVisible,
|
||||
onApply,
|
||||
onClose,
|
||||
isLoading = false,
|
||||
}: FilterPanelProps) {
|
||||
const [rules, setRules] = useState<FilterRule[]>([])
|
||||
|
||||
const columnOptions = useMemo(
|
||||
() => columns.map((col) => ({ value: col.name, label: col.name })),
|
||||
[columns]
|
||||
)
|
||||
|
||||
const {
|
||||
comparisonOptions,
|
||||
logicalOptions,
|
||||
addRule: handleAddRule,
|
||||
removeRule: handleRemoveRule,
|
||||
updateRule: handleUpdateRule,
|
||||
} = useFilterBuilder({
|
||||
columns: columnOptions,
|
||||
rules,
|
||||
setRules,
|
||||
})
|
||||
|
||||
// Auto-add first filter when panel opens with no filters
|
||||
useEffect(() => {
|
||||
if (isVisible && rules.length === 0 && columns.length > 0) {
|
||||
handleAddRule()
|
||||
}
|
||||
}, [isVisible, rules.length, columns.length, handleAddRule])
|
||||
|
||||
const handleApply = useCallback(() => {
|
||||
const filter = filterRulesToFilter(rules)
|
||||
onApply({ filter, sort: null })
|
||||
}, [rules, onApply])
|
||||
|
||||
const handleClear = useCallback(() => {
|
||||
setRules([])
|
||||
onApply({ filter: null, sort: null })
|
||||
onClose()
|
||||
}, [onApply, onClose])
|
||||
|
||||
if (!isVisible) {
|
||||
return null
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='flex shrink-0 flex-col gap-2 border-[var(--border)] border-b px-4 py-3'>
|
||||
{rules.map((rule, index) => {
|
||||
const needsValue = !NO_VALUE_OPERATORS.includes(rule.operator)
|
||||
const isFirst = index === 0
|
||||
|
||||
return (
|
||||
<div key={rule.id} className='flex items-center gap-2'>
|
||||
{/* Remove button */}
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={() => handleRemoveRule(rule.id)}
|
||||
aria-label='Remove filter'
|
||||
className='shrink-0 p-1'
|
||||
>
|
||||
<X className='h-3.5 w-3.5' />
|
||||
</Button>
|
||||
|
||||
{/* Where / And / Or */}
|
||||
<div className='w-20 shrink-0'>
|
||||
{isFirst ? (
|
||||
<Combobox size='sm' options={WHERE_OPTIONS} value='where' onChange={() => {}} />
|
||||
) : (
|
||||
<Combobox
|
||||
size='sm'
|
||||
options={logicalOptions}
|
||||
value={rule.logicalOperator}
|
||||
onChange={(value) =>
|
||||
handleUpdateRule(rule.id, 'logicalOperator', value as 'and' | 'or')
|
||||
}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Column */}
|
||||
<div className='w-[140px] shrink-0'>
|
||||
<Combobox
|
||||
size='sm'
|
||||
options={columnOptions}
|
||||
value={rule.column}
|
||||
onChange={(value) => handleUpdateRule(rule.id, 'column', value)}
|
||||
placeholder='Column'
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Operator */}
|
||||
<div className='w-[120px] shrink-0'>
|
||||
<Combobox
|
||||
size='sm'
|
||||
options={comparisonOptions}
|
||||
value={rule.operator}
|
||||
onChange={(value) => handleUpdateRule(rule.id, 'operator', value)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Value (only if operator needs it) */}
|
||||
{needsValue && (
|
||||
<Input
|
||||
className='w-[160px] shrink-0'
|
||||
value={rule.value}
|
||||
onChange={(e) => handleUpdateRule(rule.id, 'value', e.target.value)}
|
||||
placeholder='Enter a value'
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Enter') {
|
||||
handleApply()
|
||||
}
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Actions - only on first row */}
|
||||
{isFirst && (
|
||||
<div className='ml-1 flex items-center gap-1'>
|
||||
<Button variant='tertiary' size='sm' onClick={handleApply} disabled={isLoading}>
|
||||
Apply
|
||||
</Button>
|
||||
|
||||
<Button variant='ghost' size='sm' onClick={handleAddRule}>
|
||||
<Plus className='h-3 w-3' />
|
||||
Add filter
|
||||
</Button>
|
||||
|
||||
<Button variant='ghost' size='sm' onClick={handleClear}>
|
||||
Clear filters
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,11 @@
|
||||
export * from './body-states'
|
||||
export * from './cell-renderer'
|
||||
export * from './cell-viewer-modal'
|
||||
export * from './context-menu'
|
||||
export * from './editable-cell'
|
||||
export * from './editable-row'
|
||||
export * from './filter-panel'
|
||||
export * from './row-modal'
|
||||
export * from './schema-modal'
|
||||
export * from './table-toolbar'
|
||||
export * from './table-viewer'
|
||||
@@ -0,0 +1,399 @@
|
||||
'use client'
|
||||
|
||||
import { useEffect, useMemo, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { useParams } from 'next/navigation'
|
||||
import {
|
||||
Badge,
|
||||
Button,
|
||||
Checkbox,
|
||||
Input,
|
||||
Label,
|
||||
Modal,
|
||||
ModalBody,
|
||||
ModalContent,
|
||||
ModalFooter,
|
||||
ModalHeader,
|
||||
Textarea,
|
||||
} from '@/components/emcn'
|
||||
import type { ColumnDefinition, TableInfo, TableRow } from '@/lib/table'
|
||||
|
||||
const logger = createLogger('RowModal')
|
||||
|
||||
export interface RowModalProps {
|
||||
mode: 'add' | 'edit' | 'delete'
|
||||
isOpen: boolean
|
||||
onClose: () => void
|
||||
table: TableInfo
|
||||
row?: TableRow
|
||||
rowIds?: string[]
|
||||
onSuccess: () => void
|
||||
}
|
||||
|
||||
function createInitialRowData(columns: ColumnDefinition[]): Record<string, unknown> {
|
||||
const initial: Record<string, unknown> = {}
|
||||
columns.forEach((col) => {
|
||||
if (col.type === 'boolean') {
|
||||
initial[col.name] = false
|
||||
} else {
|
||||
initial[col.name] = ''
|
||||
}
|
||||
})
|
||||
return initial
|
||||
}
|
||||
|
||||
function cleanRowData(
|
||||
columns: ColumnDefinition[],
|
||||
rowData: Record<string, unknown>
|
||||
): Record<string, unknown> {
|
||||
const cleanData: Record<string, unknown> = {}
|
||||
|
||||
columns.forEach((col) => {
|
||||
const value = rowData[col.name]
|
||||
if (col.type === 'number') {
|
||||
cleanData[col.name] = value === '' ? null : Number(value)
|
||||
} else if (col.type === 'json') {
|
||||
if (typeof value === 'string') {
|
||||
if (value === '') {
|
||||
cleanData[col.name] = null
|
||||
} else {
|
||||
try {
|
||||
cleanData[col.name] = JSON.parse(value)
|
||||
} catch {
|
||||
throw new Error(`Invalid JSON for field: ${col.name}`)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
cleanData[col.name] = value
|
||||
}
|
||||
} else if (col.type === 'boolean') {
|
||||
cleanData[col.name] = Boolean(value)
|
||||
} else {
|
||||
cleanData[col.name] = value || null
|
||||
}
|
||||
})
|
||||
|
||||
return cleanData
|
||||
}
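A worked example of what cleanRowData produces, derived from the branches above: empty strings become null for number columns, JSON strings are parsed (or throw on invalid JSON), booleans are coerced with Boolean(), and other empty values fall back to null.

// Example input/output for cleanRowData (values follow the logic above):
// columns: [{ name: 'age', type: 'number' }, { name: 'meta', type: 'json' }, { name: 'active', type: 'boolean' }, { name: 'note', type: 'string' }]
// rowData: { age: '42', meta: '{"tag":"vip"}', active: 'yes', note: '' }
// result:  { age: 42, meta: { tag: 'vip' }, active: true, note: null }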
function formatValueForInput(value: unknown, type: string): string {
|
||||
if (value === null || value === undefined) return ''
|
||||
if (type === 'json') {
|
||||
return typeof value === 'string' ? value : JSON.stringify(value, null, 2)
|
||||
}
|
||||
if (type === 'date' && value) {
|
||||
try {
|
||||
const date = new Date(String(value))
|
||||
return date.toISOString().split('T')[0]
|
||||
} catch {
|
||||
return String(value)
|
||||
}
|
||||
}
|
||||
return String(value)
|
||||
}
|
||||
|
||||
function isFieldEmpty(value: unknown, type: string): boolean {
|
||||
if (value === null || value === undefined) return true
|
||||
if (type === 'boolean') return false // booleans always have a value (true/false)
|
||||
if (typeof value === 'string') return value.trim() === ''
|
||||
return false
|
||||
}
|
||||
|
||||
export function RowModal({ mode, isOpen, onClose, table, row, rowIds, onSuccess }: RowModalProps) {
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
|
||||
const schema = table?.schema
|
||||
const columns = schema?.columns || []
|
||||
|
||||
const [rowData, setRowData] = useState<Record<string, unknown>>({})
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [isSubmitting, setIsSubmitting] = useState(false)
|
||||
|
||||
// Check if all required fields are filled
|
||||
const hasRequiredFields = useMemo(() => {
|
||||
const requiredColumns = columns.filter((col) => col.required)
|
||||
return requiredColumns.every((col) => !isFieldEmpty(rowData[col.name], col.type))
|
||||
}, [columns, rowData])
|
||||
|
||||
// Initialize form data based on mode
|
||||
useEffect(() => {
|
||||
if (!isOpen) return
|
||||
|
||||
if (mode === 'add' && columns.length > 0) {
|
||||
setRowData(createInitialRowData(columns))
|
||||
} else if (mode === 'edit' && row) {
|
||||
setRowData(row.data)
|
||||
}
|
||||
}, [isOpen, mode, columns, row])
|
||||
|
||||
const handleFormSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault()
|
||||
setError(null)
|
||||
setIsSubmitting(true)
|
||||
|
||||
try {
|
||||
const cleanData = cleanRowData(columns, rowData)
|
||||
|
||||
if (mode === 'add') {
|
||||
const res = await fetch(`/api/table/${table?.id}/rows`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ workspaceId, data: cleanData }),
|
||||
})
|
||||
|
||||
const result: { error?: string } = await res.json()
|
||||
if (!res.ok) {
|
||||
throw new Error(result.error || 'Failed to add row')
|
||||
}
|
||||
} else if (mode === 'edit' && row) {
|
||||
const res = await fetch(`/api/table/${table?.id}/rows/${row.id}`, {
|
||||
method: 'PATCH',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ workspaceId, data: cleanData }),
|
||||
})
|
||||
|
||||
const result: { error?: string } = await res.json()
|
||||
if (!res.ok) {
|
||||
throw new Error(result.error || 'Failed to update row')
|
||||
}
|
||||
}
|
||||
|
||||
onSuccess()
|
||||
} catch (err) {
|
||||
logger.error(`Failed to ${mode} row:`, err)
|
||||
setError(err instanceof Error ? err.message : `Failed to ${mode} row`)
|
||||
} finally {
|
||||
setIsSubmitting(false)
|
||||
}
|
||||
}
|
||||
|
||||
const handleDelete = async () => {
|
||||
setError(null)
|
||||
setIsSubmitting(true)
|
||||
|
||||
const idsToDelete = rowIds ?? (row ? [row.id] : [])
|
||||
|
||||
try {
|
||||
if (idsToDelete.length === 1) {
|
||||
const res = await fetch(`/api/table/${table?.id}/rows/${idsToDelete[0]}`, {
|
||||
method: 'DELETE',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ workspaceId }),
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
const result: { error?: string } = await res.json()
|
||||
throw new Error(result.error || 'Failed to delete row')
|
||||
}
|
||||
} else {
|
||||
const results = await Promise.allSettled(
|
||||
idsToDelete.map(async (rowId) => {
|
||||
const res = await fetch(`/api/table/${table?.id}/rows/${rowId}`, {
|
||||
method: 'DELETE',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ workspaceId }),
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
const result: { error?: string } = await res.json().catch(() => ({}))
|
||||
throw new Error(result.error || `Failed to delete row ${rowId}`)
|
||||
}
|
||||
|
||||
return rowId
|
||||
})
|
||||
)
|
||||
|
||||
const failures = results.filter((r) => r.status === 'rejected')
|
||||
|
||||
if (failures.length > 0) {
|
||||
const failureCount = failures.length
|
||||
const totalCount = idsToDelete.length
|
||||
const successCount = totalCount - failureCount
|
||||
const firstError =
|
||||
failures[0].status === 'rejected' ? failures[0].reason?.message || 'Unknown error' : ''
|
||||
|
||||
throw new Error(
|
||||
`Failed to delete ${failureCount} of ${totalCount} row(s)${successCount > 0 ? ` (${successCount} deleted successfully)` : ''}. ${firstError}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
onSuccess()
|
||||
} catch (err) {
|
||||
logger.error('Failed to delete row(s):', err)
|
||||
setError(err instanceof Error ? err.message : 'Failed to delete row(s)')
|
||||
} finally {
|
||||
setIsSubmitting(false)
|
||||
}
|
||||
}
|
||||
|
||||
const handleClose = () => {
|
||||
setRowData({})
|
||||
setError(null)
|
||||
onClose()
|
||||
}
|
||||
|
||||
// Delete mode UI
|
||||
if (mode === 'delete') {
|
||||
const deleteCount = rowIds?.length ?? (row ? 1 : 0)
|
||||
const isSingleRow = deleteCount === 1
|
||||
|
||||
return (
|
||||
<Modal open={isOpen} onOpenChange={handleClose}>
|
||||
<ModalContent size='sm'>
|
||||
<ModalHeader>Delete {isSingleRow ? 'Row' : `${deleteCount} Rows`}</ModalHeader>
|
||||
<ModalBody>
|
||||
<ErrorMessage error={error} />
|
||||
<p className='text-[12px] text-[var(--text-secondary)]'>
|
||||
Are you sure you want to delete{' '}
|
||||
<span className='font-medium text-[var(--text-primary)]'>
|
||||
{isSingleRow ? '1 row' : `${deleteCount} rows`}
|
||||
</span>
|
||||
? This will permanently remove the data.{' '}
|
||||
<span className='text-[var(--text-error)]'>This action cannot be undone.</span>
|
||||
</p>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<Button variant='default' onClick={handleClose} disabled={isSubmitting}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button variant='destructive' onClick={handleDelete} disabled={isSubmitting}>
|
||||
{isSubmitting ? 'Deleting...' : 'Delete'}
|
||||
</Button>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
)
|
||||
}
|
||||
|
||||
const isAddMode = mode === 'add'
|
||||
|
||||
return (
|
||||
<Modal open={isOpen} onOpenChange={handleClose}>
|
||||
<ModalContent className='max-w-[480px]'>
|
||||
<ModalHeader>{isAddMode ? 'Add New Row' : 'Edit Row'}</ModalHeader>
|
||||
<ModalBody className='max-h-[60vh] space-y-[12px] overflow-y-auto'>
|
||||
<ErrorMessage error={error} />
|
||||
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
{columns.map((column) => (
|
||||
<ColumnField
|
||||
key={column.name}
|
||||
column={column}
|
||||
value={rowData[column.name]}
|
||||
onChange={(value) => setRowData((prev) => ({ ...prev, [column.name]: value }))}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<Button variant='default' onClick={handleClose} disabled={isSubmitting}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
variant='tertiary'
|
||||
onClick={handleFormSubmit}
|
||||
disabled={isSubmitting || !hasRequiredFields}
|
||||
>
|
||||
{isSubmitting
|
||||
? isAddMode
|
||||
? 'Adding...'
|
||||
: 'Updating...'
|
||||
: isAddMode
|
||||
? 'Add Row'
|
||||
: 'Update Row'}
|
||||
</Button>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
)
|
||||
}
|
||||
|
||||
function ErrorMessage({ error }: { error: string | null }) {
|
||||
if (!error) return null
|
||||
|
||||
return (
|
||||
<div className='rounded-[8px] border border-[var(--status-error-border)] bg-[var(--status-error-bg)] px-[14px] py-[12px] text-[13px] text-[var(--status-error-text)]'>
|
||||
{error}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
interface ColumnFieldProps {
|
||||
column: ColumnDefinition
|
||||
value: unknown
|
||||
onChange: (value: unknown) => void
|
||||
}
|
||||
|
||||
function ColumnField({ column, value, onChange }: ColumnFieldProps) {
|
||||
const renderInput = () => {
|
||||
if (column.type === 'boolean') {
|
||||
return (
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<Checkbox
|
||||
id={column.name}
|
||||
checked={Boolean(value)}
|
||||
onCheckedChange={(checked) => onChange(checked === true)}
|
||||
/>
|
||||
<Label
|
||||
htmlFor={column.name}
|
||||
className='font-normal text-[13px] text-[var(--text-tertiary)]'
|
||||
>
|
||||
{value ? 'True' : 'False'}
|
||||
</Label>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (column.type === 'json') {
|
||||
return (
|
||||
<Textarea
|
||||
id={column.name}
|
||||
value={formatValueForInput(value, column.type)}
|
||||
onChange={(e) => onChange(e.target.value)}
|
||||
placeholder='{"key": "value"}'
|
||||
rows={3}
|
||||
className='font-mono text-[12px]'
|
||||
required={column.required}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<Input
|
||||
id={column.name}
|
||||
type={column.type === 'number' ? 'number' : column.type === 'date' ? 'date' : 'text'}
|
||||
value={formatValueForInput(value, column.type)}
|
||||
onChange={(e) => onChange(e.target.value)}
|
||||
placeholder={`Enter ${column.name}`}
|
||||
required={column.required}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='overflow-hidden rounded-[4px] border border-[var(--border-1)]'>
|
||||
<div className='flex items-center justify-between bg-[var(--surface-4)] px-[10px] py-[5px]'>
|
||||
<div className='flex min-w-0 flex-1 items-center gap-[8px]'>
|
||||
<span className='block truncate font-medium text-[14px] text-[var(--text-tertiary)]'>
|
||||
{column.name}
|
||||
{column.required && <span className='text-[var(--text-error)]'> *</span>}
|
||||
</span>
|
||||
<Badge size='sm'>{column.type}</Badge>
|
||||
{column.unique && (
|
||||
<Badge size='sm' variant='gray-secondary'>
|
||||
unique
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
<div className='border-[var(--border-1)] border-t px-[10px] pt-[6px] pb-[10px]'>
|
||||
<div className='flex flex-col gap-[6px]'>
|
||||
<Label className='text-[13px]'>Value</Label>
|
||||
{renderInput()}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
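A minimal sketch (not part of the diff; the column list and values are invented) of how the cleanRowData normalization above behaves for number, JSON, and boolean columns:

// Illustrative sketch only — mirrors the normalization rules in cleanRowData above.
// The real helper also wraps JSON.parse in a try/catch and rethrows a field-specific error.
type SketchColumn = { name: string; type: 'string' | 'number' | 'boolean' | 'date' | 'json' }

function normalizeSketch(columns: SketchColumn[], raw: Record<string, unknown>) {
  const out: Record<string, unknown> = {}
  for (const col of columns) {
    const value = raw[col.name]
    if (col.type === 'number') {
      out[col.name] = value === '' ? null : Number(value) // empty input becomes null
    } else if (col.type === 'json') {
      out[col.name] = typeof value === 'string' ? (value === '' ? null : JSON.parse(value)) : value
    } else if (col.type === 'boolean') {
      out[col.name] = Boolean(value)
    } else {
      out[col.name] = value || null // empty strings collapse to null
    }
  }
  return out
}

// normalizeSketch(
//   [{ name: 'age', type: 'number' }, { name: 'meta', type: 'json' }, { name: 'active', type: 'boolean' }],
//   { age: '42', meta: '{"plan":"pro"}', active: true }
// ) returns { age: 42, meta: { plan: 'pro' }, active: true }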
|
||||
@@ -0,0 +1,79 @@
|
||||
import {
|
||||
Badge,
|
||||
Button,
|
||||
Modal,
|
||||
ModalBody,
|
||||
ModalContent,
|
||||
ModalFooter,
|
||||
ModalHeader,
|
||||
Table,
|
||||
TableBody,
|
||||
TableCell,
|
||||
TableHead,
|
||||
TableHeader,
|
||||
TableRow,
|
||||
} from '@/components/emcn'
|
||||
import type { ColumnDefinition } from '@/lib/table'
|
||||
|
||||
interface SchemaModalProps {
|
||||
isOpen: boolean
|
||||
onClose: () => void
|
||||
columns: ColumnDefinition[]
|
||||
}
|
||||
|
||||
export function SchemaModal({ isOpen, onClose, columns }: SchemaModalProps) {
|
||||
return (
|
||||
<Modal open={isOpen} onOpenChange={onClose}>
|
||||
<ModalContent size='sm'>
|
||||
<ModalHeader>Table Schema</ModalHeader>
|
||||
<ModalBody>
|
||||
<div className='max-h-[400px] overflow-auto'>
|
||||
<Table>
|
||||
<TableHeader>
|
||||
<TableRow>
|
||||
<TableHead className='w-[180px]'>Column</TableHead>
|
||||
<TableHead className='w-[100px]'>Type</TableHead>
|
||||
<TableHead>Constraints</TableHead>
|
||||
</TableRow>
|
||||
</TableHeader>
|
||||
<TableBody>
|
||||
{columns.map((column) => (
|
||||
<TableRow key={column.name}>
|
||||
<TableCell>{column.name}</TableCell>
|
||||
<TableCell>
|
||||
<Badge variant='gray-secondary' size='sm'>
|
||||
{column.type}
|
||||
</Badge>
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<div className='flex gap-[6px]'>
|
||||
{column.required && (
|
||||
<Badge variant='gray-secondary' size='sm'>
|
||||
required
|
||||
</Badge>
|
||||
)}
|
||||
{column.unique && (
|
||||
<Badge variant='gray-secondary' size='sm'>
|
||||
unique
|
||||
</Badge>
|
||||
)}
|
||||
{!column.required && !column.unique && (
|
||||
<span className='text-[var(--text-muted)]'>—</span>
|
||||
)}
|
||||
</div>
|
||||
</TableCell>
|
||||
</TableRow>
|
||||
))}
|
||||
</TableBody>
|
||||
</Table>
|
||||
</div>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<Button variant='default' onClick={onClose}>
|
||||
Close
|
||||
</Button>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,194 @@
|
||||
'use client'
|
||||
|
||||
import {
|
||||
ChevronLeft,
|
||||
ChevronRight,
|
||||
Filter,
|
||||
MoreHorizontal,
|
||||
Plus,
|
||||
RefreshCw,
|
||||
Trash2,
|
||||
} from 'lucide-react'
|
||||
import {
|
||||
Button,
|
||||
Popover,
|
||||
PopoverContent,
|
||||
PopoverItem,
|
||||
PopoverTrigger,
|
||||
Tooltip,
|
||||
} from '@/components/emcn'
|
||||
import { Skeleton } from '@/components/ui/skeleton'
|
||||
|
||||
interface TableToolbarProps {
|
||||
tableName: string
|
||||
totalCount: number
|
||||
isLoading: boolean
|
||||
onNavigateBack: () => void
|
||||
onShowSchema: () => void
|
||||
onRefresh: () => void
|
||||
showFilters: boolean
|
||||
onToggleFilters: () => void
|
||||
onAddRecord: () => void
|
||||
selectedCount: number
|
||||
onDeleteSelected: () => void
|
||||
onClearSelection: () => void
|
||||
hasPendingChanges: boolean
|
||||
onSaveChanges: () => void
|
||||
onDiscardChanges: () => void
|
||||
isSaving: boolean
|
||||
currentPage: number
|
||||
totalPages: number
|
||||
onPreviousPage: () => void
|
||||
onNextPage: () => void
|
||||
}
|
||||
|
||||
export function TableToolbar({
|
||||
tableName,
|
||||
totalCount,
|
||||
isLoading,
|
||||
onNavigateBack,
|
||||
onShowSchema,
|
||||
onRefresh,
|
||||
showFilters,
|
||||
onToggleFilters,
|
||||
onAddRecord,
|
||||
selectedCount,
|
||||
onDeleteSelected,
|
||||
onClearSelection,
|
||||
hasPendingChanges,
|
||||
onSaveChanges,
|
||||
onDiscardChanges,
|
||||
isSaving,
|
||||
currentPage,
|
||||
totalPages,
|
||||
onPreviousPage,
|
||||
onNextPage,
|
||||
}: TableToolbarProps) {
|
||||
const hasSelection = selectedCount > 0
|
||||
|
||||
return (
|
||||
<div className='flex h-[48px] shrink-0 items-center justify-between border-[var(--border)] border-b bg-[var(--surface-2)] px-[16px]'>
|
||||
{/* Left section: Navigation and table info */}
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<button
|
||||
onClick={onNavigateBack}
|
||||
className='text-[13px] text-[var(--text-tertiary)] transition-colors hover:text-[var(--text-primary)]'
|
||||
>
|
||||
Tables
|
||||
</button>
|
||||
<span className='text-[var(--text-muted)]'>/</span>
|
||||
<span className='font-medium text-[13px] text-[var(--text-primary)]'>{tableName}</span>
|
||||
</div>
|
||||
|
||||
{/* Center section: Main actions */}
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
{/* Pagination controls */}
|
||||
<div className='flex items-center gap-[2px]'>
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={onPreviousPage}
|
||||
disabled={currentPage === 0 || isLoading}
|
||||
>
|
||||
<ChevronLeft className='h-[14px] w-[14px]' />
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>Previous page</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={onNextPage}
|
||||
disabled={currentPage >= totalPages - 1 || isLoading}
|
||||
>
|
||||
<ChevronRight className='h-[14px] w-[14px]' />
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>Next page</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
|
||||
<div className='mx-[4px] h-[20px] w-[1px] bg-[var(--border)]' />
|
||||
|
||||
{/* Filters toggle */}
|
||||
<Button variant={showFilters ? 'secondary' : 'ghost'} size='sm' onClick={onToggleFilters}>
|
||||
<Filter className='mr-[4px] h-[12px] w-[12px]' />
|
||||
Filters
|
||||
</Button>
|
||||
|
||||
<div className='mx-[4px] h-[20px] w-[1px] bg-[var(--border)]' />
|
||||
|
||||
{/* Pending changes actions */}
|
||||
{hasPendingChanges ? (
|
||||
<>
|
||||
<Button variant='tertiary' size='sm' onClick={onSaveChanges} disabled={isSaving}>
|
||||
{isSaving ? 'Saving...' : 'Save changes'}
|
||||
</Button>
|
||||
<Button variant='ghost' size='sm' onClick={onDiscardChanges} disabled={isSaving}>
|
||||
Discard changes
|
||||
</Button>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
{/* Add record */}
|
||||
<Button variant='default' size='sm' onClick={onAddRecord}>
|
||||
<Plus className='mr-[4px] h-[12px] w-[12px]' />
|
||||
Add record
|
||||
</Button>
|
||||
|
||||
{/* Delete selected */}
|
||||
{hasSelection && (
|
||||
<Button variant='destructive' size='sm' onClick={onDeleteSelected}>
|
||||
<Trash2 className='mr-[4px] h-[12px] w-[12px]' />
|
||||
Delete {selectedCount} {selectedCount === 1 ? 'record' : 'records'}
|
||||
</Button>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Clear selection */}
|
||||
{hasSelection && !hasPendingChanges && (
|
||||
<Button variant='ghost' size='sm' onClick={onClearSelection}>
|
||||
Clear selection
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Right section: Row count and utilities */}
|
||||
<div className='flex items-center gap-[6px]'>
|
||||
{isLoading ? (
|
||||
<Skeleton className='h-[16px] w-[50px]' />
|
||||
) : (
|
||||
<span className='text-[13px] text-[var(--text-tertiary)]'>
|
||||
{totalCount} {totalCount === 1 ? 'row' : 'rows'}
|
||||
</span>
|
||||
)}
|
||||
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button variant='ghost' size='sm' onClick={onRefresh} disabled={isLoading}>
|
||||
<RefreshCw className='h-[14px] w-[14px]' />
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>Refresh</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
|
||||
<Popover>
|
||||
<PopoverTrigger asChild>
|
||||
<Button variant='ghost' size='sm'>
|
||||
<MoreHorizontal className='h-[14px] w-[14px]' />
|
||||
</Button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent align='end' className='w-[160px]'>
|
||||
<PopoverItem onClick={onShowSchema}>View Schema</PopoverItem>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,331 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useState } from 'react'
|
||||
import { useParams, useRouter } from 'next/navigation'
|
||||
import {
|
||||
Badge,
|
||||
Checkbox,
|
||||
Table,
|
||||
TableBody,
|
||||
TableCell,
|
||||
TableHead,
|
||||
TableHeader,
|
||||
TableRow,
|
||||
} from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { useContextMenu, useInlineEditing, useRowSelection, useTableData } from '../hooks'
|
||||
import type { CellViewerData, QueryOptions } from '../lib/types'
|
||||
import { EmptyRows, LoadingRows } from './body-states'
|
||||
import { CellViewerModal } from './cell-viewer-modal'
|
||||
import { ContextMenu } from './context-menu'
|
||||
import { EditableCell } from './editable-cell'
|
||||
import { EditableRow } from './editable-row'
|
||||
import { FilterPanel } from './filter-panel'
|
||||
import { RowModal } from './row-modal'
|
||||
import { SchemaModal } from './schema-modal'
|
||||
import { TableToolbar } from './table-toolbar'
|
||||
|
||||
export function TableViewer() {
|
||||
const params = useParams()
|
||||
const router = useRouter()
|
||||
|
||||
const workspaceId = params.workspaceId as string
|
||||
const tableId = params.tableId as string
|
||||
|
||||
const [queryOptions, setQueryOptions] = useState<QueryOptions>({
|
||||
filter: null,
|
||||
sort: null,
|
||||
})
|
||||
const [currentPage, setCurrentPage] = useState(0)
|
||||
const [showFilters, setShowFilters] = useState(false)
|
||||
const [deletingRows, setDeletingRows] = useState<string[]>([])
|
||||
const [showSchemaModal, setShowSchemaModal] = useState(false)
|
||||
|
||||
const [cellViewer, setCellViewer] = useState<CellViewerData | null>(null)
|
||||
const [copied, setCopied] = useState(false)
|
||||
|
||||
const { tableData, isLoadingTable, rows, totalCount, totalPages, isLoadingRows, refetchRows } =
|
||||
useTableData({
|
||||
workspaceId,
|
||||
tableId,
|
||||
queryOptions,
|
||||
currentPage,
|
||||
})
|
||||
|
||||
const columns = tableData?.schema?.columns || []
|
||||
|
||||
const { selectedRows, handleSelectAll, handleSelectRow, clearSelection } = useRowSelection(rows)
|
||||
|
||||
const { contextMenu, handleRowContextMenu, closeContextMenu } = useContextMenu()
|
||||
|
||||
const {
|
||||
newRows,
|
||||
pendingChanges,
|
||||
addNewRow,
|
||||
updateNewRowCell,
|
||||
updateExistingRowCell,
|
||||
saveChanges,
|
||||
discardChanges,
|
||||
hasPendingChanges,
|
||||
isSaving,
|
||||
} = useInlineEditing({
|
||||
workspaceId,
|
||||
tableId,
|
||||
columns,
|
||||
onSuccess: refetchRows,
|
||||
})
|
||||
|
||||
const selectedCount = selectedRows.size
const isAllSelected = rows.length > 0 && selectedCount === rows.length
|
||||
|
||||
const handleNavigateBack = useCallback(() => {
|
||||
router.push(`/workspace/${workspaceId}/tables`)
|
||||
}, [router, workspaceId])
|
||||
|
||||
const handleShowSchema = useCallback(() => {
|
||||
setShowSchemaModal(true)
|
||||
}, [])
|
||||
|
||||
const handleToggleFilters = useCallback(() => {
|
||||
setShowFilters((prev) => !prev)
|
||||
}, [])
|
||||
|
||||
const handleApplyQueryOptions = useCallback((options: QueryOptions) => {
  // Updating queryOptions/currentPage changes the query key, which triggers a refetch automatically.
  setQueryOptions(options)
  setCurrentPage(0)
}, [])
|
||||
|
||||
const handleDeleteSelected = useCallback(() => {
|
||||
setDeletingRows(Array.from(selectedRows))
|
||||
}, [selectedRows])
|
||||
|
||||
const handleContextMenuEdit = useCallback(() => {
|
||||
// For inline editing, we don't need the modal anymore
|
||||
// The cell becomes editable on click
|
||||
closeContextMenu()
|
||||
}, [closeContextMenu])
|
||||
|
||||
const handleContextMenuDelete = useCallback(() => {
|
||||
if (contextMenu.row) {
|
||||
setDeletingRows([contextMenu.row.id])
|
||||
}
|
||||
closeContextMenu()
|
||||
}, [contextMenu.row, closeContextMenu])
|
||||
|
||||
const handleCopyCellValue = useCallback(async () => {
|
||||
if (cellViewer) {
|
||||
const text =
  cellViewer.type === 'json'
    ? JSON.stringify(cellViewer.value, null, 2)
    : String(cellViewer.value)
|
||||
await navigator.clipboard.writeText(text)
|
||||
setCopied(true)
|
||||
setTimeout(() => setCopied(false), 2000)
|
||||
}
|
||||
}, [cellViewer])
|
||||
|
||||
const handleCellClick = useCallback(
|
||||
(columnName: string, value: unknown, type: CellViewerData['type']) => {
|
||||
setCellViewer({ columnName, value, type })
|
||||
},
|
||||
[]
|
||||
)
|
||||
|
||||
// The editing hook exposes no single-row removal, so removing a new row
// currently discards all pending changes.
const handleRemoveNewRow = useCallback(
  (_tempId: string) => {
    discardChanges()
  },
  [discardChanges]
)
|
||||
|
||||
const handlePreviousPage = useCallback(() => {
|
||||
setCurrentPage((p) => Math.max(0, p - 1))
|
||||
}, [])
|
||||
|
||||
const handleNextPage = useCallback(() => {
|
||||
setCurrentPage((p) => Math.min(totalPages - 1, p + 1))
|
||||
}, [totalPages])
|
||||
|
||||
if (isLoadingTable) {
|
||||
return (
|
||||
<div className='flex h-full items-center justify-center'>
|
||||
<span className='text-[13px] text-[var(--text-tertiary)]'>Loading table...</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (!tableData) {
|
||||
return (
|
||||
<div className='flex h-full items-center justify-center'>
|
||||
<span className='text-[13px] text-[var(--text-error)]'>Table not found</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='flex h-full flex-col'>
|
||||
<TableToolbar
|
||||
tableName={tableData.name}
|
||||
totalCount={totalCount}
|
||||
isLoading={isLoadingRows}
|
||||
onNavigateBack={handleNavigateBack}
|
||||
onShowSchema={handleShowSchema}
|
||||
onRefresh={refetchRows}
|
||||
showFilters={showFilters}
|
||||
onToggleFilters={handleToggleFilters}
|
||||
onAddRecord={addNewRow}
|
||||
selectedCount={selectedCount}
|
||||
onDeleteSelected={handleDeleteSelected}
|
||||
onClearSelection={clearSelection}
|
||||
hasPendingChanges={hasPendingChanges}
|
||||
onSaveChanges={saveChanges}
|
||||
onDiscardChanges={discardChanges}
|
||||
isSaving={isSaving}
|
||||
currentPage={currentPage}
|
||||
totalPages={totalPages}
|
||||
onPreviousPage={handlePreviousPage}
|
||||
onNextPage={handleNextPage}
|
||||
/>
|
||||
|
||||
<FilterPanel
|
||||
columns={columns}
|
||||
isVisible={showFilters}
|
||||
onApply={handleApplyQueryOptions}
|
||||
onClose={() => setShowFilters(false)}
|
||||
isLoading={isLoadingRows}
|
||||
/>
|
||||
|
||||
<div className='flex-1 overflow-auto'>
|
||||
<Table>
|
||||
<TableHeader className='sticky top-0 z-10 bg-[var(--surface-3)]'>
|
||||
<TableRow>
|
||||
<TableHead className='w-[40px]'>
|
||||
<Checkbox size='sm' checked={isAllSelected} onCheckedChange={handleSelectAll} />
|
||||
</TableHead>
|
||||
{columns.map((column) => (
|
||||
<TableHead key={column.name}>
|
||||
<div className='flex items-center gap-[6px]'>
|
||||
<span className='text-[12px]'>{column.name}</span>
|
||||
<Badge variant='outline' size='sm'>
|
||||
{column.type}
|
||||
</Badge>
|
||||
{column.required && (
|
||||
<span className='text-[10px] text-[var(--text-error)]'>*</span>
|
||||
)}
|
||||
</div>
|
||||
</TableHead>
|
||||
))}
|
||||
</TableRow>
|
||||
</TableHeader>
|
||||
<TableBody>
|
||||
{/* New rows being added */}
|
||||
{newRows.map((newRow) => (
|
||||
<EditableRow
|
||||
key={newRow.tempId}
|
||||
row={newRow}
|
||||
columns={columns}
|
||||
onUpdateCell={updateNewRowCell}
|
||||
onRemove={handleRemoveNewRow}
|
||||
/>
|
||||
))}
|
||||
|
||||
{/* Loading state */}
|
||||
{isLoadingRows ? (
|
||||
<LoadingRows columns={columns} />
|
||||
) : rows.length === 0 && newRows.length === 0 ? (
|
||||
<EmptyRows
|
||||
columnCount={columns.length}
|
||||
hasFilter={!!queryOptions.filter}
|
||||
onAddRow={addNewRow}
|
||||
/>
|
||||
) : (
|
||||
/* Existing rows with inline editing */
|
||||
rows.map((row) => {
|
||||
const rowChanges = pendingChanges.get(row.id)
|
||||
const hasChanges = !!rowChanges
|
||||
|
||||
return (
|
||||
<TableRow
|
||||
key={row.id}
|
||||
className={cn(
|
||||
'group hover:bg-[var(--surface-4)]',
|
||||
selectedRows.has(row.id) && 'bg-[var(--surface-5)]',
|
||||
hasChanges && 'bg-amber-500/10'
|
||||
)}
|
||||
onContextMenu={(e) => handleRowContextMenu(e, row)}
|
||||
>
|
||||
<TableCell>
|
||||
<Checkbox
|
||||
size='sm'
|
||||
checked={selectedRows.has(row.id)}
|
||||
onCheckedChange={() => handleSelectRow(row.id)}
|
||||
/>
|
||||
</TableCell>
|
||||
{columns.map((column) => {
|
||||
const currentValue = rowChanges?.[column.name] ?? row.data[column.name]
|
||||
|
||||
return (
|
||||
<TableCell key={column.name}>
|
||||
<EditableCell
|
||||
value={currentValue}
|
||||
column={column}
|
||||
onChange={(value) => updateExistingRowCell(row.id, column.name, value)}
|
||||
/>
|
||||
</TableCell>
|
||||
)
|
||||
})}
|
||||
</TableRow>
|
||||
)
|
||||
})
|
||||
)}
|
||||
</TableBody>
|
||||
</Table>
|
||||
</div>
|
||||
|
||||
{/* Delete confirmation modal */}
|
||||
{deletingRows.length > 0 && (
|
||||
<RowModal
|
||||
mode='delete'
|
||||
isOpen={true}
|
||||
onClose={() => setDeletingRows([])}
|
||||
table={tableData}
|
||||
rowIds={deletingRows}
|
||||
onSuccess={() => {
|
||||
refetchRows()
|
||||
setDeletingRows([])
|
||||
clearSelection()
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
<SchemaModal
|
||||
isOpen={showSchemaModal}
|
||||
onClose={() => setShowSchemaModal(false)}
|
||||
columns={columns}
|
||||
/>
|
||||
|
||||
<CellViewerModal
|
||||
cellViewer={cellViewer}
|
||||
onClose={() => setCellViewer(null)}
|
||||
onCopy={handleCopyCellValue}
|
||||
copied={copied}
|
||||
/>
|
||||
|
||||
<ContextMenu
|
||||
contextMenu={contextMenu}
|
||||
onClose={closeContextMenu}
|
||||
onEdit={handleContextMenuEdit}
|
||||
onDelete={handleContextMenuDelete}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,4 @@
export * from './use-context-menu'
export * from './use-inline-editing'
export * from './use-row-selection'
export * from './use-table-data'
@@ -0,0 +1,37 @@
|
||||
import { useCallback, useState } from 'react'
|
||||
import type { TableRow } from '@/lib/table'
|
||||
import type { ContextMenuState } from '../lib/types'
|
||||
|
||||
interface UseContextMenuReturn {
|
||||
contextMenu: ContextMenuState
|
||||
handleRowContextMenu: (e: React.MouseEvent, row: TableRow) => void
|
||||
closeContextMenu: () => void
|
||||
}
|
||||
|
||||
export function useContextMenu(): UseContextMenuReturn {
|
||||
const [contextMenu, setContextMenu] = useState<ContextMenuState>({
|
||||
isOpen: false,
|
||||
position: { x: 0, y: 0 },
|
||||
row: null,
|
||||
})
|
||||
|
||||
const handleRowContextMenu = useCallback((e: React.MouseEvent, row: TableRow) => {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
setContextMenu({
|
||||
isOpen: true,
|
||||
position: { x: e.clientX, y: e.clientY },
|
||||
row,
|
||||
})
|
||||
}, [])
|
||||
|
||||
const closeContextMenu = useCallback(() => {
|
||||
setContextMenu((prev) => ({ ...prev, isOpen: false }))
|
||||
}, [])
|
||||
|
||||
return {
|
||||
contextMenu,
|
||||
handleRowContextMenu,
|
||||
closeContextMenu,
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,192 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { nanoid } from 'nanoid'
|
||||
import type { ColumnDefinition } from '@/lib/table'
|
||||
|
||||
const logger = createLogger('useInlineEditing')
|
||||
|
||||
export interface TempRow {
|
||||
tempId: string
|
||||
data: Record<string, unknown>
|
||||
isNew: true
|
||||
}
|
||||
|
||||
interface UseInlineEditingProps {
|
||||
workspaceId: string
|
||||
tableId: string
|
||||
columns: ColumnDefinition[]
|
||||
onSuccess: () => void
|
||||
}
|
||||
|
||||
interface UseInlineEditingReturn {
|
||||
newRows: TempRow[]
|
||||
pendingChanges: Map<string, Record<string, unknown>>
|
||||
addNewRow: () => void
|
||||
updateNewRowCell: (tempId: string, column: string, value: unknown) => void
|
||||
updateExistingRowCell: (rowId: string, column: string, value: unknown) => void
|
||||
saveChanges: () => Promise<void>
|
||||
discardChanges: () => void
|
||||
hasPendingChanges: boolean
|
||||
isSaving: boolean
|
||||
error: string | null
|
||||
}
|
||||
|
||||
function createInitialRowData(columns: ColumnDefinition[]): Record<string, unknown> {
|
||||
const initial: Record<string, unknown> = {}
|
||||
columns.forEach((col) => {
|
||||
if (col.type === 'boolean') {
|
||||
initial[col.name] = false
|
||||
} else {
|
||||
initial[col.name] = null
|
||||
}
|
||||
})
|
||||
return initial
|
||||
}
|
||||
|
||||
function cleanRowData(
|
||||
columns: ColumnDefinition[],
|
||||
rowData: Record<string, unknown>
|
||||
): Record<string, unknown> {
|
||||
const cleanData: Record<string, unknown> = {}
|
||||
|
||||
columns.forEach((col) => {
|
||||
const value = rowData[col.name]
|
||||
if (col.type === 'number') {
|
||||
cleanData[col.name] = value === '' || value === null ? null : Number(value)
|
||||
} else if (col.type === 'json') {
|
||||
if (typeof value === 'string') {
|
||||
if (value === '') {
|
||||
cleanData[col.name] = null
|
||||
} else {
|
||||
try {
|
||||
cleanData[col.name] = JSON.parse(value)
|
||||
} catch {
|
||||
throw new Error(`Invalid JSON for field: ${col.name}`)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
cleanData[col.name] = value
|
||||
}
|
||||
} else if (col.type === 'boolean') {
|
||||
cleanData[col.name] = Boolean(value)
|
||||
} else {
|
||||
cleanData[col.name] = value || null
|
||||
}
|
||||
})
|
||||
|
||||
return cleanData
|
||||
}
|
||||
|
||||
export function useInlineEditing({
|
||||
workspaceId,
|
||||
tableId,
|
||||
columns,
|
||||
onSuccess,
|
||||
}: UseInlineEditingProps): UseInlineEditingReturn {
|
||||
const [newRows, setNewRows] = useState<TempRow[]>([])
|
||||
const [pendingChanges, setPendingChanges] = useState<Map<string, Record<string, unknown>>>(
|
||||
new Map()
|
||||
)
|
||||
const [isSaving, setIsSaving] = useState(false)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
|
||||
const hasPendingChanges = newRows.length > 0 || pendingChanges.size > 0
|
||||
|
||||
const addNewRow = useCallback(() => {
|
||||
const newRow: TempRow = {
|
||||
tempId: `temp-${nanoid()}`,
|
||||
data: createInitialRowData(columns),
|
||||
isNew: true,
|
||||
}
|
||||
setNewRows((prev) => [newRow, ...prev])
|
||||
}, [columns])
|
||||
|
||||
const updateNewRowCell = useCallback((tempId: string, column: string, value: unknown) => {
|
||||
setNewRows((prev) =>
|
||||
prev.map((row) =>
|
||||
row.tempId === tempId ? { ...row, data: { ...row.data, [column]: value } } : row
|
||||
)
|
||||
)
|
||||
}, [])
|
||||
|
||||
const updateExistingRowCell = useCallback((rowId: string, column: string, value: unknown) => {
|
||||
setPendingChanges((prev) => {
|
||||
const newMap = new Map(prev)
|
||||
const existing = newMap.get(rowId) || {}
|
||||
newMap.set(rowId, { ...existing, [column]: value })
|
||||
return newMap
|
||||
})
|
||||
}, [])
|
||||
|
||||
const saveChanges = useCallback(async () => {
|
||||
setIsSaving(true)
|
||||
setError(null)
|
||||
|
||||
try {
|
||||
// Save new rows
|
||||
for (const newRow of newRows) {
|
||||
const cleanData = cleanRowData(columns, newRow.data)
|
||||
|
||||
const res = await fetch(`/api/table/${tableId}/rows`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ workspaceId, data: cleanData }),
|
||||
})
|
||||
|
||||
const result: { error?: string } = await res.json()
|
||||
if (!res.ok) {
|
||||
throw new Error(result.error || 'Failed to add row')
|
||||
}
|
||||
}
|
||||
|
||||
// Save edited rows
|
||||
for (const [rowId, changes] of pendingChanges.entries()) {
|
||||
const cleanData = cleanRowData(columns, changes)
|
||||
|
||||
const res = await fetch(`/api/table/${tableId}/rows/${rowId}`, {
|
||||
method: 'PATCH',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ workspaceId, data: cleanData }),
|
||||
})
|
||||
|
||||
const result: { error?: string } = await res.json()
|
||||
if (!res.ok) {
|
||||
throw new Error(result.error || 'Failed to update row')
|
||||
}
|
||||
}
|
||||
|
||||
// Clear state and refresh
|
||||
setNewRows([])
|
||||
setPendingChanges(new Map())
|
||||
onSuccess()
|
||||
|
||||
logger.info('Changes saved successfully')
|
||||
} catch (err) {
|
||||
logger.error('Failed to save changes:', err)
|
||||
setError(err instanceof Error ? err.message : 'Failed to save changes')
|
||||
} finally {
|
||||
setIsSaving(false)
|
||||
}
|
||||
}, [newRows, pendingChanges, columns, tableId, workspaceId, onSuccess])
|
||||
|
||||
const discardChanges = useCallback(() => {
|
||||
setNewRows([])
|
||||
setPendingChanges(new Map())
|
||||
setError(null)
|
||||
}, [])
|
||||
|
||||
return {
|
||||
newRows,
|
||||
pendingChanges,
|
||||
addNewRow,
|
||||
updateNewRowCell,
|
||||
updateExistingRowCell,
|
||||
saveChanges,
|
||||
discardChanges,
|
||||
hasPendingChanges,
|
||||
isSaving,
|
||||
error,
|
||||
}
|
||||
}
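For reference, a tiny illustration (row id and values invented) of how pendingChanges accumulates per-row edits before saveChanges flushes them as PATCH requests:

// Illustrative only: updateExistingRowCell merges successive edits per row id.
const pending = new Map<string, Record<string, unknown>>()

function recordEdit(rowId: string, column: string, value: unknown) {
  const existing = pending.get(rowId) || {}
  pending.set(rowId, { ...existing, [column]: value })
}

recordEdit('row-1', 'name', 'Ada')
recordEdit('row-1', 'age', 37)
// pending is now Map { 'row-1' => { name: 'Ada', age: 37 } }
// saveChanges() would PATCH /api/table/<tableId>/rows/row-1 with that merged object.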
|
||||
@@ -0,0 +1,56 @@
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import type { TableRow } from '@/lib/table'
|
||||
|
||||
interface UseRowSelectionReturn {
|
||||
selectedRows: Set<string>
|
||||
handleSelectAll: () => void
|
||||
handleSelectRow: (rowId: string) => void
|
||||
clearSelection: () => void
|
||||
}
|
||||
|
||||
export function useRowSelection(rows: TableRow[]): UseRowSelectionReturn {
|
||||
const [selectedRows, setSelectedRows] = useState<Set<string>>(new Set())
|
||||
|
||||
useEffect(() => {
|
||||
setSelectedRows((prev) => {
|
||||
if (prev.size === 0) return prev
|
||||
|
||||
const currentRowIds = new Set(rows.map((r) => r.id))
|
||||
const filtered = new Set([...prev].filter((id) => currentRowIds.has(id)))
|
||||
|
||||
// Only update state if something was actually filtered out
|
||||
return filtered.size !== prev.size ? filtered : prev
|
||||
})
|
||||
}, [rows])
|
||||
|
||||
const handleSelectAll = useCallback(() => {
|
||||
if (selectedRows.size === rows.length) {
|
||||
setSelectedRows(new Set())
|
||||
} else {
|
||||
setSelectedRows(new Set(rows.map((r) => r.id)))
|
||||
}
|
||||
}, [rows, selectedRows.size])
|
||||
|
||||
const handleSelectRow = useCallback((rowId: string) => {
|
||||
setSelectedRows((prev) => {
|
||||
const newSet = new Set(prev)
|
||||
if (newSet.has(rowId)) {
|
||||
newSet.delete(rowId)
|
||||
} else {
|
||||
newSet.add(rowId)
|
||||
}
|
||||
return newSet
|
||||
})
|
||||
}, [])
|
||||
|
||||
const clearSelection = useCallback(() => {
|
||||
setSelectedRows(new Set())
|
||||
}, [])
|
||||
|
||||
return {
|
||||
selectedRows,
|
||||
handleSelectAll,
|
||||
handleSelectRow,
|
||||
clearSelection,
|
||||
}
|
||||
}
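A small sketch (ids invented) of the pruning step the effect above performs when the visible rows change, for example after a page change:

// Illustrative only: selections pointing at rows that are no longer visible are dropped.
const previous = new Set(['a', 'b', 'c'])
const currentRowIds = new Set(['b', 'c', 'd'])
const filtered = new Set([...previous].filter((id) => currentRowIds.has(id)))
// filtered is Set { 'b', 'c' } — 'a' is gone from the current page, so it is pruned.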
|
||||
@@ -0,0 +1,86 @@
|
||||
import { useQuery } from '@tanstack/react-query'
|
||||
import type { TableDefinition, TableRow } from '@/lib/table'
|
||||
import { ROWS_PER_PAGE } from '../lib/constants'
|
||||
import type { QueryOptions } from '../lib/types'
|
||||
|
||||
interface UseTableDataParams {
|
||||
workspaceId: string
|
||||
tableId: string
|
||||
queryOptions: QueryOptions
|
||||
currentPage: number
|
||||
}
|
||||
|
||||
interface UseTableDataReturn {
|
||||
tableData: TableDefinition | undefined
|
||||
isLoadingTable: boolean
|
||||
rows: TableRow[]
|
||||
totalCount: number
|
||||
totalPages: number
|
||||
isLoadingRows: boolean
|
||||
refetchRows: () => void
|
||||
}
|
||||
|
||||
export function useTableData({
|
||||
workspaceId,
|
||||
tableId,
|
||||
queryOptions,
|
||||
currentPage,
|
||||
}: UseTableDataParams): UseTableDataReturn {
|
||||
const { data: tableData, isLoading: isLoadingTable } = useQuery({
|
||||
queryKey: ['table', tableId],
|
||||
queryFn: async () => {
|
||||
const res = await fetch(`/api/table/${tableId}?workspaceId=${workspaceId}`)
|
||||
if (!res.ok) throw new Error('Failed to fetch table')
|
||||
const json: { data?: { table: TableDefinition }; table?: TableDefinition } = await res.json()
|
||||
const data = json.data || json
|
||||
return (data as { table: TableDefinition }).table
|
||||
},
|
||||
})
|
||||
|
||||
const {
|
||||
data: rowsData,
|
||||
isLoading: isLoadingRows,
|
||||
refetch: refetchRows,
|
||||
} = useQuery({
|
||||
queryKey: ['table-rows', tableId, queryOptions, currentPage],
|
||||
queryFn: async () => {
|
||||
const searchParams = new URLSearchParams({
|
||||
workspaceId,
|
||||
limit: String(ROWS_PER_PAGE),
|
||||
offset: String(currentPage * ROWS_PER_PAGE),
|
||||
})
|
||||
|
||||
if (queryOptions.filter) {
|
||||
searchParams.set('filter', JSON.stringify(queryOptions.filter))
|
||||
}
|
||||
|
||||
if (queryOptions.sort) {
|
||||
searchParams.set('sort', JSON.stringify(queryOptions.sort))
|
||||
}
|
||||
|
||||
const res = await fetch(`/api/table/${tableId}/rows?${searchParams}`)
|
||||
if (!res.ok) throw new Error('Failed to fetch rows')
|
||||
const json: {
|
||||
data?: { rows: TableRow[]; totalCount: number }
|
||||
rows?: TableRow[]
|
||||
totalCount?: number
|
||||
} = await res.json()
|
||||
return json.data || json
|
||||
},
|
||||
enabled: !!tableData,
|
||||
})
|
||||
|
||||
const rows = (rowsData?.rows || []) as TableRow[]
|
||||
const totalCount = rowsData?.totalCount || 0
|
||||
const totalPages = Math.ceil(totalCount / ROWS_PER_PAGE)
|
||||
|
||||
return {
|
||||
tableData,
|
||||
isLoadingTable,
|
||||
rows,
|
||||
totalCount,
|
||||
totalPages,
|
||||
isLoadingRows,
|
||||
refetchRows,
|
||||
}
|
||||
}
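For reference, a sketch (the workspace id and filter shape are assumptions, not taken from the diff) of the rows request the query above would issue for page index 2 with a filter applied:

// Illustrative only: page index 2 with ROWS_PER_PAGE = 100 yields offset 200.
const ROWS_PER_PAGE = 100
const currentPage = 2

const searchParams = new URLSearchParams({
  workspaceId: 'ws_123', // hypothetical id
  limit: String(ROWS_PER_PAGE),
  offset: String(currentPage * ROWS_PER_PAGE),
})
// Assumed filter shape for illustration — the real Filter type lives in '@/lib/table'.
searchParams.set('filter', JSON.stringify({ column: 'status', operator: 'eq', value: 'active' }))

// GET /api/table/<tableId>/rows?workspaceId=ws_123&limit=100&offset=200&filter=%7B...%7D
console.log(searchParams.toString())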
|
||||
@@ -0,0 +1,2 @@
export const ROWS_PER_PAGE = 100
export const STRING_TRUNCATE_LENGTH = 50
@@ -0,0 +1,3 @@
export * from './constants'
export * from './types'
export * from './utils'
@@ -0,0 +1,27 @@
import type { Filter, Sort, TableRow } from '@/lib/table'

/**
 * Query options for filtering and sorting table data
 */
export interface QueryOptions {
  filter: Filter | null
  sort: Sort | null
}

/**
 * Data for viewing a cell's full content in a modal
 */
export interface CellViewerData {
  columnName: string
  value: unknown
  type: 'json' | 'text' | 'date' | 'boolean' | 'number'
}

/**
 * State for the row context menu (right-click)
 */
export interface ContextMenuState {
  isOpen: boolean
  position: { x: number; y: number }
  row: TableRow | null
}
@@ -0,0 +1,21 @@
type BadgeVariant = 'green' | 'blue' | 'purple' | 'orange' | 'teal' | 'gray'

/**
 * Returns the appropriate badge color variant for a column type
 */
export function getTypeBadgeVariant(type: string): BadgeVariant {
  switch (type) {
    case 'string':
      return 'green'
    case 'number':
      return 'blue'
    case 'boolean':
      return 'purple'
    case 'json':
      return 'orange'
    case 'date':
      return 'teal'
    default:
      return 'gray'
  }
}
@@ -0,0 +1,5 @@
import { TableViewer } from './components'

export default function TablePage() {
  return <TableViewer />
}
@@ -0,0 +1,14 @@
export type SortOption = 'name' | 'createdAt' | 'updatedAt' | 'rowCount' | 'columnCount'
export type SortOrder = 'asc' | 'desc'

export const SORT_OPTIONS = [
  { value: 'updatedAt-desc', label: 'Last Updated' },
  { value: 'createdAt-desc', label: 'Newest First' },
  { value: 'createdAt-asc', label: 'Oldest First' },
  { value: 'name-asc', label: 'Name (A-Z)' },
  { value: 'name-desc', label: 'Name (Z-A)' },
  { value: 'rowCount-desc', label: 'Most Rows' },
  { value: 'rowCount-asc', label: 'Least Rows' },
  { value: 'columnCount-desc', label: 'Most Columns' },
  { value: 'columnCount-asc', label: 'Least Columns' },
] as const
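A hedged sketch (assumed usage, not shown in the diff) of how a combined value such as 'rowCount-desc' could be split back into a sort field and direction:

// Illustrative only — the list above encodes field and direction in one string.
type SketchSortOption = 'name' | 'createdAt' | 'updatedAt' | 'rowCount' | 'columnCount'
type SketchSortOrder = 'asc' | 'desc'

function parseSortValue(value: string): { field: SketchSortOption; order: SketchSortOrder } {
  const [field, order] = value.split('-') as [SketchSortOption, SketchSortOrder]
  return { field, order }
}

parseSortValue('rowCount-desc') // { field: 'rowCount', order: 'desc' }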
|
||||
@@ -0,0 +1,349 @@
|
||||
'use client'
|
||||
|
||||
import { useMemo, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Plus } from 'lucide-react'
|
||||
import { nanoid } from 'nanoid'
|
||||
import { useParams } from 'next/navigation'
|
||||
import {
|
||||
Button,
|
||||
Checkbox,
|
||||
Combobox,
|
||||
Input,
|
||||
Label,
|
||||
Modal,
|
||||
ModalBody,
|
||||
ModalContent,
|
||||
ModalFooter,
|
||||
ModalHeader,
|
||||
Textarea,
|
||||
Tooltip,
|
||||
} from '@/components/emcn'
|
||||
import { Trash } from '@/components/emcn/icons/trash'
|
||||
import type { ColumnDefinition } from '@/lib/table'
|
||||
import { useCreateTable } from '@/hooks/queries/use-tables'
|
||||
|
||||
const logger = createLogger('CreateModal')
|
||||
|
||||
interface CreateModalProps {
|
||||
isOpen: boolean
|
||||
onClose: () => void
|
||||
}
|
||||
|
||||
const COLUMN_TYPE_OPTIONS: Array<{ value: ColumnDefinition['type']; label: string }> = [
|
||||
{ value: 'string', label: 'String' },
|
||||
{ value: 'number', label: 'Number' },
|
||||
{ value: 'boolean', label: 'Boolean' },
|
||||
{ value: 'date', label: 'Date' },
|
||||
{ value: 'json', label: 'JSON' },
|
||||
]
|
||||
|
||||
interface ColumnWithId extends ColumnDefinition {
|
||||
id: string
|
||||
}
|
||||
|
||||
function createEmptyColumn(): ColumnWithId {
|
||||
return { id: nanoid(), name: '', type: 'string', required: true, unique: false }
|
||||
}
|
||||
|
||||
export function CreateModal({ isOpen, onClose }: CreateModalProps) {
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
|
||||
const [tableName, setTableName] = useState('')
|
||||
const [description, setDescription] = useState('')
|
||||
const [columns, setColumns] = useState<ColumnWithId[]>([createEmptyColumn()])
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
|
||||
const createTable = useCreateTable(workspaceId)
|
||||
|
||||
// Form validation
|
||||
const validColumns = useMemo(() => columns.filter((col) => col.name.trim()), [columns])
|
||||
const duplicateColumnNames = useMemo(() => {
|
||||
const names = validColumns.map((col) => col.name.toLowerCase())
|
||||
const seen = new Set<string>()
|
||||
const duplicates = new Set<string>()
|
||||
names.forEach((name) => {
|
||||
if (seen.has(name)) {
|
||||
duplicates.add(name)
|
||||
}
|
||||
seen.add(name)
|
||||
})
|
||||
return duplicates
|
||||
}, [validColumns])
|
||||
|
||||
const isFormValid = useMemo(() => {
|
||||
const hasTableName = tableName.trim().length > 0
|
||||
const hasAtLeastOneColumn = validColumns.length > 0
|
||||
const hasNoDuplicates = duplicateColumnNames.size === 0
|
||||
return hasTableName && hasAtLeastOneColumn && hasNoDuplicates
|
||||
}, [tableName, validColumns.length, duplicateColumnNames.size])
|
||||
|
||||
const handleAddColumn = () => {
|
||||
setColumns([...columns, createEmptyColumn()])
|
||||
}
|
||||
|
||||
const handleRemoveColumn = (columnId: string) => {
|
||||
if (columns.length > 1) {
|
||||
setColumns(columns.filter((col) => col.id !== columnId))
|
||||
}
|
||||
}
|
||||
|
||||
const handleColumnChange = (
|
||||
columnId: string,
|
||||
field: keyof ColumnDefinition,
|
||||
value: string | boolean
|
||||
) => {
|
||||
setColumns(columns.map((col) => (col.id === columnId ? { ...col, [field]: value } : col)))
|
||||
}
|
||||
|
||||
const handleSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault()
|
||||
setError(null)
|
||||
|
||||
if (!tableName.trim()) {
|
||||
setError('Table name is required')
|
||||
return
|
||||
}
|
||||
|
||||
// Validate column names
|
||||
const validColumns = columns.filter((col) => col.name.trim())
|
||||
if (validColumns.length === 0) {
|
||||
setError('At least one column is required')
|
||||
return
|
||||
}
|
||||
|
||||
// Check for duplicate column names
|
||||
const columnNames = validColumns.map((col) => col.name.toLowerCase())
|
||||
const uniqueNames = new Set(columnNames)
|
||||
if (uniqueNames.size !== columnNames.length) {
|
||||
setError('Duplicate column names found')
|
||||
return
|
||||
}
|
||||
|
||||
// Strip internal IDs before sending to API
|
||||
const columnsForApi = validColumns.map(({ id: _id, ...col }) => col)
|
||||
|
||||
try {
|
||||
await createTable.mutateAsync({
|
||||
name: tableName,
|
||||
description: description || undefined,
|
||||
schema: {
|
||||
columns: columnsForApi,
|
||||
},
|
||||
})
|
||||
|
||||
// Reset form
|
||||
resetForm()
|
||||
onClose()
|
||||
} catch (err) {
|
||||
logger.error('Failed to create table:', err)
|
||||
setError(err instanceof Error ? err.message : 'Failed to create table')
|
||||
}
|
||||
}
|
||||
|
||||
const resetForm = () => {
|
||||
setTableName('')
|
||||
setDescription('')
|
||||
setColumns([createEmptyColumn()])
|
||||
setError(null)
|
||||
}
|
||||
|
||||
const handleClose = () => {
|
||||
resetForm()
|
||||
onClose()
|
||||
}
|
||||
|
||||
return (
|
||||
<Modal open={isOpen} onOpenChange={handleClose}>
|
||||
<ModalContent size='lg'>
|
||||
<ModalHeader>Create New Table</ModalHeader>
|
||||
<ModalBody className='max-h-[70vh] overflow-y-auto'>
|
||||
<form onSubmit={handleSubmit} className='space-y-[12px]'>
|
||||
{error && (
|
||||
<div className='rounded-[4px] border border-destructive/30 bg-destructive/10 p-3 text-destructive text-sm'>
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Table Name */}
|
||||
<div>
|
||||
<Label
|
||||
htmlFor='tableName'
|
||||
className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'
|
||||
>
|
||||
Table Name
|
||||
</Label>
|
||||
<Input
|
||||
id='tableName'
|
||||
value={tableName}
|
||||
onChange={(e: React.ChangeEvent<HTMLInputElement>) => setTableName(e.target.value)}
|
||||
placeholder='e.g., customer_orders'
|
||||
className='h-9'
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Description */}
|
||||
<div>
|
||||
<Label
|
||||
htmlFor='description'
|
||||
className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'
|
||||
>
|
||||
Description
|
||||
</Label>
|
||||
<Textarea
|
||||
id='description'
|
||||
value={description}
|
||||
onChange={(e: React.ChangeEvent<HTMLTextAreaElement>) =>
|
||||
setDescription(e.target.value)
|
||||
}
|
||||
placeholder='Optional description for this table'
|
||||
rows={2}
|
||||
className='resize-none'
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Columns */}
|
||||
<div>
|
||||
<div className='mb-[6.5px] flex items-center justify-between pl-[2px]'>
|
||||
<Label className='font-medium text-[13px] text-[var(--text-primary)]'>
|
||||
Columns
|
||||
</Label>
|
||||
<Button type='button' size='sm' variant='default' onClick={handleAddColumn}>
|
||||
<Plus className='mr-1 h-3.5 w-3.5' />
|
||||
Add
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Column Headers */}
|
||||
<div className='mb-2 flex items-center gap-[10px] text-[11px] text-[var(--text-secondary)]'>
|
||||
<div className='flex-1 pl-3'>Name</div>
|
||||
<div className='w-[110px] pl-3'>Type</div>
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<div className='w-[70px] cursor-help text-center'>Required</div>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>Field must have a value</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<div className='w-[70px] cursor-help text-center'>Unique</div>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>No duplicate values allowed</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
<div className='w-9' />
|
||||
</div>
|
||||
|
||||
{/* Column Rows */}
|
||||
<div className='flex flex-col gap-2'>
|
||||
{columns.map((column) => (
|
||||
<ColumnRow
|
||||
key={column.id}
|
||||
column={column}
|
||||
isRemovable={columns.length > 1}
|
||||
isDuplicate={duplicateColumnNames.has(column.name.toLowerCase())}
|
||||
onChange={handleColumnChange}
|
||||
onRemove={handleRemoveColumn}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
|
||||
<p className='mt-[6.5px] text-[11px] text-[var(--text-secondary)]'>
|
||||
Mark columns as unique to prevent duplicate values (e.g., id, email)
|
||||
</p>
|
||||
</div>
|
||||
</form>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<Button variant='default' onClick={handleClose} disabled={createTable.isPending}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
variant='tertiary'
|
||||
onClick={handleSubmit}
|
||||
disabled={createTable.isPending || !isFormValid}
|
||||
>
|
||||
{createTable.isPending ? 'Creating...' : 'Create'}
|
||||
</Button>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
)
|
||||
}
|
||||
|
||||
interface ColumnRowProps {
|
||||
column: ColumnWithId
|
||||
isRemovable: boolean
|
||||
isDuplicate: boolean
|
||||
onChange: (columnId: string, field: keyof ColumnDefinition, value: string | boolean) => void
|
||||
onRemove: (columnId: string) => void
|
||||
}
|
||||
|
||||
function ColumnRow({ column, isRemovable, isDuplicate, onChange, onRemove }: ColumnRowProps) {
|
||||
return (
|
||||
<div className='flex flex-col gap-1'>
|
||||
<div className='flex items-center gap-[10px]'>
|
||||
{/* Column Name */}
|
||||
<div className='flex-1'>
|
||||
<Input
|
||||
value={column.name}
|
||||
onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
|
||||
onChange(column.id, 'name', e.target.value)
|
||||
}
|
||||
placeholder='column_name'
|
||||
className={`h-9 ${isDuplicate ? 'border-destructive focus-visible:ring-destructive' : ''}`}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Column Type */}
|
||||
<div className='w-[110px]'>
|
||||
<Combobox
|
||||
options={COLUMN_TYPE_OPTIONS}
|
||||
value={column.type}
|
||||
selectedValue={column.type}
|
||||
onChange={(value) => onChange(column.id, 'type', value as ColumnDefinition['type'])}
|
||||
placeholder='Type'
|
||||
editable={false}
|
||||
filterOptions={false}
|
||||
className='h-9'
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Required Checkbox */}
|
||||
<div className='flex w-[70px] items-center justify-center'>
|
||||
<Checkbox
|
||||
checked={column.required}
|
||||
onCheckedChange={(checked) => onChange(column.id, 'required', checked === true)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Unique Checkbox */}
|
||||
<div className='flex w-[70px] items-center justify-center'>
|
||||
<Checkbox
|
||||
checked={column.unique}
|
||||
onCheckedChange={(checked) => onChange(column.id, 'unique', checked === true)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Delete Button */}
|
||||
<div className='w-9'>
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
type='button'
|
||||
variant='ghost'
|
||||
onClick={() => onRemove(column.id)}
|
||||
disabled={!isRemovable}
|
||||
className='h-9 w-9 p-0'
|
||||
>
|
||||
<Trash />
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>Remove column</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
</div>
|
||||
{isDuplicate && <p className='mt-1 pl-1 text-destructive text-sm'>Duplicate column name</p>}
|
||||
</div>
|
||||
)
|
||||
}
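A minimal sketch (names invented) of the case-insensitive duplicate detection used by the duplicateColumnNames memo above:

// Illustrative only: duplicate column names are detected case-insensitively.
function findDuplicateNames(names: string[]): Set<string> {
  const seen = new Set<string>()
  const duplicates = new Set<string>()
  for (const name of names.map((n) => n.toLowerCase())) {
    if (seen.has(name)) duplicates.add(name)
    seen.add(name)
  }
  return duplicates
}

findDuplicateNames(['email', 'Email', 'age']) // Set { 'email' } — submission stays disabled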
|
||||
@@ -0,0 +1,20 @@
|
||||
interface EmptyStateProps {
|
||||
hasSearchQuery: boolean
|
||||
}
|
||||
|
||||
export function EmptyState({ hasSearchQuery }: EmptyStateProps) {
|
||||
return (
|
||||
<div className='col-span-full flex h-64 items-center justify-center rounded-lg border border-muted-foreground/25 bg-muted/20'>
|
||||
<div className='text-center'>
|
||||
<p className='font-medium text-[var(--text-secondary)] text-sm'>
|
||||
{hasSearchQuery ? 'No tables found' : 'No tables yet'}
|
||||
</p>
|
||||
<p className='mt-1 text-[var(--text-muted)] text-xs'>
|
||||
{hasSearchQuery
|
||||
? 'Try adjusting your search query'
|
||||
: 'Create your first table to store structured data for your workflows'}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,15 @@
interface ErrorStateProps {
  error: unknown
}

export function ErrorState({ error }: ErrorStateProps) {
  return (
    <div className='col-span-full flex h-64 items-center justify-center'>
      <div className='text-[var(--text-error)]'>
        <span className='text-[13px]'>
          Error: {error instanceof Error ? error.message : 'Failed to load tables'}
        </span>
      </div>
    </div>
  )
}
@@ -0,0 +1,8 @@
export * from './create-modal'
export * from './empty-state'
export * from './error-state'
export * from './loading-state'
export * from './table-card'
export * from './table-card-context-menu'
export * from './table-list-context-menu'
export * from './tables-view'
@@ -0,0 +1,31 @@
|
||||
export function LoadingState() {
|
||||
return (
|
||||
<>
|
||||
{Array.from({ length: 8 }).map((_, i) => (
|
||||
<div
|
||||
key={i}
|
||||
className='flex h-full flex-col gap-[12px] rounded-[4px] bg-[var(--surface-3)] px-[8px] py-[6px] dark:bg-[var(--surface-4)]'
|
||||
>
|
||||
<div className='flex items-center justify-between gap-[8px]'>
|
||||
<div className='h-[17px] w-[120px] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
|
||||
<div className='h-[22px] w-[90px] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
|
||||
</div>
|
||||
<div className='flex flex-1 flex-col gap-[8px]'>
|
||||
<div className='flex items-center justify-between'>
|
||||
<div className='flex items-center gap-[12px]'>
|
||||
<div className='h-[15px] w-[50px] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
|
||||
<div className='h-[15px] w-[50px] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
|
||||
</div>
|
||||
<div className='h-[15px] w-[60px] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
|
||||
</div>
|
||||
<div className='h-0 w-full border-[var(--divider)] border-t' />
|
||||
<div className='flex h-[36px] flex-col gap-[6px]'>
|
||||
<div className='h-[15px] w-full animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
|
||||
<div className='h-[15px] w-[75%] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,152 @@
|
||||
'use client'
|
||||
|
||||
import {
|
||||
Popover,
|
||||
PopoverAnchor,
|
||||
PopoverContent,
|
||||
PopoverDivider,
|
||||
PopoverItem,
|
||||
} from '@/components/emcn'
|
||||
|
||||
interface TableCardContextMenuProps {
|
||||
/**
|
||||
* Whether the context menu is open
|
||||
*/
|
||||
isOpen: boolean
|
||||
/**
|
||||
* Position of the context menu
|
||||
*/
|
||||
position: { x: number; y: number }
|
||||
/**
|
||||
* Ref for the menu element
|
||||
*/
|
||||
menuRef: React.RefObject<HTMLDivElement | null>
|
||||
/**
|
||||
* Callback when menu should close
|
||||
*/
|
||||
onClose: () => void
|
||||
/**
|
||||
* Callback when open in new tab is clicked
|
||||
*/
|
||||
onOpenInNewTab?: () => void
|
||||
/**
|
||||
* Callback when view schema is clicked
|
||||
*/
|
||||
onViewSchema?: () => void
|
||||
/**
|
||||
* Callback when copy ID is clicked
|
||||
*/
|
||||
onCopyId?: () => void
|
||||
/**
|
||||
* Callback when delete is clicked
|
||||
*/
|
||||
onDelete?: () => void
|
||||
/**
|
||||
* Whether to show the open in new tab option
|
||||
* @default true
|
||||
*/
|
||||
showOpenInNewTab?: boolean
|
||||
/**
|
||||
* Whether to show the view schema option
|
||||
* @default true
|
||||
*/
|
||||
showViewSchema?: boolean
|
||||
/**
|
||||
* Whether to show the delete option
|
||||
* @default true
|
||||
*/
|
||||
showDelete?: boolean
|
||||
/**
|
||||
* Whether the delete option is disabled
|
||||
* @default false
|
||||
*/
|
||||
disableDelete?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Context menu component for table cards.
|
||||
* Displays open in new tab, view schema, copy ID, and delete options in a popover at the right-click position.
|
||||
*/
|
||||
export function TableCardContextMenu({
|
||||
isOpen,
|
||||
position,
|
||||
menuRef,
|
||||
onClose,
|
||||
onOpenInNewTab,
|
||||
onViewSchema,
|
||||
onCopyId,
|
||||
onDelete,
|
||||
showOpenInNewTab = true,
|
||||
showViewSchema = true,
|
||||
showDelete = true,
|
||||
disableDelete = false,
|
||||
}: TableCardContextMenuProps) {
|
||||
return (
|
||||
<Popover
|
||||
open={isOpen}
|
||||
onOpenChange={(open) => !open && onClose()}
|
||||
variant='secondary'
|
||||
size='sm'
|
||||
>
|
||||
<PopoverAnchor
|
||||
style={{
|
||||
position: 'fixed',
|
||||
left: `${position.x}px`,
|
||||
top: `${position.y}px`,
|
||||
width: '1px',
|
||||
height: '1px',
|
||||
}}
|
||||
/>
|
||||
<PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
|
||||
{/* Navigation */}
|
||||
{showOpenInNewTab && onOpenInNewTab && (
|
||||
<PopoverItem
|
||||
onClick={() => {
|
||||
onOpenInNewTab()
|
||||
onClose()
|
||||
}}
|
||||
>
|
||||
Open in new tab
|
||||
</PopoverItem>
|
||||
)}
|
||||
{showOpenInNewTab && onOpenInNewTab && <PopoverDivider />}
|
||||
|
||||
{/* View and copy actions */}
|
||||
{showViewSchema && onViewSchema && (
|
||||
<PopoverItem
|
||||
onClick={() => {
|
||||
onViewSchema()
|
||||
onClose()
|
||||
}}
|
||||
>
|
||||
View schema
|
||||
</PopoverItem>
|
||||
)}
|
||||
{onCopyId && (
|
||||
<PopoverItem
|
||||
onClick={() => {
|
||||
onCopyId()
|
||||
onClose()
|
||||
}}
|
||||
>
|
||||
Copy ID
|
||||
</PopoverItem>
|
||||
)}
|
||||
{((showViewSchema && onViewSchema) || onCopyId) && <PopoverDivider />}
|
||||
|
||||
{/* Destructive action */}
|
||||
{showDelete && onDelete && (
|
||||
<PopoverItem
|
||||
disabled={disableDelete}
|
||||
onClick={() => {
|
||||
onDelete()
|
||||
onClose()
|
||||
}}
|
||||
>
|
||||
Delete
|
||||
</PopoverItem>
|
||||
)}
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,233 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Columns, Rows3 } from 'lucide-react'
|
||||
import { useRouter } from 'next/navigation'
|
||||
import {
|
||||
Badge,
|
||||
Button,
|
||||
Modal,
|
||||
ModalBody,
|
||||
ModalContent,
|
||||
ModalFooter,
|
||||
ModalHeader,
|
||||
Tooltip,
|
||||
} from '@/components/emcn'
|
||||
import type { TableDefinition } from '@/lib/table'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { SchemaModal } from '@/app/workspace/[workspaceId]/tables/[tableId]/components/schema-modal'
|
||||
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
|
||||
import { useDeleteTable } from '@/hooks/queries/use-tables'
|
||||
import { formatAbsoluteDate, formatRelativeTime } from '../lib/utils'
|
||||
import { TableCardContextMenu } from './table-card-context-menu'
|
||||
|
||||
const logger = createLogger('TableCard')
|
||||
|
||||
interface TableCardProps {
|
||||
table: TableDefinition
|
||||
workspaceId: string
|
||||
}
|
||||
|
||||
export function TableCard({ table, workspaceId }: TableCardProps) {
|
||||
const router = useRouter()
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
const [isDeleteDialogOpen, setIsDeleteDialogOpen] = useState(false)
|
||||
const [isSchemaModalOpen, setIsSchemaModalOpen] = useState(false)
|
||||
const menuButtonRef = useRef<HTMLButtonElement>(null)
|
||||
|
||||
const {
|
||||
isOpen: isContextMenuOpen,
|
||||
position: contextMenuPosition,
|
||||
menuRef,
|
||||
handleContextMenu,
|
||||
closeMenu: closeContextMenu,
|
||||
} = useContextMenu()
|
||||
|
||||
const handleMenuButtonClick = useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
e.stopPropagation()
|
||||
if (menuButtonRef.current) {
|
||||
const rect = menuButtonRef.current.getBoundingClientRect()
|
||||
const syntheticEvent = {
|
||||
preventDefault: () => {},
|
||||
stopPropagation: () => {},
|
||||
clientX: rect.right,
|
||||
clientY: rect.bottom,
|
||||
} as React.MouseEvent
|
||||
handleContextMenu(syntheticEvent)
|
||||
}
|
||||
},
|
||||
[handleContextMenu]
|
||||
)
|
||||
|
||||
const deleteTable = useDeleteTable(workspaceId)
|
||||
|
||||
const handleDelete = async () => {
|
||||
try {
|
||||
await deleteTable.mutateAsync(table.id)
|
||||
setIsDeleteDialogOpen(false)
|
||||
} catch (error) {
|
||||
logger.error('Failed to delete table:', error)
|
||||
}
|
||||
}
|
||||
|
||||
const navigateToTable = () => {
|
||||
router.push(`/workspace/${workspaceId}/tables/${table.id}`)
|
||||
}
|
||||
|
||||
const href = `/workspace/${workspaceId}/tables/${table.id}`
|
||||
|
||||
const handleClick = useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
if (isContextMenuOpen) {
|
||||
e.preventDefault()
|
||||
return
|
||||
}
|
||||
navigateToTable()
|
||||
},
|
||||
[isContextMenuOpen, navigateToTable]
|
||||
)
|
||||
|
||||
const handleKeyDown = useCallback(
|
||||
(e: React.KeyboardEvent) => {
|
||||
if (e.key === 'Enter' || e.key === ' ') {
|
||||
e.preventDefault()
|
||||
navigateToTable()
|
||||
}
|
||||
},
|
||||
[navigateToTable]
|
||||
)
|
||||
|
||||
const handleOpenInNewTab = useCallback(() => {
|
||||
window.open(href, '_blank')
|
||||
}, [href])
|
||||
|
||||
const handleViewSchema = useCallback(() => {
|
||||
setIsSchemaModalOpen(true)
|
||||
}, [])
|
||||
|
||||
const handleCopyId = useCallback(() => {
|
||||
navigator.clipboard.writeText(table.id)
|
||||
}, [table.id])
|
||||
|
||||
const handleDeleteFromContextMenu = useCallback(() => {
|
||||
setIsDeleteDialogOpen(true)
|
||||
}, [])
|
||||
|
||||
const columnCount = table.schema.columns.length
|
||||
const shortId = `tb-${table.id.slice(0, 8)}`
|
||||
|
||||
return (
|
||||
<>
|
||||
<div
|
||||
role='button'
|
||||
tabIndex={0}
|
||||
data-table-card
|
||||
className='h-full cursor-pointer'
|
||||
onClick={handleClick}
|
||||
onKeyDown={handleKeyDown}
|
||||
onContextMenu={handleContextMenu}
|
||||
>
|
||||
<div className='group flex h-full flex-col gap-[12px] rounded-[4px] bg-[var(--surface-3)] px-[8px] py-[6px] transition-colors hover:bg-[var(--surface-4)] dark:bg-[var(--surface-4)] dark:hover:bg-[var(--surface-5)]'>
|
||||
<div className='flex items-center justify-between gap-[8px]'>
|
||||
<h3 className='min-w-0 flex-1 truncate font-medium text-[14px] text-[var(--text-primary)]'>
|
||||
{table.name}
|
||||
</h3>
|
||||
<div className='flex items-center gap-[4px]'>
|
||||
<Badge className='flex-shrink-0 rounded-[4px] text-[12px]'>{shortId}</Badge>
|
||||
<Button
|
||||
ref={menuButtonRef}
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
className='h-[20px] w-[20px] flex-shrink-0 p-0 text-[var(--text-tertiary)]'
|
||||
onClick={handleMenuButtonClick}
|
||||
>
|
||||
<svg className='h-[14px] w-[14px]' viewBox='0 0 16 16' fill='currentColor'>
|
||||
<circle cx='3' cy='8' r='1.5' />
|
||||
<circle cx='8' cy='8' r='1.5' />
|
||||
<circle cx='13' cy='8' r='1.5' />
|
||||
</svg>
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className='flex flex-1 flex-col gap-[8px]'>
|
||||
<div className='flex items-center justify-between'>
|
||||
<div className='flex items-center gap-[12px] text-[12px] text-[var(--text-tertiary)]'>
|
||||
<span className='flex items-center gap-[4px]'>
|
||||
<Columns className='h-[12px] w-[12px]' />
|
||||
{columnCount} {columnCount === 1 ? 'col' : 'cols'}
|
||||
</span>
|
||||
<span className='flex items-center gap-[4px]'>
|
||||
<Rows3 className='h-[12px] w-[12px]' />
|
||||
{table.rowCount} {table.rowCount === 1 ? 'row' : 'rows'}
|
||||
</span>
|
||||
</div>
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<span className='text-[12px] text-[var(--text-tertiary)]'>
|
||||
last updated: {formatRelativeTime(table.updatedAt)}
|
||||
</span>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>{formatAbsoluteDate(table.updatedAt)}</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
|
||||
<div className='h-0 w-full border-[var(--divider)] border-t' />
|
||||
|
||||
<p className='line-clamp-2 h-[36px] text-[12px] text-[var(--text-tertiary)] leading-[18px]'>
|
||||
{table.description || 'No description'}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Delete Confirmation Modal */}
|
||||
<Modal open={isDeleteDialogOpen} onOpenChange={setIsDeleteDialogOpen}>
|
||||
<ModalContent size='sm'>
|
||||
<ModalHeader>Delete Table</ModalHeader>
|
||||
<ModalBody>
|
||||
<p className='text-[12px] text-[var(--text-secondary)]'>
|
||||
Are you sure you want to delete{' '}
|
||||
<span className='font-medium text-[var(--text-primary)]'>{table.name}</span>? This
|
||||
will permanently delete all {table.rowCount} rows.{' '}
|
||||
<span className='text-[var(--text-error)]'>This action cannot be undone.</span>
|
||||
</p>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<Button
|
||||
variant='default'
|
||||
onClick={() => setIsDeleteDialogOpen(false)}
|
||||
disabled={deleteTable.isPending}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button variant='destructive' onClick={handleDelete} disabled={deleteTable.isPending}>
|
||||
{deleteTable.isPending ? 'Deleting...' : 'Delete'}
|
||||
</Button>
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
|
||||
{/* Schema Viewer Modal */}
|
||||
<SchemaModal
|
||||
isOpen={isSchemaModalOpen}
|
||||
onClose={() => setIsSchemaModalOpen(false)}
|
||||
columns={table.schema.columns}
|
||||
/>
|
||||
|
||||
<TableCardContextMenu
|
||||
isOpen={isContextMenuOpen}
|
||||
position={contextMenuPosition}
|
||||
menuRef={menuRef}
|
||||
onClose={closeContextMenu}
|
||||
onOpenInNewTab={handleOpenInNewTab}
|
||||
onViewSchema={handleViewSchema}
|
||||
onCopyId={handleCopyId}
|
||||
onDelete={handleDeleteFromContextMenu}
|
||||
disableDelete={userPermissions.canEdit !== true}
|
||||
/>
|
||||
</>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,76 @@
|
||||
'use client'
|
||||
|
||||
import { Popover, PopoverAnchor, PopoverContent, PopoverItem } from '@/components/emcn'
|
||||
|
||||
interface TableListContextMenuProps {
|
||||
/**
|
||||
* Whether the context menu is open
|
||||
*/
|
||||
isOpen: boolean
|
||||
/**
|
||||
* Position of the context menu
|
||||
*/
|
||||
position: { x: number; y: number }
|
||||
/**
|
||||
* Ref for the menu element
|
||||
*/
|
||||
menuRef: React.RefObject<HTMLDivElement | null>
|
||||
/**
|
||||
* Callback when menu should close
|
||||
*/
|
||||
onClose: () => void
|
||||
/**
|
||||
* Callback when add table is clicked
|
||||
*/
|
||||
onAddTable?: () => void
|
||||
/**
|
||||
* Whether the add option is disabled
|
||||
* @default false
|
||||
*/
|
||||
disableAdd?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Context menu component for the tables list page.
|
||||
* Displays "Add table" option when right-clicking on empty space.
|
||||
*/
|
||||
export function TableListContextMenu({
|
||||
isOpen,
|
||||
position,
|
||||
menuRef,
|
||||
onClose,
|
||||
onAddTable,
|
||||
disableAdd = false,
|
||||
}: TableListContextMenuProps) {
|
||||
return (
|
||||
<Popover
|
||||
open={isOpen}
|
||||
onOpenChange={(open) => !open && onClose()}
|
||||
variant='secondary'
|
||||
size='sm'
|
||||
>
|
||||
<PopoverAnchor
|
||||
style={{
|
||||
position: 'fixed',
|
||||
left: `${position.x}px`,
|
||||
top: `${position.y}px`,
|
||||
width: '1px',
|
||||
height: '1px',
|
||||
}}
|
||||
/>
|
||||
<PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
|
||||
{onAddTable && (
|
||||
<PopoverItem
|
||||
disabled={disableAdd}
|
||||
onClick={() => {
|
||||
onAddTable()
|
||||
onClose()
|
||||
}}
|
||||
>
|
||||
Add table
|
||||
</PopoverItem>
|
||||
)}
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,197 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useMemo, useState } from 'react'
|
||||
import { ChevronDown, Database, Search } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import {
|
||||
Button,
|
||||
Input,
|
||||
Popover,
|
||||
PopoverContent,
|
||||
PopoverItem,
|
||||
PopoverTrigger,
|
||||
Tooltip,
|
||||
} from '@/components/emcn'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
|
||||
import { useTablesList } from '@/hooks/queries/use-tables'
|
||||
import { useDebounce } from '@/hooks/use-debounce'
|
||||
import { filterTables, sortTables } from '../lib/utils'
|
||||
import { SORT_OPTIONS, type SortOption, type SortOrder } from './constants'
|
||||
import { CreateModal } from './create-modal'
|
||||
import { EmptyState } from './empty-state'
|
||||
import { ErrorState } from './error-state'
|
||||
import { LoadingState } from './loading-state'
|
||||
import { TableCard } from './table-card'
|
||||
import { TableListContextMenu } from './table-list-context-menu'
|
||||
|
||||
export function TablesView() {
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
const { data: tables = [], isLoading, error } = useTablesList(workspaceId)
|
||||
|
||||
const [searchQuery, setSearchQuery] = useState('')
|
||||
const debouncedSearchQuery = useDebounce(searchQuery, 300)
|
||||
const [isCreateModalOpen, setIsCreateModalOpen] = useState(false)
|
||||
const [isSortPopoverOpen, setIsSortPopoverOpen] = useState(false)
|
||||
const [sortBy, setSortBy] = useState<SortOption>('updatedAt')
|
||||
const [sortOrder, setSortOrder] = useState<SortOrder>('desc')
|
||||
|
||||
const {
|
||||
isOpen: isListContextMenuOpen,
|
||||
position: listContextMenuPosition,
|
||||
menuRef: listMenuRef,
|
||||
handleContextMenu: handleListContextMenu,
|
||||
closeMenu: closeListContextMenu,
|
||||
} = useContextMenu()
|
||||
|
||||
/**
|
||||
* Handle context menu on the content area - only show menu when clicking on empty space
|
||||
*/
|
||||
const handleContentContextMenu = useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
const target = e.target as HTMLElement
|
||||
const isOnCard = target.closest('[data-table-card]')
|
||||
const isOnInteractive = target.closest('button, input, a, [role="button"]')
|
||||
|
||||
if (!isOnCard && !isOnInteractive) {
|
||||
handleListContextMenu(e)
|
||||
}
|
||||
},
|
||||
[handleListContextMenu]
|
||||
)
|
||||
|
||||
/**
|
||||
* Handle add table from context menu
|
||||
*/
|
||||
const handleAddTable = useCallback(() => {
|
||||
setIsCreateModalOpen(true)
|
||||
}, [])
|
||||
|
||||
const currentSortValue = `${sortBy}-${sortOrder}`
|
||||
const currentSortLabel =
|
||||
SORT_OPTIONS.find((opt) => opt.value === currentSortValue)?.label || 'Last Updated'
|
||||
|
||||
/**
|
||||
* Handles sort option change from dropdown
|
||||
*/
|
||||
const handleSortChange = (value: string) => {
|
||||
const [field, order] = value.split('-') as [SortOption, SortOrder]
|
||||
setSortBy(field)
|
||||
setSortOrder(order)
|
||||
setIsSortPopoverOpen(false)
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter and sort tables based on search query and sort options
|
||||
*/
|
||||
const filteredAndSortedTables = useMemo(() => {
|
||||
const filtered = filterTables(tables, debouncedSearchQuery)
|
||||
return sortTables(filtered, sortBy, sortOrder)
|
||||
}, [tables, debouncedSearchQuery, sortBy, sortOrder])
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className='flex h-full flex-1 flex-col'>
|
||||
<div className='flex flex-1 overflow-hidden'>
|
||||
<div
|
||||
className='flex flex-1 flex-col overflow-auto bg-white px-[24px] pt-[28px] pb-[24px] dark:bg-[var(--bg)]'
|
||||
onContextMenu={handleContentContextMenu}
|
||||
>
|
||||
<div>
|
||||
<div className='flex items-start gap-[12px]'>
|
||||
<div className='flex h-[26px] w-[26px] items-center justify-center rounded-[6px] border border-[#64748B] bg-[#F1F5F9] dark:border-[#334155] dark:bg-[#0F172A]'>
|
||||
<Database className='h-[14px] w-[14px] text-[#64748B] dark:text-[#CBD5E1]' />
|
||||
</div>
|
||||
<h1 className='font-medium text-[18px]'>Tables</h1>
|
||||
</div>
|
||||
<p className='mt-[10px] text-[14px] text-[var(--text-tertiary)]'>
|
||||
Create and manage data tables for your workflows.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className='mt-[14px] flex items-center justify-between'>
|
||||
<div className='flex h-[32px] w-[400px] items-center gap-[6px] rounded-[8px] bg-[var(--surface-4)] px-[8px]'>
|
||||
<Search className='h-[14px] w-[14px] text-[var(--text-subtle)]' />
|
||||
<Input
|
||||
placeholder='Search'
|
||||
value={searchQuery}
|
||||
onChange={(e) => setSearchQuery(e.target.value)}
|
||||
className='flex-1 border-0 bg-transparent px-0 font-medium text-[var(--text-secondary)] text-small leading-none placeholder:text-[var(--text-subtle)] focus-visible:ring-0 focus-visible:ring-offset-0'
|
||||
/>
|
||||
</div>
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
{tables.length > 0 && (
|
||||
<Popover open={isSortPopoverOpen} onOpenChange={setIsSortPopoverOpen}>
|
||||
<PopoverTrigger asChild>
|
||||
<Button variant='default' className='h-[32px] rounded-[6px]'>
|
||||
{currentSortLabel}
|
||||
<ChevronDown className='ml-2 h-4 w-4 text-muted-foreground' />
|
||||
</Button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent align='end' side='bottom' sideOffset={4}>
|
||||
<div className='flex flex-col gap-[2px]'>
|
||||
{SORT_OPTIONS.map((option) => (
|
||||
<PopoverItem
|
||||
key={option.value}
|
||||
active={currentSortValue === option.value}
|
||||
onClick={() => handleSortChange(option.value)}
|
||||
>
|
||||
{option.label}
|
||||
</PopoverItem>
|
||||
))}
|
||||
</div>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
)}
|
||||
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
onClick={() => setIsCreateModalOpen(true)}
|
||||
disabled={userPermissions.canEdit !== true}
|
||||
variant='tertiary'
|
||||
className='h-[32px] rounded-[6px]'
|
||||
>
|
||||
Create
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
{userPermissions.canEdit !== true && (
|
||||
<Tooltip.Content>Write permission required to create tables</Tooltip.Content>
|
||||
)}
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className='mt-[24px] grid grid-cols-1 gap-[20px] md:grid-cols-2 lg:grid-cols-3 xl:grid-cols-4'>
|
||||
{isLoading ? (
|
||||
<LoadingState />
|
||||
) : error ? (
<ErrorState error={error} />
) : filteredAndSortedTables.length === 0 ? (
<EmptyState hasSearchQuery={!!debouncedSearchQuery} />
|
||||
) : (
|
||||
filteredAndSortedTables.map((table) => (
|
||||
<TableCard key={table.id} table={table} workspaceId={workspaceId} />
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<CreateModal isOpen={isCreateModalOpen} onClose={() => setIsCreateModalOpen(false)} />
|
||||
|
||||
<TableListContextMenu
|
||||
isOpen={isListContextMenuOpen}
|
||||
position={listContextMenuPosition}
|
||||
menuRef={listMenuRef}
|
||||
onClose={closeListContextMenu}
|
||||
onAddTable={handleAddTable}
|
||||
disableAdd={userPermissions.canEdit !== true}
|
||||
/>
|
||||
</>
|
||||
)
|
||||
}
|
||||
7
apps/sim/app/workspace/[workspaceId]/tables/layout.tsx
Normal file
@@ -0,0 +1,7 @@
|
||||
export default function TablesLayout({ children }: { children: React.ReactNode }) {
|
||||
return (
|
||||
<div className='flex h-full flex-1 flex-col overflow-hidden pl-[var(--sidebar-width)]'>
|
||||
{children}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
1
apps/sim/app/workspace/[workspaceId]/tables/lib/index.ts
Normal file
@@ -0,0 +1 @@
|
||||
export * from './utils'
|
||||
83
apps/sim/app/workspace/[workspaceId]/tables/lib/utils.ts
Normal file
@@ -0,0 +1,83 @@
|
||||
import type { TableDefinition } from '@/lib/table'
|
||||
import type { SortOption, SortOrder } from '../components/constants'
|
||||
|
||||
/**
|
||||
* Sort tables by the specified field and order
|
||||
*/
|
||||
export function sortTables(
|
||||
tables: TableDefinition[],
|
||||
sortBy: SortOption,
|
||||
sortOrder: SortOrder
|
||||
): TableDefinition[] {
|
||||
return [...tables].sort((a, b) => {
|
||||
let comparison = 0
|
||||
|
||||
switch (sortBy) {
|
||||
case 'name':
|
||||
comparison = a.name.localeCompare(b.name)
|
||||
break
|
||||
case 'createdAt':
|
||||
comparison = new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime()
|
||||
break
|
||||
case 'updatedAt':
|
||||
comparison = new Date(a.updatedAt).getTime() - new Date(b.updatedAt).getTime()
|
||||
break
|
||||
case 'rowCount':
|
||||
comparison = a.rowCount - b.rowCount
|
||||
break
|
||||
case 'columnCount':
|
||||
comparison = a.schema.columns.length - b.schema.columns.length
|
||||
break
|
||||
}
|
||||
|
||||
return sortOrder === 'asc' ? comparison : -comparison
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter tables by search query
|
||||
*/
|
||||
export function filterTables(tables: TableDefinition[], searchQuery: string): TableDefinition[] {
|
||||
if (!searchQuery.trim()) {
|
||||
return tables
|
||||
}
|
||||
|
||||
const query = searchQuery.toLowerCase()
|
||||
return tables.filter(
|
||||
(table) =>
|
||||
table.name.toLowerCase().includes(query) || table.description?.toLowerCase().includes(query)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a date as relative time (e.g., "5m ago", "2d ago")
|
||||
*/
|
||||
export function formatRelativeTime(dateValue: string | Date): string {
|
||||
const dateString = typeof dateValue === 'string' ? dateValue : dateValue.toISOString()
|
||||
const date = new Date(dateString)
|
||||
const now = new Date()
|
||||
const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000)
|
||||
|
||||
if (diffInSeconds < 60) return 'just now'
|
||||
if (diffInSeconds < 3600) return `${Math.floor(diffInSeconds / 60)}m ago`
|
||||
if (diffInSeconds < 86400) return `${Math.floor(diffInSeconds / 3600)}h ago`
|
||||
if (diffInSeconds < 604800) return `${Math.floor(diffInSeconds / 86400)}d ago`
|
||||
if (diffInSeconds < 2592000) return `${Math.floor(diffInSeconds / 604800)}w ago`
|
||||
if (diffInSeconds < 31536000) return `${Math.floor(diffInSeconds / 2592000)}mo ago`
|
||||
return `${Math.floor(diffInSeconds / 31536000)}y ago`
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a date as an absolute date string (e.g., "Jan 15, 2024, 10:30 AM")
|
||||
*/
|
||||
export function formatAbsoluteDate(dateValue: string | Date): string {
|
||||
const dateString = typeof dateValue === 'string' ? dateValue : dateValue.toISOString()
|
||||
const date = new Date(dateString)
|
||||
return date.toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
})
|
||||
}
|
||||
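For reference, a minimal usage sketch of the helpers above (TypeScript). The `tables` array and the search term are hypothetical inputs, assumed to be the same TableDefinition[] the list page fetches; only fields this diff actually references (name, rowCount, updatedAt) are used, and the import path matches how TablesView and TableCard import these utilities:

// Hypothetical caller: filter by a search term, then sort by most recently updated.
import { filterTables, formatRelativeTime, sortTables } from '../lib/utils'

const visible = sortTables(filterTables(tables, 'invoices'), 'updatedAt', 'desc')
const summaries = visible.map(
  (t) => `${t.name}: ${t.rowCount} rows, updated ${formatRelativeTime(t.updatedAt)}`
)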
26
apps/sim/app/workspace/[workspaceId]/tables/page.tsx
Normal file
@@ -0,0 +1,26 @@
|
||||
import { redirect } from 'next/navigation'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
|
||||
import { TablesView } from './components'
|
||||
|
||||
interface TablesPageProps {
|
||||
params: Promise<{
|
||||
workspaceId: string
|
||||
}>
|
||||
}
|
||||
|
||||
export default async function TablesPage({ params }: TablesPageProps) {
|
||||
const { workspaceId } = await params
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
redirect('/')
|
||||
}
|
||||
|
||||
const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId)
|
||||
if (!hasPermission) {
|
||||
redirect('/')
|
||||
}
|
||||
|
||||
return <TablesView />
|
||||
}
|
||||
@@ -212,8 +212,10 @@ export default function Templates({
|
||||
) : filteredTemplates.length === 0 ? (
|
||||
<div className='col-span-full flex h-64 items-center justify-center rounded-lg border border-muted-foreground/25 bg-muted/20'>
|
||||
<div className='text-center'>
|
||||
<p className='font-medium text-muted-foreground text-sm'>{emptyState.title}</p>
|
||||
<p className='mt-1 text-muted-foreground/70 text-xs'>{emptyState.description}</p>
|
||||
<p className='font-medium text-[var(--text-secondary)] text-sm'>
|
||||
{emptyState.title}
|
||||
</p>
|
||||
<p className='mt-1 text-[var(--text-muted)] text-xs'>{emptyState.description}</p>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
|
||||
@@ -246,6 +246,7 @@ export const Code = memo(function Code({
|
||||
case 'json-schema':
|
||||
return 'Describe the JSON schema to generate...'
|
||||
case 'json-object':
|
||||
case 'table-schema':
|
||||
return 'Describe the JSON object to generate...'
|
||||
default:
|
||||
return 'Describe the JavaScript code to generate...'
|
||||
@@ -270,9 +271,14 @@ export const Code = memo(function Code({
|
||||
return wandConfig
|
||||
}, [wandConfig, languageValue])
|
||||
|
||||
const [tableIdValue] = useSubBlockValue<string>(blockId, 'tableId')
|
||||
|
||||
const wandHook = useWand({
|
||||
wandConfig: dynamicWandConfig || { enabled: false, prompt: '' },
|
||||
currentValue: code,
|
||||
contextParams: {
|
||||
tableId: typeof tableIdValue === 'string' ? tableIdValue : null,
|
||||
},
|
||||
onStreamStart: () => handleStreamStartRef.current?.(),
|
||||
onStreamChunk: (chunk: string) => handleStreamChunkRef.current?.(chunk),
|
||||
onGeneratedContent: (content: string) => handleGeneratedContentRef.current?.(content),
|
||||
|
||||
@@ -0,0 +1,19 @@
|
||||
import { Plus } from 'lucide-react'
|
||||
import { Button } from '@/components/emcn'
|
||||
|
||||
interface EmptyStateProps {
|
||||
onAdd: () => void
|
||||
disabled: boolean
|
||||
label: string
|
||||
}
|
||||
|
||||
export function EmptyState({ onAdd, disabled, label }: EmptyStateProps) {
|
||||
return (
|
||||
<div className='flex items-center justify-center rounded-[4px] border border-[var(--border-1)] border-dashed py-[16px]'>
|
||||
<Button variant='ghost' size='sm' onClick={onAdd} disabled={disabled}>
|
||||
<Plus className='mr-[4px] h-[12px] w-[12px]' />
|
||||
{label}
|
||||
</Button>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,137 @@
|
||||
import { X } from 'lucide-react'
|
||||
import { Button, Combobox, type ComboboxOption, Input } from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import type { FilterRule } from '@/lib/table/query-builder/constants'
|
||||
import { formatDisplayText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/formatted-text'
|
||||
import { SubBlockInputController } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/sub-block-input-controller'
|
||||
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
|
||||
|
||||
interface FilterRuleRowProps {
|
||||
blockId: string
|
||||
subBlockId: string
|
||||
rule: FilterRule
|
||||
index: number
|
||||
columns: ComboboxOption[]
|
||||
comparisonOptions: ComboboxOption[]
|
||||
logicalOptions: ComboboxOption[]
|
||||
isReadOnly: boolean
|
||||
isPreview: boolean
|
||||
disabled: boolean
|
||||
onRemove: (id: string) => void
|
||||
onUpdate: (id: string, field: keyof FilterRule, value: string) => void
|
||||
}
|
||||
|
||||
export function FilterRuleRow({
|
||||
blockId,
|
||||
subBlockId,
|
||||
rule,
|
||||
index,
|
||||
columns,
|
||||
comparisonOptions,
|
||||
logicalOptions,
|
||||
isReadOnly,
|
||||
isPreview,
|
||||
disabled,
|
||||
onRemove,
|
||||
onUpdate,
|
||||
}: FilterRuleRowProps) {
|
||||
const accessiblePrefixes = useAccessibleReferencePrefixes(blockId)
|
||||
|
||||
return (
|
||||
<div className='flex items-center gap-[6px]'>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={() => onRemove(rule.id)}
|
||||
disabled={isReadOnly}
|
||||
className='h-[24px] w-[24px] shrink-0 p-0 text-[var(--text-tertiary)] hover:text-[var(--text-primary)]'
|
||||
>
|
||||
<X className='h-[12px] w-[12px]' />
|
||||
</Button>
|
||||
|
||||
<div className='w-[80px] shrink-0'>
|
||||
{index === 0 ? (
|
||||
<Combobox
|
||||
size='sm'
|
||||
options={[{ value: 'where', label: 'where' }]}
|
||||
value='where'
|
||||
disabled
|
||||
/>
|
||||
) : (
|
||||
<Combobox
|
||||
size='sm'
|
||||
options={logicalOptions}
|
||||
value={rule.logicalOperator}
|
||||
onChange={(v) => onUpdate(rule.id, 'logicalOperator', v as 'and' | 'or')}
|
||||
disabled={isReadOnly}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className='w-[100px] shrink-0'>
|
||||
<Combobox
|
||||
size='sm'
|
||||
options={columns}
|
||||
value={rule.column}
|
||||
onChange={(v) => onUpdate(rule.id, 'column', v)}
|
||||
placeholder='Column'
|
||||
disabled={isReadOnly}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className='w-[110px] shrink-0'>
|
||||
<Combobox
|
||||
size='sm'
|
||||
options={comparisonOptions}
|
||||
value={rule.operator}
|
||||
onChange={(v) => onUpdate(rule.id, 'operator', v)}
|
||||
disabled={isReadOnly}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className='relative min-w-[80px] flex-1'>
|
||||
<SubBlockInputController
|
||||
blockId={blockId}
|
||||
subBlockId={`${subBlockId}_filter_${rule.id}`}
|
||||
config={{ id: `filter_value_${rule.id}`, type: 'short-input' }}
|
||||
value={rule.value}
|
||||
onChange={(newValue) => onUpdate(rule.id, 'value', newValue)}
|
||||
isPreview={isPreview}
|
||||
disabled={disabled}
|
||||
>
|
||||
{({ ref, value: ctrlValue, onChange, onKeyDown, onDrop, onDragOver }) => {
|
||||
const formattedText = formatDisplayText(ctrlValue, {
|
||||
accessiblePrefixes,
|
||||
highlightAll: !accessiblePrefixes,
|
||||
})
|
||||
|
||||
return (
|
||||
<div className='relative'>
|
||||
<Input
|
||||
ref={ref as React.RefObject<HTMLInputElement>}
|
||||
className='h-[28px] w-full overflow-auto text-[12px] text-transparent caret-foreground [-ms-overflow-style:none] [scrollbar-width:none] placeholder:text-muted-foreground/50 [&::-webkit-scrollbar]:hidden'
|
||||
value={ctrlValue}
|
||||
onChange={onChange as (e: React.ChangeEvent<HTMLInputElement>) => void}
|
||||
onKeyDown={onKeyDown as (e: React.KeyboardEvent<HTMLInputElement>) => void}
|
||||
onDrop={onDrop as (e: React.DragEvent<HTMLInputElement>) => void}
|
||||
onDragOver={onDragOver as (e: React.DragEvent<HTMLInputElement>) => void}
|
||||
placeholder='Value'
|
||||
disabled={isReadOnly}
|
||||
autoComplete='off'
|
||||
/>
|
||||
<div
|
||||
className={cn(
|
||||
'pointer-events-none absolute inset-0 flex items-center overflow-x-auto bg-transparent px-[8px] py-[6px] font-medium font-sans text-[12px] text-foreground [-ms-overflow-style:none] [scrollbar-width:none] [&::-webkit-scrollbar]:hidden',
|
||||
(isPreview || disabled) && 'opacity-50'
|
||||
)}
|
||||
>
|
||||
<div className='min-w-fit whitespace-pre'>{formattedText}</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}}
|
||||
</SubBlockInputController>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,90 @@
|
||||
'use client'
|
||||
|
||||
import { useMemo } from 'react'
|
||||
import { Plus } from 'lucide-react'
|
||||
import { Button } from '@/components/emcn'
|
||||
import { useTableColumns } from '@/lib/table/hooks'
|
||||
import type { FilterRule } from '@/lib/table/query-builder/constants'
|
||||
import { useFilterBuilder } from '@/lib/table/query-builder/use-query-builder'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { EmptyState } from './components/empty-state'
|
||||
import { FilterRuleRow } from './components/filter-rule-row'
|
||||
|
||||
interface FilterBuilderProps {
|
||||
blockId: string
|
||||
subBlockId: string
|
||||
isPreview?: boolean
|
||||
previewValue?: FilterRule[] | null
|
||||
disabled?: boolean
|
||||
columns?: Array<{ value: string; label: string }>
|
||||
tableIdSubBlockId?: string
|
||||
}
|
||||
|
||||
/** Visual builder for table filter rules in workflow blocks. */
|
||||
export function FilterBuilder({
|
||||
blockId,
|
||||
subBlockId,
|
||||
isPreview = false,
|
||||
previewValue,
|
||||
disabled = false,
|
||||
columns: propColumns,
|
||||
tableIdSubBlockId = 'tableId',
|
||||
}: FilterBuilderProps) {
|
||||
const [storeValue, setStoreValue] = useSubBlockValue<FilterRule[]>(blockId, subBlockId)
|
||||
const [tableIdValue] = useSubBlockValue<string>(blockId, tableIdSubBlockId)
|
||||
|
||||
const dynamicColumns = useTableColumns({ tableId: tableIdValue })
|
||||
const columns = useMemo(() => {
|
||||
if (propColumns && propColumns.length > 0) return propColumns
|
||||
return dynamicColumns
|
||||
}, [propColumns, dynamicColumns])
|
||||
|
||||
const value = isPreview ? previewValue : storeValue
|
||||
const rules: FilterRule[] = Array.isArray(value) && value.length > 0 ? value : []
|
||||
const isReadOnly = isPreview || disabled
|
||||
|
||||
const { comparisonOptions, logicalOptions, addRule, removeRule, updateRule } = useFilterBuilder({
|
||||
columns,
|
||||
rules,
|
||||
setRules: setStoreValue,
|
||||
isReadOnly,
|
||||
})
|
||||
|
||||
return (
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
{rules.length === 0 ? (
|
||||
<EmptyState onAdd={addRule} disabled={isReadOnly} label='Add filter rule' />
|
||||
) : (
|
||||
<>
|
||||
{rules.map((rule, index) => (
|
||||
<FilterRuleRow
|
||||
key={rule.id}
|
||||
blockId={blockId}
|
||||
subBlockId={subBlockId}
|
||||
rule={rule}
|
||||
index={index}
|
||||
columns={columns}
|
||||
comparisonOptions={comparisonOptions}
|
||||
logicalOptions={logicalOptions}
|
||||
isReadOnly={isReadOnly}
|
||||
isPreview={isPreview}
|
||||
disabled={disabled}
|
||||
onRemove={removeRule}
|
||||
onUpdate={updateRule}
|
||||
/>
|
||||
))}
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={addRule}
|
||||
disabled={isReadOnly}
|
||||
className='self-start'
|
||||
>
|
||||
<Plus className='mr-[4px] h-[12px] w-[12px]' />
|
||||
Add rule
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -9,6 +9,7 @@ export { Dropdown } from './dropdown/dropdown'
|
||||
export { EvalInput } from './eval-input/eval-input'
|
||||
export { FileSelectorInput } from './file-selector/file-selector-input'
|
||||
export { FileUpload } from './file-upload/file-upload'
|
||||
export { FilterBuilder } from './filter-builder/filter-builder'
|
||||
export { FolderSelectorInput } from './folder-selector/components/folder-selector-input'
|
||||
export { GroupedCheckboxList } from './grouped-checkbox-list/grouped-checkbox-list'
|
||||
export { InputMapping } from './input-mapping/input-mapping'
|
||||
@@ -26,10 +27,12 @@ export { SheetSelectorInput } from './sheet-selector/sheet-selector-input'
|
||||
export { ShortInput } from './short-input/short-input'
|
||||
export { SlackSelectorInput } from './slack-selector/slack-selector-input'
|
||||
export { SliderInput } from './slider-input/slider-input'
|
||||
export { SortBuilder } from './sort-builder/sort-builder'
|
||||
export { InputFormat } from './starter/input-format'
|
||||
export { SubBlockInputController } from './sub-block-input-controller'
|
||||
export { Switch } from './switch/switch'
|
||||
export { Table } from './table/table'
|
||||
export { TableSelector } from './table-selector/table-selector'
|
||||
export { Text } from './text/text'
|
||||
export { TimeInput } from './time-input/time-input'
|
||||
export { ToolInput } from './tool-input/tool-input'
|
||||
|
||||
@@ -0,0 +1,19 @@
|
||||
import { Plus } from 'lucide-react'
|
||||
import { Button } from '@/components/emcn'
|
||||
|
||||
interface EmptyStateProps {
|
||||
onAdd: () => void
|
||||
disabled: boolean
|
||||
label: string
|
||||
}
|
||||
|
||||
export function EmptyState({ onAdd, disabled, label }: EmptyStateProps) {
|
||||
return (
|
||||
<div className='flex items-center justify-center rounded-[4px] border border-[var(--border-1)] border-dashed py-[16px]'>
|
||||
<Button variant='ghost' size='sm' onClick={onAdd} disabled={disabled}>
|
||||
<Plus className='mr-[4px] h-[12px] w-[12px]' />
|
||||
{label}
|
||||
</Button>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,67 @@
|
||||
import { X } from 'lucide-react'
|
||||
import { Button, Combobox, type ComboboxOption } from '@/components/emcn'
|
||||
import type { SortRule } from '@/lib/table/query-builder/constants'
|
||||
|
||||
interface SortRuleRowProps {
|
||||
rule: SortRule
|
||||
index: number
|
||||
columns: ComboboxOption[]
|
||||
directionOptions: ComboboxOption[]
|
||||
isReadOnly: boolean
|
||||
onRemove: (id: string) => void
|
||||
onUpdate: (id: string, field: keyof SortRule, value: string) => void
|
||||
}
|
||||
|
||||
export function SortRuleRow({
|
||||
rule,
|
||||
index,
|
||||
columns,
|
||||
directionOptions,
|
||||
isReadOnly,
|
||||
onRemove,
|
||||
onUpdate,
|
||||
}: SortRuleRowProps) {
|
||||
return (
|
||||
<div className='flex items-center gap-[6px]'>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={() => onRemove(rule.id)}
|
||||
disabled={isReadOnly}
|
||||
className='h-[24px] w-[24px] shrink-0 p-0 text-[var(--text-tertiary)] hover:text-[var(--text-primary)]'
|
||||
>
|
||||
<X className='h-[12px] w-[12px]' />
|
||||
</Button>
|
||||
|
||||
<div className='w-[90px] shrink-0'>
|
||||
<Combobox
|
||||
size='sm'
|
||||
options={[{ value: String(index + 1), label: index === 0 ? 'order by' : 'then by' }]}
|
||||
value={String(index + 1)}
|
||||
disabled
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className='min-w-[120px] flex-1'>
|
||||
<Combobox
|
||||
size='sm'
|
||||
options={columns}
|
||||
value={rule.column}
|
||||
onChange={(v) => onUpdate(rule.id, 'column', v)}
|
||||
placeholder='Column'
|
||||
disabled={isReadOnly}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className='w-[110px] shrink-0'>
|
||||
<Combobox
|
||||
size='sm'
|
||||
options={directionOptions}
|
||||
value={rule.direction}
|
||||
onChange={(v) => onUpdate(rule.id, 'direction', v as 'asc' | 'desc')}
|
||||
disabled={isReadOnly}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,110 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useMemo } from 'react'
|
||||
import { Plus } from 'lucide-react'
|
||||
import { nanoid } from 'nanoid'
|
||||
import { Button, type ComboboxOption } from '@/components/emcn'
|
||||
import { useTableColumns } from '@/lib/table/hooks'
|
||||
import { SORT_DIRECTIONS, type SortRule } from '@/lib/table/query-builder/constants'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { EmptyState } from './components/empty-state'
|
||||
import { SortRuleRow } from './components/sort-rule-row'
|
||||
|
||||
interface SortBuilderProps {
|
||||
blockId: string
|
||||
subBlockId: string
|
||||
isPreview?: boolean
|
||||
previewValue?: SortRule[] | null
|
||||
disabled?: boolean
|
||||
columns?: Array<{ value: string; label: string }>
|
||||
tableIdSubBlockId?: string
|
||||
}
|
||||
|
||||
const createDefaultRule = (columns: ComboboxOption[]): SortRule => ({
|
||||
id: nanoid(),
|
||||
column: columns[0]?.value || '',
|
||||
direction: 'asc',
|
||||
})
|
||||
|
||||
/** Visual builder for table sort rules in workflow blocks. */
|
||||
export function SortBuilder({
|
||||
blockId,
|
||||
subBlockId,
|
||||
isPreview = false,
|
||||
previewValue,
|
||||
disabled = false,
|
||||
columns: propColumns,
|
||||
tableIdSubBlockId = 'tableId',
|
||||
}: SortBuilderProps) {
|
||||
const [storeValue, setStoreValue] = useSubBlockValue<SortRule[]>(blockId, subBlockId)
|
||||
const [tableIdValue] = useSubBlockValue<string>(blockId, tableIdSubBlockId)
|
||||
|
||||
const dynamicColumns = useTableColumns({ tableId: tableIdValue, includeBuiltIn: true })
|
||||
const columns = useMemo(() => {
|
||||
if (propColumns && propColumns.length > 0) return propColumns
|
||||
return dynamicColumns
|
||||
}, [propColumns, dynamicColumns])
|
||||
|
||||
const directionOptions = useMemo(
|
||||
() => SORT_DIRECTIONS.map((dir) => ({ value: dir.value, label: dir.label })),
|
||||
[]
|
||||
)
|
||||
|
||||
const value = isPreview ? previewValue : storeValue
|
||||
const rules: SortRule[] = Array.isArray(value) && value.length > 0 ? value : []
|
||||
const isReadOnly = isPreview || disabled
|
||||
|
||||
const addRule = useCallback(() => {
|
||||
if (isReadOnly) return
|
||||
setStoreValue([...rules, createDefaultRule(columns)])
|
||||
}, [isReadOnly, rules, columns, setStoreValue])
|
||||
|
||||
const removeRule = useCallback(
|
||||
(id: string) => {
|
||||
if (isReadOnly) return
|
||||
setStoreValue(rules.filter((r) => r.id !== id))
|
||||
},
|
||||
[isReadOnly, rules, setStoreValue]
|
||||
)
|
||||
|
||||
const updateRule = useCallback(
|
||||
(id: string, field: keyof SortRule, newValue: string) => {
|
||||
if (isReadOnly) return
|
||||
setStoreValue(rules.map((r) => (r.id === id ? { ...r, [field]: newValue } : r)))
|
||||
},
|
||||
[isReadOnly, rules, setStoreValue]
|
||||
)
|
||||
|
||||
return (
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
{rules.length === 0 ? (
|
||||
<EmptyState onAdd={addRule} disabled={isReadOnly} label='Add sort rule' />
|
||||
) : (
|
||||
<>
|
||||
{rules.map((rule, index) => (
|
||||
<SortRuleRow
|
||||
key={rule.id}
|
||||
rule={rule}
|
||||
index={index}
|
||||
columns={columns}
|
||||
directionOptions={directionOptions}
|
||||
isReadOnly={isReadOnly}
|
||||
onRemove={removeRule}
|
||||
onUpdate={updateRule}
|
||||
/>
|
||||
))}
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={addRule}
|
||||
disabled={isReadOnly}
|
||||
className='self-start'
|
||||
>
|
||||
<Plus className='mr-[4px] h-[12px] w-[12px]' />
|
||||
Add sort
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
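As a point of reference, the value these builders read and write through useSubBlockValue is a plain array of rule objects. Below is a minimal sketch for the sort case, mirroring createDefaultRule above; the column names are illustrative, and the filter case is analogous but its rule defaults live in useFilterBuilder, which is not part of this diff:

import { nanoid } from 'nanoid'
import type { SortRule } from '@/lib/table/query-builder/constants'

// Order by updatedAt descending, then by name ascending (illustrative column names).
const sortRules: SortRule[] = [
  { id: nanoid(), column: 'updatedAt', direction: 'desc' },
  { id: nanoid(), column: 'name', direction: 'asc' },
]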
@@ -0,0 +1,80 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useMemo } from 'react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Combobox, type ComboboxOption } from '@/components/emcn'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { useTablesList } from '@/hooks/queries/use-tables'
|
||||
|
||||
interface TableSelectorProps {
|
||||
blockId: string
|
||||
subBlock: SubBlockConfig
|
||||
disabled?: boolean
|
||||
isPreview?: boolean
|
||||
previewValue?: string | null
|
||||
}
|
||||
|
||||
/**
|
||||
* Table selector component for selecting workspace tables
|
||||
*
|
||||
* @remarks
|
||||
* Provides a combobox to select workspace tables.
|
||||
* Uses React Query for efficient data fetching and caching.
|
||||
* The external link to navigate to the table is shown in the label area.
|
||||
*/
|
||||
export function TableSelector({
|
||||
blockId,
|
||||
subBlock,
|
||||
disabled = false,
|
||||
isPreview = false,
|
||||
previewValue,
|
||||
}: TableSelectorProps) {
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
|
||||
const [storeValue, setStoreValue] = useSubBlockValue<string>(blockId, subBlock.id)
|
||||
|
||||
// Use React Query hook for table data - it handles caching, loading, and error states
|
||||
const {
|
||||
data: tables = [],
|
||||
isLoading,
|
||||
error,
|
||||
} = useTablesList(isPreview || disabled ? undefined : workspaceId)
|
||||
|
||||
const value = isPreview ? previewValue : storeValue
|
||||
const tableId = typeof value === 'string' ? value : null
|
||||
|
||||
const options = useMemo<ComboboxOption[]>(() => {
|
||||
return tables.map((table) => ({
|
||||
label: table.name.toLowerCase(),
|
||||
value: table.id,
|
||||
}))
|
||||
}, [tables])
|
||||
|
||||
const handleChange = useCallback(
|
||||
(selectedValue: string) => {
|
||||
if (isPreview || disabled) return
|
||||
setStoreValue(selectedValue)
|
||||
},
|
||||
[isPreview, disabled, setStoreValue]
|
||||
)
|
||||
|
||||
// Convert error object to string if needed
|
||||
const errorMessage = error instanceof Error ? error.message : error ? String(error) : undefined
|
||||
|
||||
return (
|
||||
<Combobox
|
||||
options={options}
|
||||
value={tableId ?? undefined}
|
||||
onChange={handleChange}
|
||||
placeholder={subBlock.placeholder || 'Select a table'}
|
||||
disabled={disabled || isPreview}
|
||||
editable={false}
|
||||
isLoading={isLoading}
|
||||
error={errorMessage}
|
||||
searchable={options.length > 5}
|
||||
searchPlaceholder='Search...'
|
||||
/>
|
||||
)
|
||||
}
|
||||
@@ -19,11 +19,11 @@ interface TableProps {
|
||||
subBlockId: string
|
||||
columns: string[]
|
||||
isPreview?: boolean
|
||||
previewValue?: TableRow[] | null
|
||||
previewValue?: WorkflowTableRow[] | null
|
||||
disabled?: boolean
|
||||
}
|
||||
|
||||
interface TableRow {
|
||||
interface WorkflowTableRow {
|
||||
id: string
|
||||
cells: Record<string, string>
|
||||
}
|
||||
@@ -38,7 +38,7 @@ export function Table({
|
||||
}: TableProps) {
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
const [storeValue, setStoreValue] = useSubBlockValue<TableRow[]>(blockId, subBlockId)
|
||||
const [storeValue, setStoreValue] = useSubBlockValue<WorkflowTableRow[]>(blockId, subBlockId)
|
||||
const accessiblePrefixes = useAccessibleReferencePrefixes(blockId)
|
||||
|
||||
// Use the extended hook for field-level management
|
||||
@@ -73,7 +73,7 @@ export function Table({
|
||||
*/
|
||||
useEffect(() => {
|
||||
if (!isPreview && !disabled && (!Array.isArray(storeValue) || storeValue.length === 0)) {
|
||||
const initialRow: TableRow = {
|
||||
const initialRow: WorkflowTableRow = {
|
||||
id: crypto.randomUUID(),
|
||||
cells: { ...emptyCellsTemplate },
|
||||
}
|
||||
@@ -110,7 +110,7 @@ export function Table({
|
||||
}
|
||||
})
|
||||
|
||||
return validatedRows as TableRow[]
|
||||
return validatedRows as WorkflowTableRow[]
|
||||
}, [value, emptyCellsTemplate])
|
||||
|
||||
// Helper to update a cell value
|
||||
@@ -164,7 +164,12 @@ export function Table({
|
||||
</thead>
|
||||
)
|
||||
|
||||
const renderCell = (row: TableRow, rowIndex: number, column: string, cellIndex: number) => {
|
||||
const renderCell = (
|
||||
row: WorkflowTableRow,
|
||||
rowIndex: number,
|
||||
column: string,
|
||||
cellIndex: number
|
||||
) => {
|
||||
// Defensive programming: ensure row.cells exists and has the expected structure
|
||||
const hasValidCells = row.cells && typeof row.cells === 'object'
|
||||
if (!hasValidCells) logger.warn('Table row has malformed cells data:', row)
|
||||
|
||||
@@ -43,6 +43,7 @@ import {
|
||||
SlackSelectorInput,
|
||||
SliderInput,
|
||||
Table,
|
||||
TableSelector,
|
||||
TimeInput,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components'
|
||||
import { DocumentSelector } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/document-selector/document-selector'
|
||||
@@ -508,6 +509,40 @@ function TableSyncWrapper({
|
||||
)
|
||||
}
|
||||
|
||||
function TableSelectorSyncWrapper({
|
||||
blockId,
|
||||
paramId,
|
||||
value,
|
||||
onChange,
|
||||
uiComponent,
|
||||
disabled,
|
||||
isPreview,
|
||||
}: {
|
||||
blockId: string
|
||||
paramId: string
|
||||
value: string
|
||||
onChange: (value: string) => void
|
||||
uiComponent: any
|
||||
disabled: boolean
|
||||
isPreview: boolean
|
||||
}) {
|
||||
return (
|
||||
<GenericSyncWrapper blockId={blockId} paramId={paramId} value={value} onChange={onChange}>
|
||||
<TableSelector
|
||||
blockId={blockId}
|
||||
subBlock={{
|
||||
id: paramId,
|
||||
type: 'table-selector',
|
||||
placeholder: uiComponent.placeholder || 'Select a table',
|
||||
}}
|
||||
disabled={disabled}
|
||||
isPreview={isPreview}
|
||||
previewValue={value || null}
|
||||
/>
|
||||
</GenericSyncWrapper>
|
||||
)
|
||||
}
|
||||
|
||||
function TimeInputSyncWrapper({
|
||||
blockId,
|
||||
paramId,
|
||||
@@ -965,6 +1000,7 @@ const BUILT_IN_TOOL_TYPES = new Set([
|
||||
'tts',
|
||||
'stt',
|
||||
'memory',
|
||||
'table',
|
||||
'webhook_request',
|
||||
'workflow',
|
||||
])
|
||||
@@ -1142,7 +1178,8 @@ export const ToolInput = memo(function ToolInput({
|
||||
block.type === 'workflow' ||
|
||||
block.type === 'workflow_input' ||
|
||||
block.type === 'knowledge' ||
|
||||
block.type === 'function') &&
|
||||
block.type === 'function' ||
|
||||
block.type === 'table') &&
|
||||
block.type !== 'evaluator' &&
|
||||
block.type !== 'mcp' &&
|
||||
block.type !== 'file'
|
||||
@@ -2140,6 +2177,19 @@ export const ToolInput = memo(function ToolInput({
|
||||
/>
|
||||
)
|
||||
|
||||
case 'table-selector':
|
||||
return (
|
||||
<TableSelectorSyncWrapper
|
||||
blockId={blockId}
|
||||
paramId={param.id}
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
uiComponent={uiComponent}
|
||||
disabled={disabled}
|
||||
isPreview={isPreview}
|
||||
/>
|
||||
)
|
||||
|
||||
case 'combobox':
|
||||
return (
|
||||
<ComboboxSyncWrapper
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import { type JSX, type MouseEvent, memo, useRef, useState } from 'react'
|
||||
import { type JSX, type MouseEvent, memo, useCallback, useRef, useState } from 'react'
|
||||
import { isEqual } from 'lodash'
|
||||
import { AlertTriangle, ArrowLeftRight, ArrowUp } from 'lucide-react'
|
||||
import { AlertTriangle, ArrowLeftRight, ArrowUp, ExternalLink } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Button, Input, Label, Tooltip } from '@/components/emcn/components'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import type { FilterRule, SortRule } from '@/lib/table/query-builder/constants'
|
||||
import type { FieldDiffStatus } from '@/lib/workflows/diff/types'
|
||||
import {
|
||||
CheckboxList,
|
||||
@@ -16,6 +18,7 @@ import {
|
||||
EvalInput,
|
||||
FileSelectorInput,
|
||||
FileUpload,
|
||||
FilterBuilder,
|
||||
FolderSelectorInput,
|
||||
GroupedCheckboxList,
|
||||
InputFormat,
|
||||
@@ -34,8 +37,10 @@ import {
|
||||
ShortInput,
|
||||
SlackSelectorInput,
|
||||
SliderInput,
|
||||
SortBuilder,
|
||||
Switch,
|
||||
Table,
|
||||
TableSelector,
|
||||
Text,
|
||||
TimeInput,
|
||||
ToolInput,
|
||||
@@ -171,6 +176,7 @@ const getPreviewValue = (
|
||||
* @param wandState - Optional state and handlers for the AI wand feature
|
||||
* @param canonicalToggle - Optional canonical toggle metadata and handlers
|
||||
* @param canonicalToggleIsDisabled - Whether the canonical toggle is disabled
|
||||
* @param tableLinkState - Optional state for table selector external link
|
||||
* @returns The label JSX element, or `null` for switch types or when no title is defined
|
||||
*/
|
||||
const renderLabel = (
|
||||
@@ -196,7 +202,11 @@ const renderLabel = (
|
||||
disabled?: boolean
|
||||
onToggle?: () => void
|
||||
},
|
||||
canonicalToggleIsDisabled?: boolean
|
||||
canonicalToggleIsDisabled?: boolean,
|
||||
tableLinkState?: {
|
||||
hasSelectedTable: boolean
|
||||
onNavigateToTable: () => void
|
||||
}
|
||||
): JSX.Element | null => {
|
||||
if (config.type === 'switch') return null
|
||||
if (!config.title) return null
|
||||
@@ -205,6 +215,11 @@ const renderLabel = (
|
||||
const showWand = wandState?.isWandEnabled && !wandState.isPreview && !wandState.disabled
|
||||
const showCanonicalToggle = !!canonicalToggle && !wandState?.isPreview
|
||||
const canonicalToggleDisabledResolved = canonicalToggleIsDisabled ?? canonicalToggle?.disabled
|
||||
const showTableLink =
|
||||
config.type === 'table-selector' &&
|
||||
tableLinkState?.hasSelectedTable &&
|
||||
!wandState?.isPreview &&
|
||||
!wandState?.disabled
|
||||
|
||||
return (
|
||||
<div className='flex items-center justify-between gap-[6px] pl-[2px]'>
|
||||
@@ -284,6 +299,23 @@ const renderLabel = (
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
{showTableLink && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<button
|
||||
type='button'
|
||||
className='flex h-[12px] w-[12px] flex-shrink-0 items-center justify-center bg-transparent p-0'
|
||||
onClick={tableLinkState.onNavigateToTable}
|
||||
aria-label='View table'
|
||||
>
|
||||
<ExternalLink className='!h-[12px] !w-[12px] text-[var(--text-secondary)] hover:text-[var(--text-primary)]' />
|
||||
</button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content side='top'>
|
||||
<p>View table</p>
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
{showCanonicalToggle && (
|
||||
<button
|
||||
type='button'
|
||||
@@ -364,6 +396,9 @@ function SubBlockComponent({
|
||||
allowExpandInPreview,
|
||||
canonicalToggle,
|
||||
}: SubBlockProps): JSX.Element {
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
|
||||
const [isValidJson, setIsValidJson] = useState(true)
|
||||
const [isSearchActive, setIsSearchActive] = useState(false)
|
||||
const [searchQuery, setSearchQuery] = useState('')
|
||||
@@ -381,6 +416,20 @@ function SubBlockComponent({
|
||||
// Check if wand is enabled for this sub-block
|
||||
const isWandEnabled = config.wandConfig?.enabled ?? false
|
||||
|
||||
// Table selector link state
|
||||
const tableValue = subBlockValues?.[config.id]?.value
|
||||
const tableId = typeof tableValue === 'string' ? tableValue : null
|
||||
const hasSelectedTable = Boolean(tableId && !tableId.startsWith('<'))
|
||||
|
||||
/**
|
||||
* Handles navigation to the selected table in a new tab.
|
||||
*/
|
||||
const handleNavigateToTable = useCallback(() => {
|
||||
if (tableId && workspaceId) {
|
||||
window.open(`/workspace/${workspaceId}/tables/${tableId}`, '_blank')
|
||||
}
|
||||
}, [workspaceId, tableId])
|
||||
|
||||
/**
|
||||
* Handles wand icon click to activate inline prompt mode.
|
||||
* Focuses the input after a brief delay to ensure DOM is ready.
|
||||
@@ -515,6 +564,19 @@ function SubBlockComponent({
|
||||
</div>
|
||||
)
|
||||
|
||||
case 'table-selector':
|
||||
return (
|
||||
<div onMouseDown={handleMouseDown}>
|
||||
<TableSelector
|
||||
blockId={blockId}
|
||||
subBlock={config}
|
||||
disabled={isDisabled}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue as string | null}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
|
||||
case 'combobox':
|
||||
return (
|
||||
<div onMouseDown={handleMouseDown}>
|
||||
@@ -857,6 +919,28 @@ function SubBlockComponent({
|
||||
/>
|
||||
)
|
||||
|
||||
case 'filter-builder':
|
||||
return (
|
||||
<FilterBuilder
|
||||
blockId={blockId}
|
||||
subBlockId={config.id}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue as FilterRule[] | null | undefined}
|
||||
disabled={isDisabled}
|
||||
/>
|
||||
)
|
||||
|
||||
case 'sort-builder':
|
||||
return (
|
||||
<SortBuilder
|
||||
blockId={blockId}
|
||||
subBlockId={config.id}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue as SortRule[] | null | undefined}
|
||||
disabled={isDisabled}
|
||||
/>
|
||||
)
|
||||
|
||||
case 'channel-selector':
|
||||
case 'user-selector':
|
||||
return (
|
||||
@@ -953,7 +1037,11 @@ function SubBlockComponent({
|
||||
searchInputRef,
|
||||
},
|
||||
canonicalToggle,
|
||||
Boolean(canonicalToggle?.disabled || disabled || isPreview)
|
||||
Boolean(canonicalToggle?.disabled || disabled || isPreview),
|
||||
{
|
||||
hasSelectedTable,
|
||||
onNavigateToTable: handleNavigateToTable,
|
||||
}
|
||||
)}
|
||||
{renderInput()}
|
||||
</div>
|
||||
|
||||
@@ -9,6 +9,7 @@ import { cn } from '@/lib/core/utils/cn'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { createMcpToolId } from '@/lib/mcp/utils'
|
||||
import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
import type { FilterRule, SortRule } from '@/lib/table/types'
|
||||
import { BLOCK_DIMENSIONS, HANDLE_POSITIONS } from '@/lib/workflows/blocks/block-dimensions'
|
||||
import {
|
||||
buildCanonicalIndex,
|
||||
@@ -40,6 +41,7 @@ import { useCustomTools } from '@/hooks/queries/custom-tools'
|
||||
import { useMcpServers, useMcpToolsQuery } from '@/hooks/queries/mcp'
|
||||
import { useCredentialName } from '@/hooks/queries/oauth-credentials'
|
||||
import { useReactivateSchedule, useScheduleInfo } from '@/hooks/queries/schedules'
|
||||
import { useTablesList } from '@/hooks/queries/use-tables'
|
||||
import { useDeployChildWorkflow } from '@/hooks/queries/workflows'
|
||||
import { useSelectorDisplayName } from '@/hooks/use-selector-display-name'
|
||||
import { useVariablesStore } from '@/stores/panel'
|
||||
@@ -54,9 +56,9 @@ const logger = createLogger('WorkflowBlock')
|
||||
const EMPTY_SUBBLOCK_VALUES = {} as Record<string, any>
|
||||
|
||||
/**
|
||||
* Type guard for table row structure
|
||||
* Type guard for workflow table row structure (sub-block table inputs)
|
||||
*/
|
||||
interface TableRow {
|
||||
interface WorkflowTableRow {
|
||||
id: string
|
||||
cells: Record<string, string>
|
||||
}
|
||||
@@ -75,7 +77,7 @@ interface FieldFormat {
|
||||
/**
|
||||
* Checks if a value is a table row array
|
||||
*/
|
||||
const isTableRowArray = (value: unknown): value is TableRow[] => {
|
||||
const isTableRowArray = (value: unknown): value is WorkflowTableRow[] => {
|
||||
if (!Array.isArray(value) || value.length === 0) return false
|
||||
const firstItem = value[0]
|
||||
return (
|
||||
@@ -94,7 +96,11 @@ const isFieldFormatArray = (value: unknown): value is FieldFormat[] => {
|
||||
if (!Array.isArray(value) || value.length === 0) return false
|
||||
const firstItem = value[0]
|
||||
return (
|
||||
typeof firstItem === 'object' && firstItem !== null && 'id' in firstItem && 'name' in firstItem
|
||||
typeof firstItem === 'object' &&
|
||||
firstItem !== null &&
|
||||
'id' in firstItem &&
|
||||
'name' in firstItem &&
|
||||
typeof firstItem.name === 'string'
|
||||
)
|
||||
}
|
||||
|
||||
@@ -160,7 +166,8 @@ const isTagFilterArray = (value: unknown): value is TagFilterItem[] => {
|
||||
typeof firstItem === 'object' &&
|
||||
firstItem !== null &&
|
||||
'tagName' in firstItem &&
|
||||
'tagValue' in firstItem
|
||||
'tagValue' in firstItem &&
|
||||
typeof firstItem.tagName === 'string'
|
||||
)
|
||||
}
|
||||
|
||||
@@ -182,7 +189,40 @@ const isDocumentTagArray = (value: unknown): value is DocumentTagItem[] => {
|
||||
firstItem !== null &&
|
||||
'tagName' in firstItem &&
|
||||
'value' in firstItem &&
|
||||
!('tagValue' in firstItem) // Distinguish from tag filters
|
||||
!('tagValue' in firstItem) && // Distinguish from tag filters
|
||||
typeof firstItem.tagName === 'string'
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Type guard for filter condition array (used in table block filter builder)
|
||||
*/
|
||||
const isFilterConditionArray = (value: unknown): value is FilterRule[] => {
|
||||
if (!Array.isArray(value) || value.length === 0) return false
|
||||
const firstItem = value[0]
|
||||
return (
|
||||
typeof firstItem === 'object' &&
|
||||
firstItem !== null &&
|
||||
'column' in firstItem &&
|
||||
'operator' in firstItem &&
|
||||
'logicalOperator' in firstItem &&
|
||||
typeof firstItem.column === 'string'
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Type guard for sort condition array (used in table block sort builder)
|
||||
*/
|
||||
const isSortConditionArray = (value: unknown): value is SortRule[] => {
|
||||
if (!Array.isArray(value) || value.length === 0) return false
|
||||
const firstItem = value[0]
|
||||
return (
|
||||
typeof firstItem === 'object' &&
|
||||
firstItem !== null &&
|
||||
'column' in firstItem &&
|
||||
'direction' in firstItem &&
|
||||
typeof firstItem.column === 'string' &&
|
||||
(firstItem.direction === 'asc' || firstItem.direction === 'desc')
|
||||
)
|
||||
}
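A minimal sketch of the rule shapes these two guards accept (the builder output); any fields beyond the keys checked above are assumptions, and the rendered summaries refer to the getDisplayValue formatting added further down.

```typescript
// Hypothetical builder output; only column/operator/logicalOperator and
// column/direction are required by the guards above.
const filterCandidate: unknown = [
  { column: 'status', operator: 'eq', value: 'active', logicalOperator: 'and' },
  { column: 'age', operator: 'gte', value: '18', logicalOperator: 'and' },
]
const sortCandidate: unknown = [{ column: 'createdAt', direction: 'desc' }]

isFilterConditionArray(filterCandidate) // → true; later summarized as "status = active, age ≥ 18"
isSortConditionArray(sortCandidate)     // → true; later summarized as "createdAt ↓"
```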
|
||||
|
||||
@@ -230,7 +270,9 @@ export const getDisplayValue = (value: unknown): string => {
|
||||
}
|
||||
|
||||
if (isTagFilterArray(parsedValue)) {
|
||||
const validFilters = parsedValue.filter((f) => f.tagName?.trim())
|
||||
const validFilters = parsedValue.filter(
|
||||
(f) => typeof f.tagName === 'string' && f.tagName.trim() !== ''
|
||||
)
|
||||
if (validFilters.length === 0) return '-'
|
||||
if (validFilters.length === 1) return validFilters[0].tagName
|
||||
if (validFilters.length === 2) return `${validFilters[0].tagName}, ${validFilters[1].tagName}`
|
||||
@@ -238,13 +280,54 @@ export const getDisplayValue = (value: unknown): string => {
|
||||
}
|
||||
|
||||
if (isDocumentTagArray(parsedValue)) {
|
||||
const validTags = parsedValue.filter((t) => t.tagName?.trim())
|
||||
const validTags = parsedValue.filter(
|
||||
(t) => typeof t.tagName === 'string' && t.tagName.trim() !== ''
|
||||
)
|
||||
if (validTags.length === 0) return '-'
|
||||
if (validTags.length === 1) return validTags[0].tagName
|
||||
if (validTags.length === 2) return `${validTags[0].tagName}, ${validTags[1].tagName}`
|
||||
return `${validTags[0].tagName}, ${validTags[1].tagName} +${validTags.length - 2}`
|
||||
}
|
||||
|
||||
if (isFilterConditionArray(parsedValue)) {
|
||||
const validConditions = parsedValue.filter(
|
||||
(c) => typeof c.column === 'string' && c.column.trim() !== ''
|
||||
)
|
||||
if (validConditions.length === 0) return '-'
|
||||
const formatCondition = (c: FilterRule) => {
|
||||
const opLabels: Record<string, string> = {
|
||||
eq: '=',
|
||||
ne: '≠',
|
||||
gt: '>',
|
||||
gte: '≥',
|
||||
lt: '<',
|
||||
lte: '≤',
|
||||
contains: '~',
|
||||
in: 'in',
|
||||
}
|
||||
const op = opLabels[c.operator] || c.operator
|
||||
return `${c.column} ${op} ${c.value || '?'}`
|
||||
}
|
||||
if (validConditions.length === 1) return formatCondition(validConditions[0])
|
||||
if (validConditions.length === 2) {
|
||||
return `${formatCondition(validConditions[0])}, ${formatCondition(validConditions[1])}`
|
||||
}
|
||||
return `${formatCondition(validConditions[0])}, ${formatCondition(validConditions[1])} +${validConditions.length - 2}`
|
||||
}
|
||||
|
||||
if (isSortConditionArray(parsedValue)) {
|
||||
const validConditions = parsedValue.filter(
|
||||
(c) => typeof c.column === 'string' && c.column.trim() !== ''
|
||||
)
|
||||
if (validConditions.length === 0) return '-'
|
||||
const formatSort = (c: SortRule) => `${c.column} ${c.direction === 'desc' ? '↓' : '↑'}`
|
||||
if (validConditions.length === 1) return formatSort(validConditions[0])
|
||||
if (validConditions.length === 2) {
|
||||
return `${formatSort(validConditions[0])}, ${formatSort(validConditions[1])}`
|
||||
}
|
||||
return `${formatSort(validConditions[0])}, ${formatSort(validConditions[1])} +${validConditions.length - 2}`
|
||||
}
|
||||
|
||||
if (isTableRowArray(parsedValue)) {
|
||||
const nonEmptyRows = parsedValue.filter((row) => {
|
||||
const cellValues = Object.values(row.cells)
|
||||
@@ -266,7 +349,9 @@ export const getDisplayValue = (value: unknown): string => {
|
||||
}
|
||||
|
||||
if (isFieldFormatArray(parsedValue)) {
|
||||
const namedFields = parsedValue.filter((field) => field.name && field.name.trim() !== '')
|
||||
const namedFields = parsedValue.filter(
|
||||
(field) => typeof field.name === 'string' && field.name.trim() !== ''
|
||||
)
|
||||
if (namedFields.length === 0) return '-'
|
||||
if (namedFields.length === 1) return namedFields[0].name
|
||||
if (namedFields.length === 2) return `${namedFields[0].name}, ${namedFields[1].name}`
|
||||
@@ -512,6 +597,15 @@ const SubBlockRow = memo(function SubBlockRow({
|
||||
return tool?.name ?? null
|
||||
}, [subBlock?.type, rawValue, mcpToolsData])
|
||||
|
||||
const { data: tables = [] } = useTablesList(workspaceId || '')
|
||||
const tableDisplayName = useMemo(() => {
|
||||
if (subBlock?.id !== 'tableId' || typeof rawValue !== 'string') {
|
||||
return null
|
||||
}
|
||||
const table = tables.find((t) => t.id === rawValue)
|
||||
return table?.name ?? null
|
||||
}, [subBlock?.id, rawValue, tables])
|
||||
|
||||
const webhookUrlDisplayValue = useMemo(() => {
|
||||
if (subBlock?.id !== 'webhookUrlDisplay' || !blockId) {
|
||||
return null
|
||||
@@ -618,19 +712,43 @@ const SubBlockRow = memo(function SubBlockRow({
|
||||
return `${toolNames[0]}, ${toolNames[1]} +${toolNames.length - 2}`
|
||||
}, [subBlock?.type, rawValue, customTools, workspaceId])
|
||||
|
||||
const filterDisplayValue = useMemo(() => {
|
||||
const isFilterField =
|
||||
subBlock?.id === 'filter' || subBlock?.id === 'filterCriteria' || subBlock?.id === 'sort'
|
||||
|
||||
if (!isFilterField || !rawValue) return null
|
||||
|
||||
const parsedValue = tryParseJson(rawValue)
|
||||
|
||||
if (isPlainObject(parsedValue) || Array.isArray(parsedValue)) {
|
||||
try {
|
||||
const jsonStr = JSON.stringify(parsedValue, null, 0)
|
||||
if (jsonStr.length <= 35) return jsonStr
|
||||
return `${jsonStr.slice(0, 32)}...`
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}, [subBlock?.id, rawValue])
|
||||
|
||||
const isPasswordField = subBlock?.password === true
|
||||
const maskedValue = isPasswordField && value && value !== '-' ? '•••' : null
|
||||
const isMonospaceField = Boolean(filterDisplayValue)
|
||||
|
||||
const isSelectorType = subBlock?.type && SELECTOR_TYPES_HYDRATION_REQUIRED.includes(subBlock.type)
|
||||
const hydratedName =
|
||||
credentialName ||
|
||||
dropdownLabel ||
|
||||
variablesDisplayValue ||
|
||||
filterDisplayValue ||
|
||||
toolsDisplayValue ||
|
||||
knowledgeBaseDisplayName ||
|
||||
workflowSelectionName ||
|
||||
mcpServerDisplayName ||
|
||||
mcpToolDisplayName ||
|
||||
tableDisplayName ||
|
||||
webhookUrlDisplayValue ||
|
||||
selectorDisplayName
|
||||
const displayValue = maskedValue || hydratedName || (isSelectorType && value ? '-' : value)
|
||||
@@ -645,7 +763,10 @@ const SubBlockRow = memo(function SubBlockRow({
|
||||
</span>
|
||||
{displayValue !== undefined && (
|
||||
<span
|
||||
className='flex-1 truncate text-right text-[14px] text-[var(--text-primary)]'
|
||||
className={cn(
|
||||
'flex-1 truncate text-right text-[14px] text-[var(--text-primary)]',
|
||||
isMonospaceField && 'font-mono'
|
||||
)}
|
||||
title={displayValue}
|
||||
>
|
||||
{displayValue}
|
||||
|
||||
@@ -3,23 +3,37 @@ import { createLogger } from '@sim/logger'
|
||||
import { useQueryClient } from '@tanstack/react-query'
|
||||
import type { GenerationType } from '@/blocks/types'
|
||||
import { subscriptionKeys } from '@/hooks/queries/subscription'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
const logger = createLogger('useWand')
|
||||
|
||||
interface ChatMessage {
|
||||
role: 'user' | 'assistant' | 'system'
|
||||
content: string
|
||||
}
|
||||
|
||||
interface BuildWandContextInfoOptions {
|
||||
currentValue?: string
|
||||
generationType?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds rich context information based on current content and generation type
|
||||
* Builds rich context information based on current content and generation type.
|
||||
* Note: Table schema context is now fetched server-side in /api/wand for simplicity.
|
||||
*/
|
||||
function buildContextInfo(currentValue?: string, generationType?: string): string {
|
||||
if (!currentValue || currentValue.trim() === '') {
|
||||
return 'no current content'
|
||||
}
|
||||
function buildWandContextInfo({
|
||||
currentValue,
|
||||
generationType,
|
||||
}: BuildWandContextInfoOptions): string {
|
||||
const hasContent = Boolean(currentValue && currentValue.trim() !== '')
|
||||
const contentLength = currentValue?.length ?? 0
|
||||
const lineCount = currentValue ? currentValue.split('\n').length : 0
|
||||
|
||||
const contentLength = currentValue.length
|
||||
const lineCount = currentValue.split('\n').length
|
||||
let contextInfo = hasContent
|
||||
? `Current content (${contentLength} characters, ${lineCount} lines):\n${currentValue}`
|
||||
: 'no current content'
|
||||
|
||||
let contextInfo = `Current content (${contentLength} characters, ${lineCount} lines):\n${currentValue}`
|
||||
|
||||
if (generationType) {
|
||||
if (generationType && currentValue) {
|
||||
switch (generationType) {
|
||||
case 'javascript-function-body':
|
||||
case 'typescript-function-body': {
|
||||
@@ -32,6 +46,7 @@ function buildContextInfo(currentValue?: string, generationType?: string): strin
|
||||
|
||||
case 'json-schema':
|
||||
case 'json-object':
|
||||
case 'table-schema':
|
||||
try {
|
||||
const parsed = JSON.parse(currentValue)
|
||||
const keys = Object.keys(parsed)
|
||||
@@ -46,11 +61,6 @@ function buildContextInfo(currentValue?: string, generationType?: string): strin
|
||||
return contextInfo
|
||||
}
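For illustration, a hedged sketch of the context string this produces for a table-schema wand call; the generation-type suffix depends on the switch above, so only its general shape is shown.

```typescript
// Hypothetical input; the appended type-specific note comes from the switch above.
const ctx = buildWandContextInfo({
  currentValue: '{"status": "active"}',
  generationType: 'table-schema',
})
// → 'Current content (20 characters, 1 lines):\n{"status": "active"}'
//   followed by any note derived from the parsed JSON keys.
```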
|
||||
|
||||
interface ChatMessage {
|
||||
role: 'user' | 'assistant' | 'system'
|
||||
content: string
|
||||
}
|
||||
|
||||
export interface WandConfig {
|
||||
enabled: boolean
|
||||
prompt: string
|
||||
@@ -62,6 +72,9 @@ export interface WandConfig {
|
||||
interface UseWandProps {
|
||||
wandConfig?: WandConfig
|
||||
currentValue?: string
|
||||
contextParams?: {
|
||||
tableId?: string | null
|
||||
}
|
||||
onGeneratedContent: (content: string) => void
|
||||
onStreamChunk?: (chunk: string) => void
|
||||
onStreamStart?: () => void
|
||||
@@ -71,12 +84,14 @@ interface UseWandProps {
|
||||
export function useWand({
|
||||
wandConfig,
|
||||
currentValue,
|
||||
contextParams,
|
||||
onGeneratedContent,
|
||||
onStreamChunk,
|
||||
onStreamStart,
|
||||
onGenerationComplete,
|
||||
}: UseWandProps) {
|
||||
const queryClient = useQueryClient()
|
||||
const workflowId = useWorkflowRegistry((state) => state.hydration.workflowId)
|
||||
const [isLoading, setIsLoading] = useState(false)
|
||||
const [isPromptVisible, setIsPromptVisible] = useState(false)
|
||||
const [promptInputValue, setPromptInputValue] = useState('')
|
||||
@@ -147,7 +162,10 @@ export function useWand({
|
||||
}
|
||||
|
||||
try {
|
||||
const contextInfo = buildContextInfo(currentValue, wandConfig?.generationType)
|
||||
const contextInfo = buildWandContextInfo({
|
||||
currentValue,
|
||||
generationType: wandConfig?.generationType,
|
||||
})
|
||||
|
||||
let systemPrompt = wandConfig?.prompt || ''
|
||||
if (systemPrompt.includes('{context}')) {
|
||||
@@ -170,6 +188,8 @@ export function useWand({
|
||||
stream: true,
|
||||
history: wandConfig?.maintainHistory ? conversationHistory : [],
|
||||
generationType: wandConfig?.generationType,
|
||||
workflowId,
|
||||
wandContext: contextParams?.tableId ? { tableId: contextParams.tableId } : undefined,
|
||||
}),
|
||||
signal: abortControllerRef.current.signal,
|
||||
cache: 'no-store',
|
||||
@@ -276,6 +296,8 @@ export function useWand({
|
||||
onStreamStart,
|
||||
onGenerationComplete,
|
||||
queryClient,
|
||||
contextParams?.tableId,
|
||||
workflowId,
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Database, HelpCircle, Layout, Plus, Search, Settings } from 'lucide-react'
|
||||
import { Database, HelpCircle, Layout, Plus, Search, Settings, Table } from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
import { useParams, usePathname, useRouter } from 'next/navigation'
|
||||
import { Button, Download, FolderPlus, Library, Loader, Tooltip } from '@/components/emcn'
|
||||
@@ -263,6 +263,12 @@ export const Sidebar = memo(function Sidebar() {
|
||||
href: `/workspace/${workspaceId}/knowledge`,
|
||||
hidden: permissionConfig.hideKnowledgeBaseTab,
|
||||
},
|
||||
{
|
||||
id: 'tables',
|
||||
label: 'Tables',
|
||||
icon: Table,
|
||||
href: `/workspace/${workspaceId}/tables`,
|
||||
},
|
||||
{
|
||||
id: 'help',
|
||||
label: 'Help',
|
||||
|
||||
@@ -409,6 +409,9 @@ describe('Blocks Module', () => {
|
||||
'workflow-input-mapper',
|
||||
'text',
|
||||
'router-input',
|
||||
'table-selector',
|
||||
'filter-builder',
|
||||
'sort-builder',
|
||||
]
|
||||
|
||||
const blocks = getAllBlocks()
|
||||
|
||||
apps/sim/blocks/blocks/table.ts (new file, 679 lines)
@@ -0,0 +1,679 @@
|
||||
import { TableIcon } from '@/components/icons'
|
||||
import { TABLE_LIMITS } from '@/lib/table/constants'
|
||||
import { filterRulesToFilter, sortRulesToSort } from '@/lib/table/query-builder/converters'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import type { TableQueryResponse } from '@/tools/table/types'
|
||||
|
||||
/**
|
||||
* Parses a JSON string with helpful error messages.
|
||||
*
|
||||
* Handles common issues like unquoted block references in JSON values.
|
||||
*
|
||||
* @param value - The value to parse (string or already-parsed object)
|
||||
* @param fieldName - Name of the field for error messages
|
||||
* @returns Parsed JSON value
|
||||
* @throws Error with helpful hints if JSON is invalid
|
||||
*/
|
||||
function parseJSON(value: string | unknown, fieldName: string): unknown {
|
||||
if (typeof value !== 'string') return value
|
||||
|
||||
try {
|
||||
return JSON.parse(value)
|
||||
} catch (error) {
|
||||
const errorMsg = error instanceof Error ? error.message : String(error)
|
||||
|
||||
// Check if the error might be due to unquoted string values
|
||||
// This happens when users write {"field": <ref>} instead of {"field": "<ref>"}
|
||||
const unquotedValueMatch = value.match(
|
||||
/:\s*([a-zA-Z][a-zA-Z0-9_\s]*[a-zA-Z0-9]|[a-zA-Z])\s*[,}]/
|
||||
)
|
||||
|
||||
let hint =
|
||||
'Make sure all property names are in double quotes (e.g., {"name": "value"} not {name: "value"}).'
|
||||
|
||||
if (unquotedValueMatch) {
|
||||
hint =
|
||||
'It looks like a string value is not quoted. When using block references in JSON, wrap them in double quotes: {"field": "<blockName.output>"} not {"field": <blockName.output>}.'
|
||||
}
|
||||
|
||||
throw new Error(`Invalid JSON in ${fieldName}: ${errorMsg}. ${hint}`)
|
||||
}
|
||||
}
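A small usage sketch for the helper above (field values are hypothetical), including the case the unquoted-value hint is written for:

```typescript
// Valid JSON strings parse normally; non-strings are returned as-is.
parseJSON('{"name": "Ada"}', 'Row Data')  // → { name: 'Ada' }
parseJSON({ name: 'Ada' }, 'Row Data')    // → { name: 'Ada' }

// A block reference that resolved to an unquoted string matches the regex above,
// so the thrown error carries the "wrap references in double quotes" hint:
parseJSON('{"name": John Doe}', 'Row Data')
// → Error: Invalid JSON in Row Data: … It looks like a string value is not quoted. …
```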
|
||||
|
||||
/** Raw params from block UI before JSON parsing and type conversion */
|
||||
interface TableBlockParams {
|
||||
operation: string
|
||||
tableId?: string
|
||||
rowId?: string
|
||||
data?: string | unknown
|
||||
rows?: string | unknown
|
||||
filter?: string | unknown
|
||||
sort?: string | unknown
|
||||
limit?: string
|
||||
offset?: string
|
||||
builderMode?: string
|
||||
filterBuilder?: unknown
|
||||
sortBuilder?: unknown
|
||||
bulkFilterMode?: string
|
||||
bulkFilterBuilder?: unknown
|
||||
}
|
||||
|
||||
/** Normalized params after parsing, ready for tool request body */
|
||||
interface ParsedParams {
|
||||
tableId?: string
|
||||
rowId?: string
|
||||
data?: unknown
|
||||
rows?: unknown
|
||||
filter?: unknown
|
||||
sort?: unknown
|
||||
limit?: number
|
||||
offset?: number
|
||||
}
|
||||
|
||||
/** Transforms raw block params into tool request params for each operation */
|
||||
const paramTransformers: Record<string, (params: TableBlockParams) => ParsedParams> = {
|
||||
insert_row: (params) => ({
|
||||
tableId: params.tableId,
|
||||
data: parseJSON(params.data, 'Row Data'),
|
||||
}),
|
||||
|
||||
upsert_row: (params) => ({
|
||||
tableId: params.tableId,
|
||||
data: parseJSON(params.data, 'Row Data'),
|
||||
}),
|
||||
|
||||
batch_insert_rows: (params) => ({
|
||||
tableId: params.tableId,
|
||||
rows: parseJSON(params.rows, 'Rows Data'),
|
||||
}),
|
||||
|
||||
update_row: (params) => ({
|
||||
tableId: params.tableId,
|
||||
rowId: params.rowId,
|
||||
data: parseJSON(params.data, 'Row Data'),
|
||||
}),
|
||||
|
||||
update_rows_by_filter: (params) => {
|
||||
let filter: unknown
|
||||
if (params.bulkFilterMode === 'builder' && params.bulkFilterBuilder) {
|
||||
filter =
|
||||
filterRulesToFilter(
|
||||
params.bulkFilterBuilder as Parameters<typeof filterRulesToFilter>[0]
|
||||
) || undefined
|
||||
} else if (params.filter) {
|
||||
filter = parseJSON(params.filter, 'Filter')
|
||||
}
|
||||
|
||||
return {
|
||||
tableId: params.tableId,
|
||||
filter,
|
||||
data: parseJSON(params.data, 'Row Data'),
|
||||
limit: params.limit ? Number.parseInt(params.limit) : undefined,
|
||||
}
|
||||
},
|
||||
|
||||
delete_row: (params) => ({
|
||||
tableId: params.tableId,
|
||||
rowId: params.rowId,
|
||||
}),
|
||||
|
||||
delete_rows_by_filter: (params) => {
|
||||
let filter: unknown
|
||||
if (params.bulkFilterMode === 'builder' && params.bulkFilterBuilder) {
|
||||
filter =
|
||||
filterRulesToFilter(
|
||||
params.bulkFilterBuilder as Parameters<typeof filterRulesToFilter>[0]
|
||||
) || undefined
|
||||
} else if (params.filter) {
|
||||
filter = parseJSON(params.filter, 'Filter')
|
||||
}
|
||||
|
||||
return {
|
||||
tableId: params.tableId,
|
||||
filter,
|
||||
limit: params.limit ? Number.parseInt(params.limit) : undefined,
|
||||
}
|
||||
},
|
||||
|
||||
get_row: (params) => ({
|
||||
tableId: params.tableId,
|
||||
rowId: params.rowId,
|
||||
}),
|
||||
|
||||
get_schema: (params) => ({
|
||||
tableId: params.tableId,
|
||||
}),
|
||||
|
||||
query_rows: (params) => {
|
||||
let filter: unknown
|
||||
if (params.builderMode === 'builder' && params.filterBuilder) {
|
||||
filter =
|
||||
filterRulesToFilter(params.filterBuilder as Parameters<typeof filterRulesToFilter>[0]) ||
|
||||
undefined
|
||||
} else if (params.filter) {
|
||||
filter = parseJSON(params.filter, 'Filter')
|
||||
}
|
||||
|
||||
let sort: unknown
|
||||
if (params.builderMode === 'builder' && params.sortBuilder) {
|
||||
sort =
|
||||
sortRulesToSort(params.sortBuilder as Parameters<typeof sortRulesToSort>[0]) || undefined
|
||||
} else if (params.sort) {
|
||||
sort = parseJSON(params.sort, 'Sort')
|
||||
}
|
||||
|
||||
return {
|
||||
tableId: params.tableId,
|
||||
filter,
|
||||
sort,
|
||||
limit: params.limit ? Number.parseInt(params.limit) : 100,
|
||||
offset: params.offset ? Number.parseInt(params.offset) : 0,
|
||||
}
|
||||
},
|
||||
}
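For example, the `query_rows` transformer turns the raw UI params (strings from short-inputs, JSON text from the editor) into the tool request body roughly like this — the values are hypothetical:

```typescript
const raw: TableBlockParams = {
  operation: 'query_rows',
  tableId: 'tbl_123', // hypothetical id
  builderMode: 'json',
  filter: '{"status": {"$eq": "active"}}',
  sort: '{"createdAt": "desc"}',
  limit: '25',
  offset: '0',
}

paramTransformers.query_rows(raw)
// → { tableId: 'tbl_123',
//     filter: { status: { $eq: 'active' } },
//     sort: { createdAt: 'desc' },
//     limit: 25, offset: 0 }
```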
|
||||
|
||||
export const TableBlock: BlockConfig<TableQueryResponse> = {
|
||||
type: 'table',
|
||||
name: 'Table',
|
||||
description: 'User-defined data tables',
|
||||
longDescription:
|
||||
'Create and manage custom data tables. Store, query, and manipulate structured data within workflows.',
|
||||
docsLink: 'https://docs.simstudio.ai/tools/table',
|
||||
category: 'blocks',
|
||||
bgColor: '#10B981',
|
||||
icon: TableIcon,
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Query Rows', id: 'query_rows' },
|
||||
{ label: 'Insert Row', id: 'insert_row' },
|
||||
{ label: 'Upsert Row', id: 'upsert_row' },
|
||||
{ label: 'Batch Insert Rows', id: 'batch_insert_rows' },
|
||||
{ label: 'Update Rows by Filter', id: 'update_rows_by_filter' },
|
||||
{ label: 'Delete Rows by Filter', id: 'delete_rows_by_filter' },
|
||||
{ label: 'Update Row by ID', id: 'update_row' },
|
||||
{ label: 'Delete Row by ID', id: 'delete_row' },
|
||||
{ label: 'Get Row by ID', id: 'get_row' },
|
||||
{ label: 'Get Schema', id: 'get_schema' },
|
||||
],
|
||||
value: () => 'query_rows',
|
||||
},
|
||||
|
||||
// Table selector (for all operations)
|
||||
{
|
||||
id: 'tableId',
|
||||
title: 'Table',
|
||||
type: 'table-selector',
|
||||
placeholder: 'Select a table',
|
||||
required: true,
|
||||
},
|
||||
|
||||
// Row ID for get/update/delete
|
||||
{
|
||||
id: 'rowId',
|
||||
title: 'Row ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'row_xxxxx',
|
||||
condition: { field: 'operation', value: ['get_row', 'update_row', 'delete_row'] },
|
||||
required: true,
|
||||
},
|
||||
|
||||
// Insert/Update/Upsert Row data (single row)
|
||||
{
|
||||
id: 'data',
|
||||
title: 'Row Data (JSON)',
|
||||
type: 'code',
|
||||
placeholder: '{"column_name": "value"}',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['insert_row', 'upsert_row', 'update_row', 'update_rows_by_filter'],
|
||||
},
|
||||
required: true,
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
maintainHistory: true,
|
||||
prompt: `Generate row data as a JSON object matching the table's column schema.
|
||||
|
||||
### CONTEXT
|
||||
{context}
|
||||
|
||||
### INSTRUCTION
|
||||
Return ONLY a valid JSON object with field values based on the table's columns. No explanations or markdown.
|
||||
|
||||
IMPORTANT: Reference the table schema visible in the table selector to know which columns exist and their types.
|
||||
|
||||
### EXAMPLES
|
||||
|
||||
Table with columns: email (string), name (string), age (number)
|
||||
"user with email john@example.com and age 25"
|
||||
→ {"email": "john@example.com", "name": "John", "age": 25}
|
||||
|
||||
Table with columns: customer_id (string), total (number), status (string)
|
||||
"order with customer ID 123, total 99.99, status pending"
|
||||
→ {"customer_id": "123", "total": 99.99, "status": "pending"}
|
||||
|
||||
Return ONLY the data JSON:`,
|
||||
generationType: 'table-schema',
|
||||
},
|
||||
},
|
||||
|
||||
// Batch Insert - multiple rows
|
||||
{
|
||||
id: 'rows',
|
||||
title: 'Rows Data (Array of JSON)',
|
||||
type: 'code',
|
||||
placeholder: '[{"col1": "val1"}, {"col1": "val2"}]',
|
||||
condition: { field: 'operation', value: 'batch_insert_rows' },
|
||||
required: true,
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
maintainHistory: true,
|
||||
prompt: `Generate an array of row data objects matching the table's column schema.
|
||||
|
||||
### CONTEXT
|
||||
{context}
|
||||
|
||||
### INSTRUCTION
|
||||
Return ONLY a valid JSON array of objects. Each object represents one row. No explanations or markdown.
|
||||
Maximum ${TABLE_LIMITS.MAX_BATCH_INSERT_SIZE} rows per batch.
|
||||
|
||||
IMPORTANT: Reference the table schema to know which columns exist and their types.
|
||||
|
||||
### EXAMPLES
|
||||
|
||||
Table with columns: email (string), name (string), age (number)
|
||||
"3 users: john@example.com age 25, jane@example.com age 30, bob@example.com age 28"
|
||||
→ [
|
||||
{"email": "john@example.com", "name": "John", "age": 25},
|
||||
{"email": "jane@example.com", "name": "Jane", "age": 30},
|
||||
{"email": "bob@example.com", "name": "Bob", "age": 28}
|
||||
]
|
||||
|
||||
Return ONLY the rows array:`,
|
||||
generationType: 'table-schema',
|
||||
},
|
||||
},
|
||||
|
||||
// Filter mode selector for bulk operations
|
||||
{
|
||||
id: 'bulkFilterMode',
|
||||
title: 'Filter Mode',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Builder', id: 'builder' },
|
||||
{ label: 'Editor', id: 'json' },
|
||||
],
|
||||
value: () => 'builder',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['update_rows_by_filter', 'delete_rows_by_filter'],
|
||||
},
|
||||
},
|
||||
|
||||
// Filter builder for bulk operations (visual)
|
||||
{
|
||||
id: 'bulkFilterBuilder',
|
||||
title: 'Filter Conditions',
|
||||
type: 'filter-builder',
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: ['update_rows_by_filter', 'delete_rows_by_filter'],
|
||||
},
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['update_rows_by_filter', 'delete_rows_by_filter'],
|
||||
and: { field: 'bulkFilterMode', value: 'builder' },
|
||||
},
|
||||
},
|
||||
|
||||
// Filter for update/delete operations (JSON editor - bulk ops)
|
||||
{
|
||||
id: 'filter',
|
||||
title: 'Filter',
|
||||
type: 'code',
|
||||
placeholder: '{"column_name": {"$eq": "value"}}',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['update_rows_by_filter', 'delete_rows_by_filter'],
|
||||
and: { field: 'bulkFilterMode', value: 'json' },
|
||||
},
|
||||
required: true,
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
maintainHistory: true,
|
||||
prompt: `Generate filter criteria for selecting rows in a table.
|
||||
|
||||
### CONTEXT
|
||||
{context}
|
||||
|
||||
### INSTRUCTION
|
||||
Return ONLY a valid JSON filter object. No explanations or markdown.
|
||||
|
||||
IMPORTANT: Reference the table schema to know which columns exist and their types.
|
||||
|
||||
### OPERATORS
|
||||
- **$eq**: Equals - {"column": {"$eq": "value"}} or {"column": "value"}
|
||||
- **$ne**: Not equals - {"column": {"$ne": "value"}}
|
||||
- **$gt**: Greater than - {"column": {"$gt": 18}}
|
||||
- **$gte**: Greater than or equal - {"column": {"$gte": 100}}
|
||||
- **$lt**: Less than - {"column": {"$lt": 90}}
|
||||
- **$lte**: Less than or equal - {"column": {"$lte": 5}}
|
||||
- **$in**: In array - {"column": {"$in": ["value1", "value2"]}}
|
||||
- **$nin**: Not in array - {"column": {"$nin": ["value1", "value2"]}}
|
||||
- **$contains**: String contains - {"column": {"$contains": "text"}}
|
||||
|
||||
### EXAMPLES
|
||||
|
||||
"rows where status is active"
|
||||
→ {"status": "active"}
|
||||
|
||||
"rows where age is over 18 and status is pending"
|
||||
→ {"age": {"$gte": 18}, "status": "pending"}
|
||||
|
||||
"rows where email contains gmail.com"
|
||||
→ {"email": {"$contains": "gmail.com"}}
|
||||
|
||||
Return ONLY the filter JSON:`,
|
||||
generationType: 'table-schema',
|
||||
},
|
||||
},
|
||||
|
||||
// Builder mode selector for query_rows (controls both filter and sort)
|
||||
{
|
||||
id: 'builderMode',
|
||||
title: 'Input Mode',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Builder', id: 'builder' },
|
||||
{ label: 'Editor', id: 'json' },
|
||||
],
|
||||
value: () => 'builder',
|
||||
condition: { field: 'operation', value: 'query_rows' },
|
||||
},
|
||||
|
||||
// Filter builder (visual)
|
||||
{
|
||||
id: 'filterBuilder',
|
||||
title: 'Filter Conditions',
|
||||
type: 'filter-builder',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'query_rows',
|
||||
and: { field: 'builderMode', value: 'builder' },
|
||||
},
|
||||
},
|
||||
|
||||
// Sort builder (visual)
|
||||
{
|
||||
id: 'sortBuilder',
|
||||
title: 'Sort Order',
|
||||
type: 'sort-builder',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'query_rows',
|
||||
and: { field: 'builderMode', value: 'builder' },
|
||||
},
|
||||
},
|
||||
|
||||
// Filter for query_rows (JSON editor mode or tool call context)
|
||||
{
|
||||
id: 'filter',
|
||||
title: 'Filter',
|
||||
type: 'code',
|
||||
placeholder: '{"column_name": {"$eq": "value"}}',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'query_rows',
|
||||
and: { field: 'builderMode', value: 'builder', not: true },
|
||||
},
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
maintainHistory: true,
|
||||
prompt: `Generate filter criteria for selecting rows in a table.
|
||||
|
||||
### CONTEXT
|
||||
{context}
|
||||
|
||||
### INSTRUCTION
|
||||
Return ONLY a valid JSON filter object. No explanations or markdown.
|
||||
|
||||
IMPORTANT: Reference the table schema to know which columns exist and their types.
|
||||
|
||||
### OPERATORS
|
||||
- **$eq**: Equals - {"column": {"$eq": "value"}} or {"column": "value"}
|
||||
- **$ne**: Not equals - {"column": {"$ne": "value"}}
|
||||
- **$gt**: Greater than - {"column": {"$gt": 18}}
|
||||
- **$gte**: Greater than or equal - {"column": {"$gte": 100}}
|
||||
- **$lt**: Less than - {"column": {"$lt": 90}}
|
||||
- **$lte**: Less than or equal - {"column": {"$lte": 5}}
|
||||
- **$in**: In array - {"column": {"$in": ["value1", "value2"]}}
|
||||
- **$nin**: Not in array - {"column": {"$nin": ["value1", "value2"]}}
|
||||
- **$contains**: String contains - {"column": {"$contains": "text"}}
|
||||
|
||||
### EXAMPLES
|
||||
|
||||
"rows where status is active"
|
||||
→ {"status": "active"}
|
||||
|
||||
"rows where age is over 18 and status is pending"
|
||||
→ {"age": {"$gte": 18}, "status": "pending"}
|
||||
|
||||
"rows where email contains gmail.com"
|
||||
→ {"email": {"$contains": "gmail.com"}}
|
||||
|
||||
Return ONLY the filter JSON:`,
|
||||
generationType: 'table-schema',
|
||||
},
|
||||
},
|
||||
|
||||
// Sort (JSON editor or tool call context)
|
||||
{
|
||||
id: 'sort',
|
||||
title: 'Sort',
|
||||
type: 'code',
|
||||
placeholder: '{"column_name": "desc"}',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'query_rows',
|
||||
and: { field: 'builderMode', value: 'builder', not: true },
|
||||
},
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
maintainHistory: true,
|
||||
prompt: `Generate sort order for table query results.
|
||||
|
||||
### CONTEXT
|
||||
{context}
|
||||
|
||||
### INSTRUCTION
|
||||
Return ONLY a valid JSON object specifying sort order. No explanations or markdown.
|
||||
|
||||
IMPORTANT: Reference the table schema to know which columns exist. You can sort by any column or the built-in columns (createdAt, updatedAt).
|
||||
|
||||
### FORMAT
|
||||
{"column_name": "asc" or "desc"}
|
||||
|
||||
You can specify multiple columns for multi-level sorting.
|
||||
|
||||
### EXAMPLES
|
||||
|
||||
Table with columns: name (string), age (number), email (string), createdAt (date)
|
||||
|
||||
"sort by newest first"
|
||||
→ {"createdAt": "desc"}
|
||||
|
||||
"sort by name alphabetically"
|
||||
→ {"name": "asc"}
|
||||
|
||||
"sort by age descending"
|
||||
→ {"age": "desc"}
|
||||
|
||||
"sort by age descending, then name ascending"
|
||||
→ {"age": "desc", "name": "asc"}
|
||||
|
||||
"sort by oldest created first"
|
||||
→ {"createdAt": "asc"}
|
||||
|
||||
Return ONLY the sort JSON:`,
|
||||
generationType: 'table-schema',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'limit',
|
||||
title: 'Limit',
|
||||
type: 'short-input',
|
||||
placeholder: '100',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['query_rows', 'update_rows_by_filter', 'delete_rows_by_filter'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'offset',
|
||||
title: 'Offset',
|
||||
type: 'short-input',
|
||||
placeholder: '0',
|
||||
condition: { field: 'operation', value: 'query_rows' },
|
||||
value: () => '0',
|
||||
},
|
||||
],
|
||||
|
||||
tools: {
|
||||
access: [
|
||||
'table_insert_row',
|
||||
'table_batch_insert_rows',
|
||||
'table_upsert_row',
|
||||
'table_update_row',
|
||||
'table_update_rows_by_filter',
|
||||
'table_delete_row',
|
||||
'table_delete_rows_by_filter',
|
||||
'table_query_rows',
|
||||
'table_get_row',
|
||||
'table_get_schema',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
const toolMap: Record<string, string> = {
|
||||
insert_row: 'table_insert_row',
|
||||
batch_insert_rows: 'table_batch_insert_rows',
|
||||
upsert_row: 'table_upsert_row',
|
||||
update_row: 'table_update_row',
|
||||
update_rows_by_filter: 'table_update_rows_by_filter',
|
||||
delete_row: 'table_delete_row',
|
||||
delete_rows_by_filter: 'table_delete_rows_by_filter',
|
||||
query_rows: 'table_query_rows',
|
||||
get_row: 'table_get_row',
|
||||
get_schema: 'table_get_schema',
|
||||
}
|
||||
return toolMap[params.operation] || 'table_query_rows'
|
||||
},
|
||||
params: (params) => {
|
||||
const { operation, ...rest } = params
|
||||
const transformer = paramTransformers[operation]
|
||||
|
||||
if (transformer) {
|
||||
return transformer(rest as TableBlockParams)
|
||||
}
|
||||
|
||||
return rest
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Table operation to perform' },
|
||||
tableId: { type: 'string', description: 'Table identifier' },
|
||||
data: { type: 'json', description: 'Row data for insert/update' },
|
||||
rows: { type: 'array', description: 'Array of row data for batch insert' },
|
||||
rowId: { type: 'string', description: 'Row identifier for ID-based operations' },
|
||||
bulkFilterMode: {
|
||||
type: 'string',
|
||||
description: 'Filter input mode for bulk operations (builder or json)',
|
||||
},
|
||||
bulkFilterBuilder: {
|
||||
type: 'json',
|
||||
description: 'Visual filter builder conditions for bulk operations',
|
||||
},
|
||||
filter: { type: 'json', description: 'Filter criteria for query/update/delete operations' },
|
||||
limit: { type: 'number', description: 'Query or bulk operation limit' },
|
||||
builderMode: {
|
||||
type: 'string',
|
||||
description: 'Input mode for filter and sort (builder or json)',
|
||||
},
|
||||
filterBuilder: { type: 'json', description: 'Visual filter builder conditions' },
|
||||
sortBuilder: { type: 'json', description: 'Visual sort builder conditions' },
|
||||
sort: { type: 'json', description: 'Sort order (JSON)' },
|
||||
offset: { type: 'number', description: 'Query result offset' },
|
||||
},
|
||||
|
||||
outputs: {
|
||||
success: { type: 'boolean', description: 'Operation success status' },
|
||||
row: {
|
||||
type: 'json',
|
||||
description: 'Single row data',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['get_row', 'insert_row', 'upsert_row', 'update_row'],
|
||||
},
|
||||
},
|
||||
operation: {
|
||||
type: 'string',
|
||||
description: 'Operation performed (insert or update)',
|
||||
condition: { field: 'operation', value: 'upsert_row' },
|
||||
},
|
||||
rows: {
|
||||
type: 'array',
|
||||
description: 'Array of rows',
|
||||
condition: { field: 'operation', value: ['query_rows', 'batch_insert_rows'] },
|
||||
},
|
||||
rowCount: {
|
||||
type: 'number',
|
||||
description: 'Number of rows returned',
|
||||
condition: { field: 'operation', value: 'query_rows' },
|
||||
},
|
||||
totalCount: {
|
||||
type: 'number',
|
||||
description: 'Total rows matching filter',
|
||||
condition: { field: 'operation', value: 'query_rows' },
|
||||
},
|
||||
insertedCount: {
|
||||
type: 'number',
|
||||
description: 'Number of rows inserted',
|
||||
condition: { field: 'operation', value: 'batch_insert_rows' },
|
||||
},
|
||||
updatedCount: {
|
||||
type: 'number',
|
||||
description: 'Number of rows updated',
|
||||
condition: { field: 'operation', value: 'update_rows_by_filter' },
|
||||
},
|
||||
updatedRowIds: {
|
||||
type: 'array',
|
||||
description: 'IDs of updated rows',
|
||||
condition: { field: 'operation', value: 'update_rows_by_filter' },
|
||||
},
|
||||
deletedCount: {
|
||||
type: 'number',
|
||||
description: 'Number of rows deleted',
|
||||
condition: { field: 'operation', value: ['delete_row', 'delete_rows_by_filter'] },
|
||||
},
|
||||
deletedRowIds: {
|
||||
type: 'array',
|
||||
description: 'IDs of deleted rows',
|
||||
condition: { field: 'operation', value: 'delete_rows_by_filter' },
|
||||
},
|
||||
name: {
|
||||
type: 'string',
|
||||
description: 'Table name',
|
||||
condition: { field: 'operation', value: 'get_schema' },
|
||||
},
|
||||
columns: {
|
||||
type: 'array',
|
||||
description: 'Column definitions',
|
||||
condition: { field: 'operation', value: 'get_schema' },
|
||||
},
|
||||
message: { type: 'string', description: 'Operation status message' },
|
||||
},
|
||||
}
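Tying the operator reference in the prompts together, a hedged example of a filter that Editor mode (or the wand) might produce for "active gmail users aged 18+, excluding admins" — the column names are assumptions:

```typescript
const exampleFilter = {
  status: { $eq: 'active' },
  email: { $contains: 'gmail.com' },
  age: { $gte: 18 },
  role: { $nin: ['admin'] },
} // top-level keys combine with AND, matching the examples above
```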
|
||||
@@ -121,6 +121,7 @@ import { StarterBlock } from '@/blocks/blocks/starter'
|
||||
import { StripeBlock } from '@/blocks/blocks/stripe'
|
||||
import { SttBlock } from '@/blocks/blocks/stt'
|
||||
import { SupabaseBlock } from '@/blocks/blocks/supabase'
|
||||
import { TableBlock } from '@/blocks/blocks/table'
|
||||
import { TavilyBlock } from '@/blocks/blocks/tavily'
|
||||
import { TelegramBlock } from '@/blocks/blocks/telegram'
|
||||
import { TextractBlock } from '@/blocks/blocks/textract'
|
||||
@@ -288,6 +289,7 @@ export const registry: Record<string, BlockConfig> = {
|
||||
stripe: StripeBlock,
|
||||
stt: SttBlock,
|
||||
supabase: SupabaseBlock,
|
||||
table: TableBlock,
|
||||
tavily: TavilyBlock,
|
||||
telegram: TelegramBlock,
|
||||
textract: TextractBlock,
|
||||
|
||||
@@ -26,6 +26,7 @@ export type GenerationType =
|
||||
| 'typescript-function-body'
|
||||
| 'json-schema'
|
||||
| 'json-object'
|
||||
| 'table-schema'
|
||||
| 'system-prompt'
|
||||
| 'custom-tool-schema'
|
||||
| 'sql-query'
|
||||
@@ -72,6 +73,8 @@ export type SubBlockType =
|
||||
| 'mcp-dynamic-args' // MCP dynamic arguments based on tool schema
|
||||
| 'input-format' // Input structure format
|
||||
| 'response-format' // Response structure format
|
||||
| 'filter-builder' // Filter conditions builder
|
||||
| 'sort-builder' // Sort conditions builder
|
||||
/**
|
||||
* @deprecated Legacy trigger save subblock type.
|
||||
*/
|
||||
@@ -84,6 +87,7 @@ export type SubBlockType =
|
||||
| 'workflow-input-mapper' // Dynamic workflow input mapper based on selected workflow
|
||||
| 'text' // Read-only text display
|
||||
| 'router-input' // Router route definitions with descriptions
|
||||
| 'table-selector' // Table selector with link to view table
|
||||
|
||||
/**
|
||||
* Selector types that require display name hydration
|
||||
@@ -103,6 +107,7 @@ export const SELECTOR_TYPES_HYDRATION_REQUIRED: SubBlockType[] = [
|
||||
'variables-input',
|
||||
'mcp-server-selector',
|
||||
'mcp-tool-selector',
|
||||
'table-selector',
|
||||
] as const
|
||||
|
||||
export type ExtractToolOutput<T> = T extends ToolResponse ? T['output'] : never
|
||||
|
||||
@@ -4696,6 +4696,26 @@ export function BedrockIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function TableIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
viewBox='0 0 24 24'
|
||||
fill='none'
|
||||
stroke='currentColor'
|
||||
strokeWidth={2}
|
||||
strokeLinecap='round'
|
||||
strokeLinejoin='round'
|
||||
{...props}
|
||||
>
|
||||
<rect width='18' height='18' x='3' y='3' rx='2' />
|
||||
<path d='M3 9h18' />
|
||||
<path d='M3 15h18' />
|
||||
<path d='M9 3v18' />
|
||||
<path d='M15 3v18' />
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
export function ReductoIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
|
||||
@@ -73,6 +73,7 @@ const DialogContent = React.forwardRef<
|
||||
}}
|
||||
{...props}
|
||||
>
|
||||
<DialogPrimitive.Title>Dialog</DialogPrimitive.Title>
|
||||
{children}
|
||||
{!hideCloseButton && (
|
||||
<DialogPrimitive.Close
|
||||
|
||||
apps/sim/hooks/queries/use-tables.ts (new file, 97 lines)
@@ -0,0 +1,97 @@
|
||||
/**
|
||||
* React Query hooks for managing user-defined tables.
|
||||
*/
|
||||
|
||||
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import type { TableDefinition } from '@/lib/table'
|
||||
|
||||
export const tableKeys = {
|
||||
all: ['tables'] as const,
|
||||
lists: () => [...tableKeys.all, 'list'] as const,
|
||||
list: (workspaceId?: string) => [...tableKeys.lists(), workspaceId ?? ''] as const,
|
||||
details: () => [...tableKeys.all, 'detail'] as const,
|
||||
detail: (tableId: string) => [...tableKeys.details(), tableId] as const,
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch all tables for a workspace.
|
||||
*/
|
||||
export function useTablesList(workspaceId?: string) {
|
||||
return useQuery({
|
||||
queryKey: tableKeys.list(workspaceId),
|
||||
queryFn: async () => {
|
||||
if (!workspaceId) throw new Error('Workspace ID required')
|
||||
|
||||
const res = await fetch(`/api/table?workspaceId=${encodeURIComponent(workspaceId)}`)
|
||||
|
||||
if (!res.ok) {
|
||||
const error = await res.json()
|
||||
throw new Error(error.error || 'Failed to fetch tables')
|
||||
}
|
||||
|
||||
const response = await res.json()
|
||||
return (response.data?.tables || []) as TableDefinition[]
|
||||
},
|
||||
enabled: Boolean(workspaceId),
|
||||
staleTime: 30 * 1000,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new table in a workspace.
|
||||
*/
|
||||
export function useCreateTable(workspaceId: string) {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async (params: {
|
||||
name: string
|
||||
description?: string
|
||||
schema: { columns: Array<{ name: string; type: string; required?: boolean }> }
|
||||
}) => {
|
||||
const res = await fetch('/api/table', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ ...params, workspaceId }),
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
const error = await res.json()
|
||||
throw new Error(error.error || 'Failed to create table')
|
||||
}
|
||||
|
||||
return res.json()
|
||||
},
|
||||
onSuccess: () => {
|
||||
queryClient.invalidateQueries({ queryKey: tableKeys.list(workspaceId) })
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a table from a workspace.
|
||||
*/
|
||||
export function useDeleteTable(workspaceId: string) {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async (tableId: string) => {
|
||||
const res = await fetch(
|
||||
`/api/table/${tableId}?workspaceId=${encodeURIComponent(workspaceId)}`,
|
||||
{
|
||||
method: 'DELETE',
|
||||
}
|
||||
)
|
||||
|
||||
if (!res.ok) {
|
||||
const error = await res.json()
|
||||
throw new Error(error.error || 'Failed to delete table')
|
||||
}
|
||||
|
||||
return res.json()
|
||||
},
|
||||
onSuccess: () => {
|
||||
queryClient.invalidateQueries({ queryKey: tableKeys.list(workspaceId) })
|
||||
},
|
||||
})
|
||||
}
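A minimal usage sketch of these hooks from a workspace view; the wrapper hook and the 'contacts' table are assumptions, while the hook signatures follow the file above.

```typescript
import { useCreateTable, useDeleteTable, useTablesList } from '@/hooks/queries/use-tables'

function useWorkspaceTables(workspaceId: string) {
  // List is cached for 30s and stays disabled until a workspace id is available.
  const { data: tables = [], isLoading } = useTablesList(workspaceId)
  const createTable = useCreateTable(workspaceId)
  const deleteTable = useDeleteTable(workspaceId)

  const createContacts = () =>
    createTable.mutate({
      name: 'contacts', // hypothetical table
      schema: { columns: [{ name: 'email', type: 'string', required: true }] },
    })

  return { tables, isLoading, createContacts, deleteTable: deleteTable.mutate }
}
```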
|
||||
apps/sim/lib/table/__tests__/sql.test.ts (new file, 303 lines)
@@ -0,0 +1,303 @@
|
||||
/**
|
||||
* @vitest-environment node
|
||||
*
|
||||
* SQL Builder Unit Tests
|
||||
*
|
||||
* Tests for the table SQL query builder utilities including filter and sort clause generation.
|
||||
*/
|
||||
import { drizzleOrmMock } from '@sim/testing'
|
||||
import { describe, expect, it, vi } from 'vitest'
|
||||
|
||||
vi.mock('drizzle-orm', () => drizzleOrmMock)
|
||||
|
||||
import { buildFilterClause, buildSortClause } from '../sql'
|
||||
import type { Filter } from '../types'
|
||||
|
||||
describe('SQL Builder', () => {
|
||||
describe('buildFilterClause', () => {
|
||||
const tableName = 'user_table_rows'
|
||||
|
||||
it('should return undefined for empty filter', () => {
|
||||
const result = buildFilterClause({}, tableName)
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should handle simple equality filter', () => {
|
||||
const filter: Filter = { name: 'John' }
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle $eq operator', () => {
|
||||
const filter: Filter = { status: { $eq: 'active' } }
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle $ne operator', () => {
|
||||
const filter: Filter = { status: { $ne: 'deleted' } }
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle $gt operator', () => {
|
||||
const filter: Filter = { age: { $gt: 18 } }
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle $gte operator', () => {
|
||||
const filter: Filter = { age: { $gte: 18 } }
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle $lt operator', () => {
|
||||
const filter: Filter = { age: { $lt: 65 } }
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle $lte operator', () => {
|
||||
const filter: Filter = { age: { $lte: 65 } }
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle $in operator with single value', () => {
|
||||
const filter: Filter = { status: { $in: ['active'] } }
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle $in operator with multiple values', () => {
|
||||
const filter: Filter = { status: { $in: ['active', 'pending'] } }
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle $nin operator', () => {
|
||||
const filter: Filter = { status: { $nin: ['deleted', 'archived'] } }
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle $contains operator', () => {
|
||||
const filter: Filter = { name: { $contains: 'john' } }
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle $or logical operator', () => {
|
||||
const filter: Filter = {
|
||||
$or: [{ status: 'active' }, { status: 'pending' }],
|
||||
}
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle $and logical operator', () => {
|
||||
const filter: Filter = {
|
||||
$and: [{ status: 'active' }, { age: { $gt: 18 } }],
|
||||
}
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle multiple conditions combined with AND', () => {
|
||||
const filter: Filter = {
|
||||
status: 'active',
|
||||
age: { $gt: 18 },
|
||||
}
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle nested $or and $and', () => {
|
||||
const filter: Filter = {
|
||||
$or: [{ $and: [{ status: 'active' }, { verified: true }] }, { role: 'admin' }],
|
||||
}
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should throw error for invalid field name', () => {
|
||||
const filter: Filter = { 'invalid-field': 'value' }
|
||||
|
||||
expect(() => buildFilterClause(filter, tableName)).toThrow('Invalid field name')
|
||||
})
|
||||
|
||||
it('should throw error for invalid operator', () => {
|
||||
const filter = { name: { $invalid: 'value' } } as unknown as Filter
|
||||
|
||||
expect(() => buildFilterClause(filter, tableName)).toThrow('Invalid operator')
|
||||
})
|
||||
|
||||
it('should skip undefined values', () => {
|
||||
const filter: Filter = { name: undefined, status: 'active' }
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle boolean values', () => {
|
||||
const filter: Filter = { active: true }
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle null values', () => {
|
||||
const filter: Filter = { deleted_at: null }
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle numeric values', () => {
|
||||
const filter: Filter = { count: 42 }
|
||||
const result = buildFilterClause(filter, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe('buildSortClause', () => {
|
||||
const tableName = 'user_table_rows'
|
||||
|
||||
it('should return undefined for empty sort', () => {
|
||||
const result = buildSortClause({}, tableName)
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should handle single field ascending sort', () => {
|
||||
const sort = { name: 'asc' as const }
|
||||
const result = buildSortClause(sort, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle single field descending sort', () => {
|
||||
const sort = { name: 'desc' as const }
|
||||
const result = buildSortClause(sort, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle multiple fields sort', () => {
|
||||
const sort = { name: 'asc' as const, created_at: 'desc' as const }
|
||||
const result = buildSortClause(sort, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle createdAt field directly', () => {
|
||||
const sort = { createdAt: 'desc' as const }
|
||||
const result = buildSortClause(sort, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle updatedAt field directly', () => {
|
||||
const sort = { updatedAt: 'asc' as const }
|
||||
const result = buildSortClause(sort, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should throw error for invalid field name', () => {
|
||||
const sort = { 'invalid-field': 'asc' as const }
|
||||
|
||||
expect(() => buildSortClause(sort, tableName)).toThrow('Invalid field name')
|
||||
})
|
||||
|
||||
it('should throw error for invalid direction', () => {
|
||||
const sort = { name: 'invalid' as 'asc' | 'desc' }
|
||||
|
||||
expect(() => buildSortClause(sort, tableName)).toThrow('Invalid sort direction')
|
||||
})
|
||||
|
||||
it('should handle numeric column type for proper numeric sorting', () => {
|
||||
const sort = { salary: 'desc' as const }
|
||||
const columns = [{ name: 'salary', type: 'number' as const }]
|
||||
const result = buildSortClause(sort, tableName, columns)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should handle date column type for chronological sorting', () => {
|
||||
const sort = { birthDate: 'asc' as const }
|
||||
const columns = [{ name: 'birthDate', type: 'date' as const }]
|
||||
const result = buildSortClause(sort, tableName, columns)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should use text sorting for string columns', () => {
|
||||
const sort = { name: 'asc' as const }
|
||||
const columns = [{ name: 'name', type: 'string' as const }]
|
||||
const result = buildSortClause(sort, tableName, columns)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
|
||||
it('should fall back to text sorting when column type is unknown', () => {
|
||||
const sort = { unknownField: 'asc' as const }
|
||||
// No columns provided
|
||||
const result = buildSortClause(sort, tableName)
|
||||
|
||||
expect(result).toBeDefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Field Name Validation', () => {
|
||||
const tableName = 'user_table_rows'
|
||||
|
||||
it('should accept valid field names', () => {
|
||||
const validNames = ['name', 'user_id', '_private', 'Count123', 'a']
|
||||
|
||||
for (const name of validNames) {
|
||||
const filter: Filter = { [name]: 'value' }
|
||||
expect(() => buildFilterClause(filter, tableName)).not.toThrow()
|
||||
}
|
||||
})
|
||||
|
||||
it('should reject field names starting with number', () => {
|
||||
const filter: Filter = { '123name': 'value' }
|
||||
expect(() => buildFilterClause(filter, tableName)).toThrow('Invalid field name')
|
||||
})
|
||||
|
||||
it('should reject field names with special characters', () => {
|
||||
const invalidNames = ['field-name', 'field.name', 'field name', 'field@name']
|
||||
|
||||
for (const name of invalidNames) {
|
||||
const filter: Filter = { [name]: 'value' }
|
||||
expect(() => buildFilterClause(filter, tableName)).toThrow('Invalid field name')
|
||||
}
|
||||
})
|
||||
|
||||
it('should reject SQL injection attempts', () => {
|
||||
const sqlInjectionAttempts = ["'; DROP TABLE users; --", 'name OR 1=1', 'name; DELETE FROM']
|
||||
|
||||
for (const attempt of sqlInjectionAttempts) {
|
||||
const filter: Filter = { [attempt]: 'value' }
|
||||
expect(() => buildFilterClause(filter, tableName)).toThrow('Invalid field name')
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
apps/sim/lib/table/__tests__/validation.test.ts (new file, 366 lines)
@@ -0,0 +1,366 @@
|
||||
/**
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { describe, expect, it } from 'vitest'
|
||||
import { TABLE_LIMITS } from '../constants'
|
||||
import {
|
||||
type ColumnDefinition,
|
||||
getUniqueColumns,
|
||||
type TableSchema,
|
||||
validateColumnDefinition,
|
||||
validateRowAgainstSchema,
|
||||
validateRowSize,
|
||||
validateTableName,
|
||||
validateTableSchema,
|
||||
validateUniqueConstraints,
|
||||
} from '../validation'
|
||||
|
||||
describe('Validation', () => {
|
||||
describe('validateTableName', () => {
|
||||
it('should accept valid table names', () => {
|
||||
const validNames = ['users', 'user_data', '_private', 'Users123', 'a']
|
||||
|
||||
for (const name of validNames) {
|
||||
const result = validateTableName(name)
|
||||
expect(result.valid).toBe(true)
|
||||
expect(result.errors).toHaveLength(0)
|
||||
}
|
||||
})
|
||||
|
||||
it('should reject empty name', () => {
|
||||
const result = validateTableName('')
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors).toContain('Table name is required')
|
||||
})
|
||||
|
||||
it('should reject null/undefined name', () => {
|
||||
const result1 = validateTableName(null as unknown as string)
|
||||
expect(result1.valid).toBe(false)
|
||||
|
||||
const result2 = validateTableName(undefined as unknown as string)
|
||||
expect(result2.valid).toBe(false)
|
||||
})
|
||||
|
||||
it('should reject names starting with number', () => {
|
||||
const result = validateTableName('123table')
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors[0]).toContain('must start with letter or underscore')
|
||||
})
|
||||
|
||||
it('should reject names with special characters', () => {
|
||||
const invalidNames = ['table-name', 'table.name', 'table name', 'table@name']
|
||||
|
||||
for (const name of invalidNames) {
|
||||
const result = validateTableName(name)
|
||||
expect(result.valid).toBe(false)
|
||||
}
|
||||
})
|
||||
|
||||
it('should reject names exceeding max length', () => {
|
||||
const longName = 'a'.repeat(TABLE_LIMITS.MAX_TABLE_NAME_LENGTH + 1)
|
||||
const result = validateTableName(longName)
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors[0]).toContain('exceeds maximum length')
|
||||
})
|
||||
})
|
||||
|
||||
describe('validateColumnDefinition', () => {
|
||||
it('should accept valid column definition', () => {
|
||||
const column: ColumnDefinition = {
|
||||
name: 'email',
|
||||
type: 'string',
|
||||
required: true,
|
||||
unique: true,
|
||||
}
|
||||
const result = validateColumnDefinition(column)
|
||||
expect(result.valid).toBe(true)
|
||||
})
|
||||
|
||||
it('should accept all valid column types', () => {
|
||||
const types = ['string', 'number', 'boolean', 'date', 'json'] as const
|
||||
|
||||
for (const type of types) {
|
||||
const result = validateColumnDefinition({ name: 'test', type })
|
||||
expect(result.valid).toBe(true)
|
||||
}
|
||||
})
|
||||
|
||||
it('should reject empty column name', () => {
|
||||
const result = validateColumnDefinition({ name: '', type: 'string' })
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors).toContain('Column name is required')
|
||||
})
|
||||
|
||||
it('should reject invalid column type', () => {
|
||||
const result = validateColumnDefinition({
|
||||
name: 'test',
|
||||
type: 'invalid' as any,
|
||||
})
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors[0]).toContain('invalid type')
|
||||
})
|
||||
|
||||
it('should reject column name exceeding max length', () => {
|
||||
const longName = 'a'.repeat(TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH + 1)
|
||||
const result = validateColumnDefinition({ name: longName, type: 'string' })
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors[0]).toContain('exceeds maximum length')
|
||||
})
|
||||
})
|
||||
|
||||
describe('validateTableSchema', () => {
|
||||
it('should accept valid schema', () => {
|
||||
const schema: TableSchema = {
|
||||
columns: [
|
||||
{ name: 'id', type: 'string', required: true, unique: true },
|
||||
{ name: 'name', type: 'string', required: true },
|
||||
{ name: 'age', type: 'number' },
|
||||
],
|
||||
}
|
||||
const result = validateTableSchema(schema)
|
||||
expect(result.valid).toBe(true)
|
||||
})
|
||||
|
||||
it('should reject empty columns array', () => {
|
||||
const schema: TableSchema = { columns: [] }
|
||||
const result = validateTableSchema(schema)
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors).toContain('Schema must have at least one column')
|
||||
})
|
||||
|
||||
it('should reject duplicate column names', () => {
|
||||
const schema: TableSchema = {
|
||||
columns: [
|
||||
{ name: 'id', type: 'string' },
|
||||
{ name: 'ID', type: 'number' },
|
||||
],
|
||||
}
|
||||
const result = validateTableSchema(schema)
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors).toContain('Duplicate column names found')
|
||||
})
|
||||
|
||||
it('should reject null schema', () => {
|
||||
const result = validateTableSchema(null as unknown as TableSchema)
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors).toContain('Schema is required')
|
||||
})
|
||||
|
||||
it('should reject schema without columns array', () => {
|
||||
const result = validateTableSchema({} as TableSchema)
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors).toContain('Schema must have columns array')
|
||||
})
|
||||
|
||||
it('should reject schema exceeding max columns', () => {
|
||||
const columns = Array.from({ length: TABLE_LIMITS.MAX_COLUMNS_PER_TABLE + 1 }, (_, i) => ({
|
||||
name: `col_${i}`,
|
||||
type: 'string' as const,
|
||||
}))
|
||||
const result = validateTableSchema({ columns })
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors[0]).toContain('exceeds maximum columns')
|
||||
})
|
||||
})
|
||||
|
||||
describe('validateRowSize', () => {
|
||||
it('should accept row within size limit', () => {
|
||||
const data = { name: 'test', value: 123 }
|
||||
const result = validateRowSize(data)
|
||||
expect(result.valid).toBe(true)
|
||||
})
|
||||
|
||||
it('should reject row exceeding size limit', () => {
|
||||
const largeString = 'a'.repeat(TABLE_LIMITS.MAX_ROW_SIZE_BYTES + 1)
|
||||
const data = { content: largeString }
|
||||
const result = validateRowSize(data)
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors[0]).toContain('exceeds limit')
|
||||
})
|
||||
})
|
||||
|
||||
describe('validateRowAgainstSchema', () => {
|
||||
const schema: TableSchema = {
|
||||
columns: [
|
||||
{ name: 'name', type: 'string', required: true },
|
||||
{ name: 'age', type: 'number' },
|
||||
{ name: 'active', type: 'boolean' },
|
||||
{ name: 'created', type: 'date' },
|
||||
{ name: 'metadata', type: 'json' },
|
||||
],
|
||||
}
|
||||
|
||||
it('should accept valid row data', () => {
|
||||
const data = {
|
||||
name: 'John',
|
||||
age: 30,
|
||||
active: true,
|
||||
created: '2024-01-01',
|
||||
metadata: { key: 'value' },
|
||||
}
|
||||
const result = validateRowAgainstSchema(data, schema)
|
||||
expect(result.valid).toBe(true)
|
||||
})
|
||||
|
||||
it('should reject missing required field', () => {
|
||||
const data = { age: 30 }
|
||||
const result = validateRowAgainstSchema(data, schema)
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors).toContain('Missing required field: name')
|
||||
})
|
||||
|
||||
it('should reject wrong type for string field', () => {
|
||||
const data = { name: 123 }
|
||||
const result = validateRowAgainstSchema(data, schema)
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors[0]).toContain('must be string')
|
||||
})
|
||||
|
||||
it('should reject wrong type for number field', () => {
|
||||
const data = { name: 'John', age: 'thirty' }
|
||||
const result = validateRowAgainstSchema(data, schema)
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors[0]).toContain('must be number')
|
||||
})
|
||||
|
||||
it('should reject NaN for number field', () => {
|
||||
const data = { name: 'John', age: Number.NaN }
|
||||
const result = validateRowAgainstSchema(data, schema)
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors[0]).toContain('must be number')
|
||||
})
|
||||
|
||||
it('should reject wrong type for boolean field', () => {
|
||||
const data = { name: 'John', active: 'yes' }
|
||||
const result = validateRowAgainstSchema(data, schema)
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors[0]).toContain('must be boolean')
|
||||
})
|
||||
|
||||
it('should reject invalid date string', () => {
|
||||
const data = { name: 'John', created: 'not-a-date' }
|
||||
const result = validateRowAgainstSchema(data, schema)
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors[0]).toContain('must be valid date')
|
||||
})
|
||||
|
||||
it('should accept valid ISO date string', () => {
|
||||
const data = { name: 'John', created: '2024-01-15T10:30:00Z' }
|
||||
const result = validateRowAgainstSchema(data, schema)
|
||||
expect(result.valid).toBe(true)
|
||||
})
|
||||
|
||||
it('should accept Date object', () => {
|
||||
const data = { name: 'John', created: new Date() }
|
||||
const result = validateRowAgainstSchema(data, schema)
|
||||
expect(result.valid).toBe(true)
|
||||
})
|
||||
|
||||
it('should allow null for optional fields', () => {
|
||||
const data = { name: 'John', age: null }
|
||||
const result = validateRowAgainstSchema(data, schema)
|
||||
expect(result.valid).toBe(true)
|
||||
})
|
||||
|
||||
it('should allow undefined for optional fields', () => {
|
||||
const data = { name: 'John' }
|
||||
const result = validateRowAgainstSchema(data, schema)
|
||||
expect(result.valid).toBe(true)
|
||||
})
|
||||
|
||||
it('should reject string exceeding max length', () => {
|
||||
const longString = 'a'.repeat(TABLE_LIMITS.MAX_STRING_VALUE_LENGTH + 1)
|
||||
const data = { name: longString }
|
||||
const result = validateRowAgainstSchema(data, schema)
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors[0]).toContain('exceeds max string length')
|
||||
})
|
||||
})
|
||||
|
||||
describe('getUniqueColumns', () => {
|
||||
it('should return only columns with unique=true', () => {
|
||||
const schema: TableSchema = {
|
||||
columns: [
|
||||
{ name: 'id', type: 'string', unique: true },
|
||||
{ name: 'email', type: 'string', unique: true },
|
||||
{ name: 'name', type: 'string' },
|
||||
{ name: 'count', type: 'number', unique: false },
|
||||
],
|
||||
}
|
||||
const result = getUniqueColumns(schema)
|
||||
expect(result).toHaveLength(2)
|
||||
expect(result.map((c) => c.name)).toEqual(['id', 'email'])
|
||||
})
|
||||
|
||||
it('should return empty array when no unique columns', () => {
|
||||
const schema: TableSchema = {
|
||||
columns: [
|
||||
{ name: 'name', type: 'string' },
|
||||
{ name: 'value', type: 'number' },
|
||||
],
|
||||
}
|
||||
const result = getUniqueColumns(schema)
|
||||
expect(result).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe('validateUniqueConstraints', () => {
|
||||
const schema: TableSchema = {
|
||||
columns: [
|
||||
{ name: 'id', type: 'string', unique: true },
|
||||
{ name: 'email', type: 'string', unique: true },
|
||||
{ name: 'name', type: 'string' },
|
||||
],
|
||||
}
|
||||
|
||||
const existingRows = [
|
||||
{ id: 'row1', data: { id: 'abc123', email: 'john@example.com', name: 'John' } },
|
||||
{ id: 'row2', data: { id: 'def456', email: 'jane@example.com', name: 'Jane' } },
|
||||
]
|
||||
|
||||
it('should accept data with unique values', () => {
|
||||
const data = { id: 'xyz789', email: 'new@example.com', name: 'New User' }
|
||||
const result = validateUniqueConstraints(data, schema, existingRows)
|
||||
expect(result.valid).toBe(true)
|
||||
})
|
||||
|
||||
it('should reject duplicate unique value', () => {
|
||||
const data = { id: 'abc123', email: 'new@example.com', name: 'New User' }
|
||||
const result = validateUniqueConstraints(data, schema, existingRows)
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors[0]).toContain('must be unique')
|
||||
expect(result.errors[0]).toContain('abc123')
|
||||
})
|
||||
|
||||
it('should be case-insensitive for string comparisons', () => {
|
||||
const data = { id: 'ABC123', email: 'new@example.com', name: 'New User' }
|
||||
const result = validateUniqueConstraints(data, schema, existingRows)
|
||||
expect(result.valid).toBe(false)
|
||||
})
|
||||
|
||||
it('should exclude specified row from checks (for updates)', () => {
|
||||
const data = { id: 'abc123', email: 'john@example.com', name: 'John Updated' }
|
||||
const result = validateUniqueConstraints(data, schema, existingRows, 'row1')
|
||||
expect(result.valid).toBe(true)
|
||||
})
|
||||
|
||||
it('should allow null values for unique columns', () => {
|
||||
const data = { id: null, email: 'new@example.com', name: 'New User' }
|
||||
const result = validateUniqueConstraints(data, schema, existingRows)
|
||||
expect(result.valid).toBe(true)
|
||||
})
|
||||
|
||||
it('should allow undefined values for unique columns', () => {
|
||||
const data = { email: 'new@example.com', name: 'New User' }
|
||||
const result = validateUniqueConstraints(data, schema, existingRows)
|
||||
expect(result.valid).toBe(true)
|
||||
})
|
||||
|
||||
it('should report multiple violations', () => {
|
||||
const data = { id: 'abc123', email: 'john@example.com', name: 'New User' }
|
||||
const result = validateUniqueConstraints(data, schema, existingRows)
|
||||
expect(result.valid).toBe(false)
|
||||
expect(result.errors).toHaveLength(2)
|
||||
})
|
||||
})
|
||||
})
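Note for reviewers: the assertions above fix the validator's result shape ({ valid, errors }) and its error wording. A minimal sketch of validateTableName that is consistent with these tests, assuming the NAME_PATTERN and TABLE_LIMITS exports from ../constants; the shipped implementation in apps/sim/lib/table/validation.ts may differ in detail.

```ts
import { NAME_PATTERN, TABLE_LIMITS } from '../constants'

interface ValidationResult {
  valid: boolean
  errors: string[]
}

// Sketch only: mirrors the behaviour the tests assert, not the shipped code.
function validateTableNameSketch(name: string): ValidationResult {
  const errors: string[] = []
  if (!name) {
    errors.push('Table name is required')
  } else {
    if (name.length > TABLE_LIMITS.MAX_TABLE_NAME_LENGTH) {
      errors.push(`Table name exceeds maximum length (${TABLE_LIMITS.MAX_TABLE_NAME_LENGTH})`)
    }
    if (!NAME_PATTERN.test(name)) {
      errors.push('Table name must start with letter or underscore')
    }
  }
  return { valid: errors.length === 0, errors }
}
```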
83
apps/sim/lib/table/billing.ts
Normal file
@@ -0,0 +1,83 @@
/**
|
||||
* Billing helpers for table feature limits.
|
||||
*
|
||||
* Uses workspace billing account to determine plan-based limits.
|
||||
*/
|
||||
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { getUserSubscriptionState } from '@/lib/billing/core/subscription'
|
||||
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
|
||||
import { type PlanName, TABLE_PLAN_LIMITS, type TablePlanLimits } from './constants'
|
||||
|
||||
const logger = createLogger('TableBilling')
|
||||
|
||||
/**
|
||||
* Gets the table limits for a workspace based on its billing plan.
|
||||
*
|
||||
* Uses the workspace's billed account user to determine the subscription plan,
|
||||
* then returns the corresponding table limits.
|
||||
*
|
||||
* @param workspaceId - The workspace ID to get limits for
|
||||
* @returns Table limits based on the workspace's billing plan
|
||||
*/
|
||||
export async function getWorkspaceTableLimits(workspaceId: string): Promise<TablePlanLimits> {
|
||||
try {
|
||||
const billedAccountUserId = await getWorkspaceBilledAccountUserId(workspaceId)
|
||||
|
||||
if (!billedAccountUserId) {
|
||||
logger.warn('No billed account found for workspace, using free tier limits', { workspaceId })
|
||||
return TABLE_PLAN_LIMITS.free
|
||||
}
|
||||
|
||||
const subscriptionState = await getUserSubscriptionState(billedAccountUserId)
|
||||
const planName = subscriptionState.planName as PlanName
|
||||
|
||||
const limits = TABLE_PLAN_LIMITS[planName] ?? TABLE_PLAN_LIMITS.free
|
||||
|
||||
logger.info('Retrieved workspace table limits', {
|
||||
workspaceId,
|
||||
billedAccountUserId,
|
||||
planName,
|
||||
limits,
|
||||
})
|
||||
|
||||
return limits
|
||||
} catch (error) {
|
||||
logger.error('Error getting workspace table limits, falling back to free tier', {
|
||||
workspaceId,
|
||||
error,
|
||||
})
|
||||
return TABLE_PLAN_LIMITS.free
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a workspace can create more tables based on its plan limits.
|
||||
*
|
||||
* @param workspaceId - The workspace ID to check
|
||||
* @param currentTableCount - The current number of tables in the workspace
|
||||
* @returns Object with canCreate boolean and limit info
|
||||
*/
|
||||
export async function canCreateTable(
|
||||
workspaceId: string,
|
||||
currentTableCount: number
|
||||
): Promise<{ canCreate: boolean; maxTables: number; currentCount: number }> {
|
||||
const limits = await getWorkspaceTableLimits(workspaceId)
|
||||
|
||||
return {
|
||||
canCreate: currentTableCount < limits.maxTables,
|
||||
maxTables: limits.maxTables,
|
||||
currentCount: currentTableCount,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the maximum rows allowed per table for a workspace based on its plan.
|
||||
*
|
||||
* @param workspaceId - The workspace ID
|
||||
* @returns Maximum rows per table (-1 for unlimited)
|
||||
*/
|
||||
export async function getMaxRowsPerTable(workspaceId: string): Promise<number> {
|
||||
const limits = await getWorkspaceTableLimits(workspaceId)
|
||||
return limits.maxRowsPerTable
|
||||
}
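A hedged call-site sketch for these helpers: gate creation on the plan limit and thread the per-plan row cap into CreateTableData.maxRows. The wrapper function name and error message are illustrative and not part of this diff.

```ts
import { canCreateTable, getWorkspaceTableLimits } from '@/lib/table'

// Illustrative guard used before createTable; names here are assumptions.
async function assertTableCapacity(workspaceId: string, currentTableCount: number) {
  const { canCreate, currentCount, maxTables } = await canCreateTable(workspaceId, currentTableCount)
  if (!canCreate) {
    throw new Error(`Workspace is at its table limit (${currentCount}/${maxTables})`)
  }
  // Per-plan row cap, suitable for CreateTableData.maxRows
  const { maxRowsPerTable } = await getWorkspaceTableLimits(workspaceId)
  return maxRowsPerTable
}
```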
59
apps/sim/lib/table/constants.ts
Normal file
@@ -0,0 +1,59 @@
/**
|
||||
* Limits and constants for user-defined tables.
|
||||
*/
|
||||
|
||||
export const TABLE_LIMITS = {
|
||||
MAX_TABLES_PER_WORKSPACE: 100,
|
||||
MAX_ROWS_PER_TABLE: 10000,
|
||||
MAX_ROW_SIZE_BYTES: 100 * 1024, // 100KB
|
||||
MAX_COLUMNS_PER_TABLE: 50,
|
||||
MAX_TABLE_NAME_LENGTH: 50,
|
||||
MAX_COLUMN_NAME_LENGTH: 50,
|
||||
MAX_STRING_VALUE_LENGTH: 10000,
|
||||
MAX_DESCRIPTION_LENGTH: 500,
|
||||
DEFAULT_QUERY_LIMIT: 100,
|
||||
MAX_QUERY_LIMIT: 1000,
|
||||
/** Batch size for bulk update operations */
|
||||
UPDATE_BATCH_SIZE: 100,
|
||||
/** Batch size for bulk delete operations */
|
||||
DELETE_BATCH_SIZE: 1000,
|
||||
/** Maximum rows per batch insert */
|
||||
MAX_BATCH_INSERT_SIZE: 1000,
|
||||
/** Maximum rows per bulk update/delete operation */
|
||||
MAX_BULK_OPERATION_SIZE: 1000,
|
||||
} as const
|
||||
|
||||
/**
|
||||
* Plan-based table limits.
|
||||
*/
|
||||
export const TABLE_PLAN_LIMITS = {
|
||||
free: {
|
||||
maxTables: 3,
|
||||
maxRowsPerTable: 1000,
|
||||
},
|
||||
pro: {
|
||||
maxTables: 25,
|
||||
maxRowsPerTable: 5000,
|
||||
},
|
||||
team: {
|
||||
maxTables: 100,
|
||||
maxRowsPerTable: 10000,
|
||||
},
|
||||
enterprise: {
|
||||
maxTables: 10000,
|
||||
maxRowsPerTable: 1000000,
|
||||
},
|
||||
} as const
|
||||
|
||||
export type PlanName = keyof typeof TABLE_PLAN_LIMITS
|
||||
|
||||
export interface TablePlanLimits {
|
||||
maxTables: number
|
||||
maxRowsPerTable: number
|
||||
}
|
||||
|
||||
export const COLUMN_TYPES = ['string', 'number', 'boolean', 'date', 'json'] as const
|
||||
|
||||
export const NAME_PATTERN = /^[a-z_][a-z0-9_]*$/i
|
||||
|
||||
export const USER_TABLE_ROWS_SQL_NAME = 'user_table_rows'
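Small illustration of how these constants compose. The plan lookup mirrors the fallback already used in billing.ts; the identifier helper is only a sketch, not part of this diff.

```ts
import { NAME_PATTERN, TABLE_LIMITS, TABLE_PLAN_LIMITS, type PlanName } from '@/lib/table/constants'

// Illustrative helpers, not part of this diff.
const isValidIdentifier = (name: string) =>
  NAME_PATTERN.test(name) && name.length <= TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH

const limitsFor = (plan: string) => TABLE_PLAN_LIMITS[plan as PlanName] ?? TABLE_PLAN_LIMITS.free

isValidIdentifier('user_id') // true
isValidIdentifier('123col') // false
limitsFor('pro').maxRowsPerTable // 5000
```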
1
apps/sim/lib/table/hooks/index.ts
Normal file
@@ -0,0 +1 @@
export * from './use-table-columns'
53
apps/sim/lib/table/hooks/use-table-columns.ts
Normal file
@@ -0,0 +1,53 @@
import { useEffect, useRef, useState } from 'react'
|
||||
import type { ColumnOption } from '../types'
|
||||
|
||||
interface UseTableColumnsOptions {
|
||||
tableId: string | null | undefined
|
||||
includeBuiltIn?: boolean
|
||||
}
|
||||
|
||||
/** Fetches table schema columns as dropdown options. */
|
||||
export function useTableColumns({ tableId, includeBuiltIn = false }: UseTableColumnsOptions) {
|
||||
const [columns, setColumns] = useState<ColumnOption[]>([])
|
||||
const fetchedTableIdRef = useRef<string | null>(null)
|
||||
|
||||
useEffect(() => {
|
||||
const fetchColumns = async () => {
|
||||
if (!tableId || tableId === fetchedTableIdRef.current) return
|
||||
|
||||
try {
|
||||
const { useWorkflowRegistry } = await import('@/stores/workflows/registry/store')
|
||||
const workspaceId = useWorkflowRegistry.getState().hydration.workspaceId
|
||||
if (!workspaceId) return
|
||||
|
||||
const response = await fetch(`/api/table/${tableId}?workspaceId=${workspaceId}`)
|
||||
if (!response.ok) return
|
||||
|
||||
const result = await response.json()
|
||||
const cols = result.data?.table?.schema?.columns || result.table?.schema?.columns || []
|
||||
const schemaCols = cols.map((col: { name: string }) => ({
|
||||
value: col.name,
|
||||
label: col.name,
|
||||
}))
|
||||
|
||||
if (includeBuiltIn) {
|
||||
const builtInCols = [
|
||||
{ value: 'createdAt', label: 'createdAt' },
|
||||
{ value: 'updatedAt', label: 'updatedAt' },
|
||||
]
|
||||
setColumns([...schemaCols, ...builtInCols])
|
||||
} else {
|
||||
setColumns(schemaCols)
|
||||
}
|
||||
|
||||
fetchedTableIdRef.current = tableId
|
||||
} catch {
|
||||
// Silently fail
|
||||
}
|
||||
}
|
||||
|
||||
fetchColumns()
|
||||
}, [tableId, includeBuiltIn])
|
||||
|
||||
return columns
|
||||
}
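A hedged client-side usage sketch for this hook; the component and markup are assumptions, only the hook call and the { value, label } option shape come from this diff.

```tsx
import { useTableColumns } from '@/lib/table/hooks'

// Illustrative component; not part of this diff.
function ColumnSelect({ tableId }: { tableId: string | null }) {
  const columns = useTableColumns({ tableId, includeBuiltIn: true })
  return (
    <select>
      {columns.map((col) => (
        <option key={col.value} value={col.value}>
          {col.label}
        </option>
      ))}
    </select>
  )
}
```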
15
apps/sim/lib/table/index.ts
Normal file
@@ -0,0 +1,15 @@
/**
 * Table utilities module.
 *
 * Hooks are not re-exported here to avoid pulling React into server code.
 * Import hooks directly from '@/lib/table/hooks' in client components.
 */

export * from './billing'
export * from './constants'
export * from './llm'
export * from './query-builder'
export * from './service'
export * from './sql'
export * from './types'
export * from './validation'
201
apps/sim/lib/table/llm/enrichment.ts
Normal file
@@ -0,0 +1,201 @@
/**
|
||||
* LLM tool enrichment utilities for table operations.
|
||||
*
|
||||
* Provides functions to enrich tool descriptions and parameter schemas
|
||||
* with table-specific information so LLMs can construct proper queries.
|
||||
*/
|
||||
|
||||
import type { TableSummary } from '../types'
|
||||
|
||||
/**
|
||||
* Operations that use filters and need filter-specific enrichment.
|
||||
*/
|
||||
export const FILTER_OPERATIONS = new Set([
|
||||
'table_query_rows',
|
||||
'table_update_rows_by_filter',
|
||||
'table_delete_rows_by_filter',
|
||||
])
|
||||
|
||||
/**
|
||||
* Operations that need column info for data construction.
|
||||
*/
|
||||
export const DATA_OPERATIONS = new Set([
|
||||
'table_insert_row',
|
||||
'table_batch_insert_rows',
|
||||
'table_upsert_row',
|
||||
'table_update_row',
|
||||
])
|
||||
|
||||
/**
|
||||
* Enriches a table tool description with table information based on the operation type.
|
||||
*/
|
||||
export function enrichTableToolDescription(
|
||||
originalDescription: string,
|
||||
table: TableSummary,
|
||||
toolId: string
|
||||
): string {
|
||||
if (!table.columns || table.columns.length === 0) {
|
||||
return originalDescription
|
||||
}
|
||||
|
||||
const columnList = table.columns.map((col) => ` - ${col.name} (${col.type})`).join('\n')
|
||||
|
||||
if (FILTER_OPERATIONS.has(toolId)) {
|
||||
const stringCols = table.columns.filter((c) => c.type === 'string')
|
||||
const numberCols = table.columns.filter((c) => c.type === 'number')
|
||||
|
||||
let filterExample = ''
|
||||
if (stringCols.length > 0 && numberCols.length > 0) {
|
||||
filterExample = `
|
||||
|
||||
Example filter: {"${stringCols[0].name}": {"$eq": "value"}, "${numberCols[0].name}": {"$lt": 50}}`
|
||||
} else if (stringCols.length > 0) {
|
||||
filterExample = `
|
||||
|
||||
Example filter: {"${stringCols[0].name}": {"$eq": "value"}}`
|
||||
}
|
||||
|
||||
let sortExample = ''
|
||||
if (toolId === 'table_query_rows' && numberCols.length > 0) {
|
||||
sortExample = `
|
||||
Example sort: {"${numberCols[0].name}": "desc"} for highest first, {"${numberCols[0].name}": "asc"} for lowest first`
|
||||
}
|
||||
|
||||
const queryInstructions =
|
||||
toolId === 'table_query_rows'
|
||||
? `
|
||||
INSTRUCTIONS:
|
||||
1. ALWAYS include a filter based on the user's question - queries without filters will fail
|
||||
2. Construct the filter yourself from the user's question - do NOT ask for confirmation
|
||||
3. Use exact match ($eq) by default unless the user specifies otherwise
|
||||
4. For ranking queries (highest, lowest, Nth, top N):
|
||||
- ALWAYS use sort with the relevant column (e.g., {"salary": "desc"} for highest salary)
|
||||
- Use limit to get only the needed rows (e.g., limit=1 for highest, limit=2 for second highest)
|
||||
- For "second highest X", use sort: {"X": "desc"} with limit: 2, then take the second result
|
||||
5. Only use limit=1000 when you need ALL matching rows`
|
||||
: `
|
||||
INSTRUCTIONS:
|
||||
1. ALWAYS include a filter based on the user's question - queries without filters will fail
|
||||
2. Construct the filter yourself from the user's question - do NOT ask for confirmation
|
||||
3. Use exact match ($eq) by default unless the user specifies otherwise`
|
||||
|
||||
return `${originalDescription}
|
||||
${queryInstructions}
|
||||
|
||||
Table "${table.name}" columns:
|
||||
${columnList}
|
||||
${filterExample}${sortExample}`
|
||||
}
|
||||
|
||||
if (DATA_OPERATIONS.has(toolId)) {
|
||||
const exampleCols = table.columns.slice(0, 3)
|
||||
const dataExample = exampleCols.reduce(
|
||||
(obj, col) => {
|
||||
obj[col.name] = col.type === 'number' ? 123 : col.type === 'boolean' ? true : 'example'
|
||||
return obj
|
||||
},
|
||||
{} as Record<string, unknown>
|
||||
)
|
||||
|
||||
if (toolId === 'table_update_row') {
|
||||
return `${originalDescription}
|
||||
|
||||
Table "${table.name}" available columns:
|
||||
${columnList}
|
||||
|
||||
For updates, only include the fields you want to change. Example: {"${exampleCols[0]?.name || 'field'}": "new_value"}`
|
||||
}
|
||||
|
||||
return `${originalDescription}
|
||||
|
||||
Table "${table.name}" available columns:
|
||||
${columnList}
|
||||
|
||||
Pass the "data" parameter with an object like: ${JSON.stringify(dataExample)}`
|
||||
}
|
||||
|
||||
return `${originalDescription}
|
||||
|
||||
Table "${table.name}" columns:
|
||||
${columnList}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Enriches LLM tool parameters with table-specific information.
|
||||
*/
|
||||
export function enrichTableToolParameters(
|
||||
llmSchema: { properties?: Record<string, any>; required?: string[] },
|
||||
table: TableSummary,
|
||||
toolId: string
|
||||
): { properties: Record<string, any>; required: string[] } {
|
||||
if (!table.columns || table.columns.length === 0) {
|
||||
return {
|
||||
properties: llmSchema.properties || {},
|
||||
required: llmSchema.required || [],
|
||||
}
|
||||
}
|
||||
|
||||
const columnNames = table.columns.map((c) => c.name).join(', ')
|
||||
const enrichedProperties = { ...llmSchema.properties }
|
||||
const enrichedRequired = llmSchema.required ? [...llmSchema.required] : []
|
||||
|
||||
if (enrichedProperties.filter && FILTER_OPERATIONS.has(toolId)) {
|
||||
enrichedProperties.filter = {
|
||||
...enrichedProperties.filter,
|
||||
description: `REQUIRED - query will fail without a filter. Construct filter from user's question using columns: ${columnNames}. Syntax: {"column": {"$eq": "value"}}`,
|
||||
}
|
||||
}
|
||||
|
||||
if (FILTER_OPERATIONS.has(toolId) && !enrichedRequired.includes('filter')) {
|
||||
enrichedRequired.push('filter')
|
||||
}
|
||||
|
||||
if (enrichedProperties.sort && toolId === 'table_query_rows') {
|
||||
enrichedProperties.sort = {
|
||||
...enrichedProperties.sort,
|
||||
description: `Sort order as {field: "asc"|"desc"}. REQUIRED for ranking queries (highest, lowest, Nth). Example: {"salary": "desc"} for highest salary first.`,
|
||||
}
|
||||
}
|
||||
|
||||
if (enrichedProperties.limit && toolId === 'table_query_rows') {
|
||||
enrichedProperties.limit = {
|
||||
...enrichedProperties.limit,
|
||||
description: `Maximum rows to return (min: 1, max: 1000, default: 100). For ranking queries: use limit=1 for highest/lowest, limit=2 for second highest, etc.`,
|
||||
}
|
||||
}
|
||||
|
||||
if (enrichedProperties.data && DATA_OPERATIONS.has(toolId)) {
|
||||
const exampleCols = table.columns.slice(0, 2)
|
||||
const exampleData = exampleCols.reduce(
|
||||
(obj: Record<string, unknown>, col: { name: string; type: string }) => {
|
||||
obj[col.name] = col.type === 'number' ? 123 : col.type === 'boolean' ? true : 'value'
|
||||
return obj
|
||||
},
|
||||
{} as Record<string, unknown>
|
||||
)
|
||||
|
||||
if (toolId === 'table_update_row') {
|
||||
enrichedProperties.data = {
|
||||
...enrichedProperties.data,
|
||||
description: `Object containing fields to update. Only include fields you want to change. Available columns: ${columnNames}`,
|
||||
}
|
||||
} else {
|
||||
enrichedProperties.data = {
|
||||
...enrichedProperties.data,
|
||||
description: `REQUIRED object containing row values. Use columns: ${columnNames}. Example value: ${JSON.stringify(exampleData)}`,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (enrichedProperties.rows && toolId === 'table_batch_insert_rows') {
|
||||
enrichedProperties.rows = {
|
||||
...enrichedProperties.rows,
|
||||
description: `REQUIRED. Array of row objects. Each object uses columns: ${columnNames}`,
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
properties: enrichedProperties,
|
||||
required: enrichedRequired,
|
||||
}
|
||||
}
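Sketch of how an agent/tool layer might apply these enrichment helpers. Only name and columns are read from the table object here; the table contents and the tool schema passed in are placeholders.

```ts
import { enrichTableToolDescription, enrichTableToolParameters } from '@/lib/table/llm'
import type { TableSummary } from '@/lib/table/types'

// Illustrative table; only `name` and `columns` are used by these helpers.
const table = {
  name: 'employees',
  columns: [
    { name: 'name', type: 'string' },
    { name: 'salary', type: 'number' },
  ],
} as TableSummary

const description = enrichTableToolDescription(
  'Query rows from a user table.',
  table,
  'table_query_rows'
)
// Appends the column list, a filter example such as
// {"name": {"$eq": "value"}, "salary": {"$lt": 50}}, a sort example, and the ranking instructions.

const { properties, required } = enrichTableToolParameters(
  { properties: { filter: {}, sort: {}, limit: {} }, required: [] },
  table,
  'table_query_rows'
)
// `required` now includes 'filter'; the filter/sort/limit descriptions reference the real columns.
```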
1
apps/sim/lib/table/llm/index.ts
Normal file
@@ -0,0 +1 @@
export * from './enrichment'
60
apps/sim/lib/table/llm/wand.ts
Normal file
@@ -0,0 +1,60 @@
/**
|
||||
* Wand enricher for table schema context.
|
||||
*/
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import { userTableDefinitions } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import type { TableSchema } from '../types'
|
||||
|
||||
const logger = createLogger('TableWandEnricher')
|
||||
|
||||
/**
|
||||
* Wand enricher that provides table schema context.
|
||||
* Used by the wand API to inject table column information into the system prompt.
|
||||
*/
|
||||
export async function enrichTableSchema(
|
||||
workspaceId: string | null,
|
||||
context: Record<string, unknown>
|
||||
): Promise<string | null> {
|
||||
const tableId = context.tableId as string | undefined
|
||||
if (!tableId || !workspaceId) {
|
||||
return null
|
||||
}
|
||||
|
||||
try {
|
||||
const [table] = await db
|
||||
.select({
|
||||
name: userTableDefinitions.name,
|
||||
schema: userTableDefinitions.schema,
|
||||
})
|
||||
.from(userTableDefinitions)
|
||||
.where(
|
||||
and(eq(userTableDefinitions.id, tableId), eq(userTableDefinitions.workspaceId, workspaceId))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!table) {
|
||||
return null
|
||||
}
|
||||
|
||||
const schema = table.schema as TableSchema | null
|
||||
if (!schema?.columns?.length) {
|
||||
return null
|
||||
}
|
||||
|
||||
const columnLines = schema.columns
|
||||
.map((col) => {
|
||||
const flags = [col.type, col.required && 'required', col.unique && 'unique'].filter(Boolean)
|
||||
return `- ${col.name} (${flags.join(', ')})`
|
||||
})
|
||||
.join('\n')
|
||||
|
||||
const label = table.name ? `${table.name} (${tableId})` : tableId
|
||||
return `Table schema for ${label}:\n${columnLines}\nBuilt-in columns: createdAt, updatedAt`
|
||||
} catch (error) {
|
||||
logger.debug('Failed to fetch table schema', { tableId, error })
|
||||
return null
|
||||
}
|
||||
}
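For reference, a sketch of the string this enricher produces; the workspace ID, table ID, table name, and columns below are placeholders.

```ts
async function example() {
  const prompt = await enrichTableSchema('wsp_123', { tableId: 'tbl_abc' })
  // With a matching "contacts" table the result looks like:
  // Table schema for contacts (tbl_abc):
  // - email (string, required, unique)
  // - age (number)
  // Built-in columns: createdAt, updatedAt
  return prompt // null when the table, workspace, or schema is missing
}
```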
26
apps/sim/lib/table/query-builder/constants.ts
Normal file
@@ -0,0 +1,26 @@
/**
|
||||
* Constants for table query builder UI (filtering and sorting).
|
||||
*/
|
||||
|
||||
export type { FilterRule, SortRule } from '../types'
|
||||
|
||||
export const COMPARISON_OPERATORS = [
|
||||
{ value: 'eq', label: 'equals' },
|
||||
{ value: 'ne', label: 'not equals' },
|
||||
{ value: 'gt', label: 'greater than' },
|
||||
{ value: 'gte', label: 'greater or equal' },
|
||||
{ value: 'lt', label: 'less than' },
|
||||
{ value: 'lte', label: 'less or equal' },
|
||||
{ value: 'contains', label: 'contains' },
|
||||
{ value: 'in', label: 'in array' },
|
||||
] as const
|
||||
|
||||
export const LOGICAL_OPERATORS = [
|
||||
{ value: 'and', label: 'and' },
|
||||
{ value: 'or', label: 'or' },
|
||||
] as const
|
||||
|
||||
export const SORT_DIRECTIONS = [
|
||||
{ value: 'asc', label: 'ascending' },
|
||||
{ value: 'desc', label: 'descending' },
|
||||
] as const
167
apps/sim/lib/table/query-builder/converters.ts
Normal file
@@ -0,0 +1,167 @@
/**
|
||||
* Converters for transforming between UI builder state and API filter/sort objects.
|
||||
*/
|
||||
|
||||
import { nanoid } from 'nanoid'
|
||||
import type { Filter, FilterRule, JsonValue, Sort, SortDirection, SortRule } from '../types'
|
||||
|
||||
/** Converts UI filter rules to a Filter object for API queries. */
|
||||
export function filterRulesToFilter(rules: FilterRule[]): Filter | null {
|
||||
if (rules.length === 0) return null
|
||||
|
||||
const orGroups: Filter[] = []
|
||||
let currentGroup: Filter = {}
|
||||
|
||||
for (const rule of rules) {
|
||||
const isOr = rule.logicalOperator === 'or'
|
||||
const ruleValue = toRuleValue(rule.operator, rule.value)
|
||||
|
||||
if (isOr && Object.keys(currentGroup).length > 0) {
|
||||
orGroups.push({ ...currentGroup })
|
||||
currentGroup = {}
|
||||
}
|
||||
|
||||
currentGroup[rule.column] = ruleValue as Filter[string]
|
||||
}
|
||||
|
||||
if (Object.keys(currentGroup).length > 0) {
|
||||
orGroups.push(currentGroup)
|
||||
}
|
||||
|
||||
return orGroups.length > 1 ? { $or: orGroups } : orGroups[0] || null
|
||||
}
|
||||
|
||||
/** Converts a Filter object back to UI filter rules. */
|
||||
export function filterToRules(filter: Filter | null): FilterRule[] {
|
||||
if (!filter) return []
|
||||
|
||||
if (filter.$or && Array.isArray(filter.$or)) {
|
||||
const groups = filter.$or
|
||||
.map((orGroup) => parseFilterGroup(orGroup as Filter))
|
||||
.filter((group) => group.length > 0)
|
||||
return applyLogicalOperators(groups)
|
||||
}
|
||||
|
||||
return parseFilterGroup(filter)
|
||||
}
|
||||
|
||||
/** Converts a single UI sort rule to a Sort object for API queries. */
|
||||
export function sortRuleToSort(rule: SortRule | null): Sort | null {
|
||||
if (!rule || !rule.column) return null
|
||||
return { [rule.column]: rule.direction }
|
||||
}
|
||||
|
||||
/** Converts multiple UI sort rules to a Sort object. */
|
||||
export function sortRulesToSort(rules: SortRule[]): Sort | null {
|
||||
if (rules.length === 0) return null
|
||||
|
||||
const sort: Sort = {}
|
||||
for (const rule of rules) {
|
||||
if (rule.column) {
|
||||
sort[rule.column] = rule.direction
|
||||
}
|
||||
}
|
||||
|
||||
return Object.keys(sort).length > 0 ? sort : null
|
||||
}
|
||||
|
||||
/** Converts a Sort object back to UI sort rules. */
|
||||
export function sortToRules(sort: Sort | null): SortRule[] {
|
||||
if (!sort) return []
|
||||
|
||||
return Object.entries(sort).map(([column, direction]) => ({
|
||||
id: nanoid(),
|
||||
column,
|
||||
direction: normalizeSortDirection(direction),
|
||||
}))
|
||||
}
|
||||
|
||||
function toRuleValue(operator: string, value: string): JsonValue {
|
||||
const parsedValue = parseValue(value, operator)
|
||||
return operator === 'eq' ? parsedValue : { [`$${operator}`]: parsedValue }
|
||||
}
|
||||
|
||||
function applyLogicalOperators(groups: FilterRule[][]): FilterRule[] {
|
||||
const rules: FilterRule[] = []
|
||||
|
||||
groups.forEach((group, groupIndex) => {
|
||||
group.forEach((rule, ruleIndex) => {
|
||||
rules.push({
|
||||
...rule,
|
||||
logicalOperator:
|
||||
groupIndex === 0 && ruleIndex === 0
|
||||
? 'and'
|
||||
: groupIndex > 0 && ruleIndex === 0
|
||||
? 'or'
|
||||
: 'and',
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
return rules
|
||||
}
|
||||
|
||||
function parseValue(value: string, operator: string): JsonValue {
|
||||
if (operator === 'in') {
|
||||
return value
|
||||
.split(',')
|
||||
.map((part) => part.trim())
|
||||
.map((part) => parseScalar(part))
|
||||
}
|
||||
|
||||
return parseScalar(value)
|
||||
}
|
||||
|
||||
function parseScalar(value: string): JsonValue {
|
||||
if (value === 'true') return true
|
||||
if (value === 'false') return false
|
||||
if (value === 'null') return null
|
||||
if (!Number.isNaN(Number(value)) && value !== '') return Number(value)
|
||||
return value
|
||||
}
|
||||
|
||||
function parseFilterGroup(group: Filter): FilterRule[] {
|
||||
if (!group || typeof group !== 'object' || Array.isArray(group)) return []
|
||||
|
||||
const rules: FilterRule[] = []
|
||||
|
||||
for (const [column, value] of Object.entries(group)) {
|
||||
if (column === '$or' || column === '$and') continue
|
||||
|
||||
if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
|
||||
for (const [op, opValue] of Object.entries(value)) {
|
||||
if (op.startsWith('$')) {
|
||||
rules.push({
|
||||
id: nanoid(),
|
||||
logicalOperator: 'and',
|
||||
column,
|
||||
operator: op.substring(1),
|
||||
value: formatValueForBuilder(opValue as JsonValue),
|
||||
})
|
||||
}
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
rules.push({
|
||||
id: nanoid(),
|
||||
logicalOperator: 'and',
|
||||
column,
|
||||
operator: 'eq',
|
||||
value: formatValueForBuilder(value as JsonValue),
|
||||
})
|
||||
}
|
||||
|
||||
return rules
|
||||
}
|
||||
|
||||
function formatValueForBuilder(value: JsonValue): string {
|
||||
if (value === null) return 'null'
|
||||
if (typeof value === 'boolean') return String(value)
|
||||
if (Array.isArray(value)) return value.map(formatValueForBuilder).join(', ')
|
||||
return String(value)
|
||||
}
|
||||
|
||||
function normalizeSortDirection(direction: string): SortDirection {
|
||||
return direction === 'desc' ? 'desc' : 'asc'
|
||||
}
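Worked example of the shapes these converters produce, traced from the logic above; the rule ids and column names are placeholders.

```ts
import { filterRulesToFilter, sortRuleToSort } from '@/lib/table/query-builder'

// Two AND-ed rules followed by an OR rule become an $or of two groups.
const filter = filterRulesToFilter([
  { id: '1', logicalOperator: 'and', column: 'status', operator: 'eq', value: 'active' },
  { id: '2', logicalOperator: 'and', column: 'age', operator: 'gte', value: '18' },
  { id: '3', logicalOperator: 'or', column: 'role', operator: 'eq', value: 'admin' },
])
// => { $or: [{ status: 'active', age: { $gte: 18 } }, { role: 'admin' }] }

const sort = sortRuleToSort({ id: 's1', column: 'age', direction: 'desc' })
// => { age: 'desc' }
```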
7
apps/sim/lib/table/query-builder/index.ts
Normal file
@@ -0,0 +1,7 @@
/**
 * Query builder UI utilities for filtering and sorting tables.
 */

export * from './constants'
export * from './converters'
export * from './use-query-builder'
161
apps/sim/lib/table/query-builder/use-query-builder.ts
Normal file
@@ -0,0 +1,161 @@
/**
|
||||
* Hooks for query builder UI state management (filters and sorting).
|
||||
*/
|
||||
|
||||
import { useCallback, useMemo } from 'react'
|
||||
import { nanoid } from 'nanoid'
|
||||
import type { ColumnOption } from '../types'
|
||||
import {
|
||||
COMPARISON_OPERATORS,
|
||||
type FilterRule,
|
||||
LOGICAL_OPERATORS,
|
||||
SORT_DIRECTIONS,
|
||||
type SortRule,
|
||||
} from './constants'
|
||||
|
||||
export type { ColumnOption }
|
||||
|
||||
/** Manages filter rule state with add/remove/update operations. */
|
||||
export function useFilterBuilder({
|
||||
columns,
|
||||
rules,
|
||||
setRules,
|
||||
isReadOnly = false,
|
||||
}: UseFilterBuilderProps): UseFilterBuilderReturn {
|
||||
const comparisonOptions = useMemo(
|
||||
() => COMPARISON_OPERATORS.map((op) => ({ value: op.value, label: op.label })),
|
||||
[]
|
||||
)
|
||||
|
||||
const logicalOptions = useMemo(
|
||||
() => LOGICAL_OPERATORS.map((op) => ({ value: op.value, label: op.label })),
|
||||
[]
|
||||
)
|
||||
|
||||
const sortDirectionOptions = useMemo(
|
||||
() => SORT_DIRECTIONS.map((d) => ({ value: d.value, label: d.label })),
|
||||
[]
|
||||
)
|
||||
|
||||
const createDefaultRule = useCallback((): FilterRule => {
|
||||
return {
|
||||
id: nanoid(),
|
||||
logicalOperator: 'and',
|
||||
column: columns[0]?.value || '',
|
||||
operator: 'eq',
|
||||
value: '',
|
||||
}
|
||||
}, [columns])
|
||||
|
||||
const addRule = useCallback(() => {
|
||||
if (isReadOnly) return
|
||||
setRules([...rules, createDefaultRule()])
|
||||
}, [isReadOnly, rules, setRules, createDefaultRule])
|
||||
|
||||
const removeRule = useCallback(
|
||||
(id: string) => {
|
||||
if (isReadOnly) return
|
||||
setRules(rules.filter((r) => r.id !== id))
|
||||
},
|
||||
[isReadOnly, rules, setRules]
|
||||
)
|
||||
|
||||
const updateRule = useCallback(
|
||||
(id: string, field: keyof FilterRule, value: string) => {
|
||||
if (isReadOnly) return
|
||||
setRules(rules.map((r) => (r.id === id ? { ...r, [field]: value } : r)))
|
||||
},
|
||||
[isReadOnly, rules, setRules]
|
||||
)
|
||||
|
||||
return {
|
||||
comparisonOptions,
|
||||
logicalOptions,
|
||||
sortDirectionOptions,
|
||||
addRule,
|
||||
removeRule,
|
||||
updateRule,
|
||||
createDefaultRule,
|
||||
}
|
||||
}
|
||||
|
||||
/** Manages sort rule state with add/remove/update operations. */
|
||||
export function useSortBuilder({
|
||||
columns,
|
||||
sortRule,
|
||||
setSortRule,
|
||||
}: UseSortBuilderProps): UseSortBuilderReturn {
|
||||
const sortDirectionOptions = useMemo(
|
||||
() => SORT_DIRECTIONS.map((d) => ({ value: d.value, label: d.label })),
|
||||
[]
|
||||
)
|
||||
|
||||
const addSort = useCallback(() => {
|
||||
setSortRule({
|
||||
id: nanoid(),
|
||||
column: columns[0]?.value || '',
|
||||
direction: 'asc',
|
||||
})
|
||||
}, [columns, setSortRule])
|
||||
|
||||
const removeSort = useCallback(() => {
|
||||
setSortRule(null)
|
||||
}, [setSortRule])
|
||||
|
||||
const updateSortColumn = useCallback(
|
||||
(column: string) => {
|
||||
if (sortRule) {
|
||||
setSortRule({ ...sortRule, column })
|
||||
}
|
||||
},
|
||||
[sortRule, setSortRule]
|
||||
)
|
||||
|
||||
const updateSortDirection = useCallback(
|
||||
(direction: 'asc' | 'desc') => {
|
||||
if (sortRule) {
|
||||
setSortRule({ ...sortRule, direction })
|
||||
}
|
||||
},
|
||||
[sortRule, setSortRule]
|
||||
)
|
||||
|
||||
return {
|
||||
sortDirectionOptions,
|
||||
addSort,
|
||||
removeSort,
|
||||
updateSortColumn,
|
||||
updateSortDirection,
|
||||
}
|
||||
}
|
||||
|
||||
export interface UseFilterBuilderProps {
|
||||
columns: ColumnOption[]
|
||||
rules: FilterRule[]
|
||||
setRules: (rules: FilterRule[]) => void
|
||||
isReadOnly?: boolean
|
||||
}
|
||||
|
||||
export interface UseFilterBuilderReturn {
|
||||
comparisonOptions: ColumnOption[]
|
||||
logicalOptions: ColumnOption[]
|
||||
sortDirectionOptions: ColumnOption[]
|
||||
addRule: () => void
|
||||
removeRule: (id: string) => void
|
||||
updateRule: (id: string, field: keyof FilterRule, value: string) => void
|
||||
createDefaultRule: () => FilterRule
|
||||
}
|
||||
|
||||
export interface UseSortBuilderProps {
|
||||
columns: ColumnOption[]
|
||||
sortRule: SortRule | null
|
||||
setSortRule: (sort: SortRule | null) => void
|
||||
}
|
||||
|
||||
export interface UseSortBuilderReturn {
|
||||
sortDirectionOptions: ColumnOption[]
|
||||
addSort: () => void
|
||||
removeSort: () => void
|
||||
updateSortColumn: (column: string) => void
|
||||
updateSortDirection: (direction: 'asc' | 'desc') => void
|
||||
}
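A hedged wiring sketch showing how the filter hook pairs with component state and the converters above; everything outside the hook and converter calls is an assumption.

```ts
import { useState } from 'react'
import {
  type ColumnOption,
  type FilterRule,
  filterRulesToFilter,
  useFilterBuilder,
} from '@/lib/table/query-builder'

// Illustrative custom hook; not part of this diff.
function useFilterState(columns: ColumnOption[]) {
  const [rules, setRules] = useState<FilterRule[]>([])
  const builder = useFilterBuilder({ columns, rules, setRules })
  // builder.addRule() appends a default rule targeting the first column;
  // builder.updateRule(id, 'value', '42') edits a rule in place.
  const apiFilter = filterRulesToFilter(rules) // null when no rules exist
  return { rules, builder, apiFilter }
}
```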
691
apps/sim/lib/table/service.ts
Normal file
@@ -0,0 +1,691 @@
/**
|
||||
* Table service layer for internal programmatic access.
|
||||
*
|
||||
* Use this for: workflow executor, background jobs, testing business logic.
|
||||
* Use API routes for: HTTP requests, frontend clients.
|
||||
*
|
||||
* Note: API routes have their own implementations for HTTP-specific concerns.
|
||||
*/
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import { userTableDefinitions, userTableRows } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, count, eq, sql } from 'drizzle-orm'
|
||||
import { TABLE_LIMITS, USER_TABLE_ROWS_SQL_NAME } from './constants'
|
||||
import { buildFilterClause, buildSortClause } from './sql'
|
||||
import type {
|
||||
BatchInsertData,
|
||||
BulkDeleteData,
|
||||
BulkOperationResult,
|
||||
BulkUpdateData,
|
||||
CreateTableData,
|
||||
InsertRowData,
|
||||
QueryOptions,
|
||||
QueryResult,
|
||||
RowData,
|
||||
TableDefinition,
|
||||
TableRow,
|
||||
TableSchema,
|
||||
UpdateRowData,
|
||||
} from './types'
|
||||
import {
|
||||
checkBatchUniqueConstraintsDb,
|
||||
checkUniqueConstraintsDb,
|
||||
getUniqueColumns,
|
||||
validateRowAgainstSchema,
|
||||
validateRowSize,
|
||||
validateTableName,
|
||||
validateTableSchema,
|
||||
} from './validation'
|
||||
|
||||
const logger = createLogger('TableService')
|
||||
|
||||
/**
|
||||
* Gets a table by ID with full details.
|
||||
*
|
||||
* @param tableId - Table ID to fetch
|
||||
* @returns Table definition or null if not found
|
||||
*/
|
||||
export async function getTableById(tableId: string): Promise<TableDefinition | null> {
|
||||
const results = await db
|
||||
.select()
|
||||
.from(userTableDefinitions)
|
||||
.where(eq(userTableDefinitions.id, tableId))
|
||||
.limit(1)
|
||||
|
||||
if (results.length === 0) return null
|
||||
|
||||
const table = results[0]
|
||||
return {
|
||||
id: table.id,
|
||||
name: table.name,
|
||||
description: table.description,
|
||||
schema: table.schema as TableSchema,
|
||||
rowCount: table.rowCount,
|
||||
maxRows: table.maxRows,
|
||||
workspaceId: table.workspaceId,
|
||||
createdBy: table.createdBy,
|
||||
createdAt: table.createdAt,
|
||||
updatedAt: table.updatedAt,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Lists all tables in a workspace.
|
||||
*
|
||||
* @param workspaceId - Workspace ID to list tables for
|
||||
* @returns Array of table definitions
|
||||
*/
|
||||
export async function listTables(workspaceId: string): Promise<TableDefinition[]> {
|
||||
const tables = await db
|
||||
.select()
|
||||
.from(userTableDefinitions)
|
||||
.where(eq(userTableDefinitions.workspaceId, workspaceId))
|
||||
.orderBy(userTableDefinitions.createdAt)
|
||||
|
||||
return tables.map((t) => ({
|
||||
id: t.id,
|
||||
name: t.name,
|
||||
description: t.description,
|
||||
schema: t.schema as TableSchema,
|
||||
rowCount: t.rowCount,
|
||||
maxRows: t.maxRows,
|
||||
workspaceId: t.workspaceId,
|
||||
createdBy: t.createdBy,
|
||||
createdAt: t.createdAt,
|
||||
updatedAt: t.updatedAt,
|
||||
}))
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new table.
|
||||
*
|
||||
* @param data - Table creation data
|
||||
* @param requestId - Request ID for logging
|
||||
* @returns Created table definition
|
||||
* @throws Error if validation fails or limits exceeded
|
||||
*/
|
||||
export async function createTable(
|
||||
data: CreateTableData,
|
||||
requestId: string
|
||||
): Promise<TableDefinition> {
|
||||
// Validate table name
|
||||
const nameValidation = validateTableName(data.name)
|
||||
if (!nameValidation.valid) {
|
||||
throw new Error(`Invalid table name: ${nameValidation.errors.join(', ')}`)
|
||||
}
|
||||
|
||||
// Validate schema
|
||||
const schemaValidation = validateTableSchema(data.schema)
|
||||
if (!schemaValidation.valid) {
|
||||
throw new Error(`Invalid schema: ${schemaValidation.errors.join(', ')}`)
|
||||
}
|
||||
|
||||
// Check workspace table limit
|
||||
const existingCount = await db
|
||||
.select({ count: count() })
|
||||
.from(userTableDefinitions)
|
||||
.where(eq(userTableDefinitions.workspaceId, data.workspaceId))
|
||||
|
||||
if (existingCount[0].count >= TABLE_LIMITS.MAX_TABLES_PER_WORKSPACE) {
|
||||
throw new Error(
|
||||
`Workspace has reached maximum table limit (${TABLE_LIMITS.MAX_TABLES_PER_WORKSPACE})`
|
||||
)
|
||||
}
|
||||
|
||||
// Check for duplicate name
|
||||
const duplicateName = await db
|
||||
.select({ id: userTableDefinitions.id })
|
||||
.from(userTableDefinitions)
|
||||
.where(
|
||||
and(
|
||||
eq(userTableDefinitions.workspaceId, data.workspaceId),
|
||||
eq(userTableDefinitions.name, data.name)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (duplicateName.length > 0) {
|
||||
throw new Error(`Table with name "${data.name}" already exists in this workspace`)
|
||||
}
|
||||
|
||||
const tableId = `tbl_${crypto.randomUUID().replace(/-/g, '')}`
|
||||
const now = new Date()
|
||||
|
||||
// Use provided maxRows (from billing plan) or fall back to default
|
||||
const maxRows = data.maxRows ?? TABLE_LIMITS.MAX_ROWS_PER_TABLE
|
||||
|
||||
const newTable = {
|
||||
id: tableId,
|
||||
name: data.name,
|
||||
description: data.description ?? null,
|
||||
schema: data.schema,
|
||||
workspaceId: data.workspaceId,
|
||||
createdBy: data.userId,
|
||||
maxRows,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
}
|
||||
|
||||
await db.insert(userTableDefinitions).values(newTable)
|
||||
|
||||
logger.info(`[${requestId}] Created table ${tableId} in workspace ${data.workspaceId}`)
|
||||
|
||||
return {
|
||||
id: newTable.id,
|
||||
name: newTable.name,
|
||||
description: newTable.description,
|
||||
schema: newTable.schema as TableSchema,
|
||||
rowCount: 0,
|
||||
maxRows: newTable.maxRows,
|
||||
workspaceId: newTable.workspaceId,
|
||||
createdBy: newTable.createdBy,
|
||||
createdAt: newTable.createdAt,
|
||||
updatedAt: newTable.updatedAt,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes a table (hard delete).
|
||||
*
|
||||
* @param tableId - Table ID to delete
|
||||
* @param requestId - Request ID for logging
|
||||
*/
|
||||
export async function deleteTable(tableId: string, requestId: string): Promise<void> {
|
||||
await db.transaction(async (trx) => {
|
||||
await trx.delete(userTableRows).where(eq(userTableRows.tableId, tableId))
|
||||
await trx.delete(userTableDefinitions).where(eq(userTableDefinitions.id, tableId))
|
||||
})
|
||||
|
||||
logger.info(`[${requestId}] Deleted table ${tableId}`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Inserts a single row into a table.
|
||||
*
|
||||
* @param data - Row insertion data
|
||||
* @param table - Table definition (to avoid re-fetching)
|
||||
* @param requestId - Request ID for logging
|
||||
* @returns Inserted row
|
||||
* @throws Error if validation fails or capacity exceeded
|
||||
*/
|
||||
export async function insertRow(
|
||||
data: InsertRowData,
|
||||
table: TableDefinition,
|
||||
requestId: string
|
||||
): Promise<TableRow> {
|
||||
// Check capacity using stored rowCount (maintained by database triggers)
|
||||
if (table.rowCount >= table.maxRows) {
|
||||
throw new Error(`Table has reached maximum row limit (${table.maxRows})`)
|
||||
}
|
||||
|
||||
// Validate row size
|
||||
const sizeValidation = validateRowSize(data.data)
|
||||
if (!sizeValidation.valid) {
|
||||
throw new Error(sizeValidation.errors.join(', '))
|
||||
}
|
||||
|
||||
// Validate against schema
|
||||
const schemaValidation = validateRowAgainstSchema(data.data, table.schema)
|
||||
if (!schemaValidation.valid) {
|
||||
throw new Error(`Schema validation failed: ${schemaValidation.errors.join(', ')}`)
|
||||
}
|
||||
|
||||
// Check unique constraints using optimized database query
|
||||
const uniqueColumns = getUniqueColumns(table.schema)
|
||||
if (uniqueColumns.length > 0) {
|
||||
const uniqueValidation = await checkUniqueConstraintsDb(data.tableId, data.data, table.schema)
|
||||
if (!uniqueValidation.valid) {
|
||||
throw new Error(uniqueValidation.errors.join(', '))
|
||||
}
|
||||
}
|
||||
|
||||
const rowId = `row_${crypto.randomUUID().replace(/-/g, '')}`
|
||||
const now = new Date()
|
||||
|
||||
const newRow = {
|
||||
id: rowId,
|
||||
tableId: data.tableId,
|
||||
workspaceId: data.workspaceId,
|
||||
data: data.data,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
}
|
||||
|
||||
await db.insert(userTableRows).values(newRow)
|
||||
|
||||
logger.info(`[${requestId}] Inserted row ${rowId} into table ${data.tableId}`)
|
||||
|
||||
return {
|
||||
id: newRow.id,
|
||||
data: newRow.data as RowData,
|
||||
createdAt: newRow.createdAt,
|
||||
updatedAt: newRow.updatedAt,
|
||||
}
|
||||
}
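An end-to-end sketch of the service functions so far: create a table, then insert a row using the returned definition. The schema, IDs, and request ID are placeholders; the field names follow how createTable and insertRow read their inputs above.

```ts
import { createTable, insertRow } from '@/lib/table'

// Illustrative seed routine; not part of this diff.
async function seedContacts(workspaceId: string, userId: string, requestId: string) {
  const table = await createTable(
    {
      name: 'contacts',
      description: 'CRM contacts',
      workspaceId,
      userId,
      schema: {
        columns: [
          { name: 'email', type: 'string', required: true, unique: true },
          { name: 'age', type: 'number' },
        ],
      },
    },
    requestId
  )

  return insertRow(
    { tableId: table.id, workspaceId, data: { email: 'ada@example.com', age: 36 } },
    table,
    requestId
  )
}
```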
|
||||
|
||||
/**
|
||||
* Inserts multiple rows into a table.
|
||||
*
|
||||
* @param data - Batch insertion data
|
||||
* @param table - Table definition
|
||||
* @param requestId - Request ID for logging
|
||||
* @returns Array of inserted rows
|
||||
* @throws Error if validation fails or capacity exceeded
|
||||
*/
|
||||
export async function batchInsertRows(
|
||||
data: BatchInsertData,
|
||||
table: TableDefinition,
|
||||
requestId: string
|
||||
): Promise<TableRow[]> {
|
||||
// Check capacity using stored rowCount (maintained by database triggers)
|
||||
const remainingCapacity = table.maxRows - table.rowCount
|
||||
if (remainingCapacity < data.rows.length) {
|
||||
throw new Error(
|
||||
`Insufficient capacity. Can only insert ${remainingCapacity} more rows (table has ${table.rowCount}/${table.maxRows} rows)`
|
||||
)
|
||||
}
|
||||
|
||||
// Validate all rows
|
||||
for (let i = 0; i < data.rows.length; i++) {
|
||||
const row = data.rows[i]
|
||||
|
||||
const sizeValidation = validateRowSize(row)
|
||||
if (!sizeValidation.valid) {
|
||||
throw new Error(`Row ${i + 1}: ${sizeValidation.errors.join(', ')}`)
|
||||
}
|
||||
|
||||
const schemaValidation = validateRowAgainstSchema(row, table.schema)
|
||||
if (!schemaValidation.valid) {
|
||||
throw new Error(`Row ${i + 1}: ${schemaValidation.errors.join(', ')}`)
|
||||
}
|
||||
}
|
||||
|
||||
// Check unique constraints across all rows using optimized database query
|
||||
const uniqueColumns = getUniqueColumns(table.schema)
|
||||
if (uniqueColumns.length > 0) {
|
||||
const uniqueResult = await checkBatchUniqueConstraintsDb(data.tableId, data.rows, table.schema)
|
||||
if (!uniqueResult.valid) {
|
||||
// Format errors for batch insert
|
||||
const errorMessages = uniqueResult.errors
|
||||
.map((e) => `Row ${e.row + 1}: ${e.errors.join(', ')}`)
|
||||
.join('; ')
|
||||
throw new Error(errorMessages)
|
||||
}
|
||||
}
|
||||
|
||||
const now = new Date()
|
||||
const rowsToInsert = data.rows.map((rowData) => ({
|
||||
id: `row_${crypto.randomUUID().replace(/-/g, '')}`,
|
||||
tableId: data.tableId,
|
||||
workspaceId: data.workspaceId,
|
||||
data: rowData,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
}))
|
||||
|
||||
await db.insert(userTableRows).values(rowsToInsert)
|
||||
|
||||
logger.info(`[${requestId}] Batch inserted ${data.rows.length} rows into table ${data.tableId}`)
|
||||
|
||||
return rowsToInsert.map((r) => ({
|
||||
id: r.id,
|
||||
data: r.data as RowData,
|
||||
createdAt: r.createdAt,
|
||||
updatedAt: r.updatedAt,
|
||||
}))
|
||||
}
|
||||
|
||||
/**
|
||||
* Queries rows from a table with filtering, sorting, and pagination.
|
||||
*
|
||||
* @param tableId - Table ID to query
|
||||
* @param workspaceId - Workspace ID for access control
|
||||
* @param options - Query options (filter, sort, limit, offset)
|
||||
* @param requestId - Request ID for logging
|
||||
* @returns Query result with rows and pagination info
|
||||
*/
|
||||
export async function queryRows(
|
||||
tableId: string,
|
||||
workspaceId: string,
|
||||
options: QueryOptions,
|
||||
requestId: string
|
||||
): Promise<QueryResult> {
|
||||
const { filter, sort, limit = TABLE_LIMITS.DEFAULT_QUERY_LIMIT, offset = 0 } = options
|
||||
|
||||
const tableName = USER_TABLE_ROWS_SQL_NAME
|
||||
|
||||
// Build WHERE clause
|
||||
const baseConditions = and(
|
||||
eq(userTableRows.tableId, tableId),
|
||||
eq(userTableRows.workspaceId, workspaceId)
|
||||
)
|
||||
|
||||
let whereClause = baseConditions
|
||||
if (filter && Object.keys(filter).length > 0) {
|
||||
const filterClause = buildFilterClause(filter, tableName)
|
||||
if (filterClause) {
|
||||
whereClause = and(baseConditions, filterClause)
|
||||
}
|
||||
}
|
||||
|
||||
// Get total count
|
||||
const countResult = await db
|
||||
.select({ count: count() })
|
||||
.from(userTableRows)
|
||||
.where(whereClause ?? baseConditions)
|
||||
|
||||
const totalCount = Number(countResult[0].count)
|
||||
|
||||
// Build ORDER BY clause
|
||||
let orderByClause
|
||||
if (sort && Object.keys(sort).length > 0) {
|
||||
orderByClause = buildSortClause(sort, tableName)
|
||||
}
|
||||
|
||||
// Execute query
|
||||
let query = db
|
||||
.select()
|
||||
.from(userTableRows)
|
||||
.where(whereClause ?? baseConditions)
|
||||
|
||||
if (orderByClause) {
|
||||
query = query.orderBy(orderByClause) as typeof query
|
||||
}
|
||||
|
||||
const rows = await query.limit(limit).offset(offset)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Queried ${rows.length} rows from table ${tableId} (total: ${totalCount})`
|
||||
)
|
||||
|
||||
return {
|
||||
rows: rows.map((r) => ({
|
||||
id: r.id,
|
||||
data: r.data as RowData,
|
||||
createdAt: r.createdAt,
|
||||
updatedAt: r.updatedAt,
|
||||
})),
|
||||
rowCount: rows.length,
|
||||
totalCount,
|
||||
limit,
|
||||
offset,
|
||||
}
|
||||
}
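A hedged query sketch for the service layer. The column names in the filter and sort are placeholders; the $eq filter syntax matches the format the LLM enrichment helpers describe above.

```ts
import { getTableById, queryRows } from '@/lib/table'

// Illustrative ranking query; not part of this diff.
async function topEarners(tableId: string, workspaceId: string, requestId: string) {
  const table = await getTableById(tableId)
  if (!table) throw new Error('Table not found')

  const result = await queryRows(
    tableId,
    workspaceId,
    {
      filter: { active: { $eq: true } },
      sort: { salary: 'desc' },
      limit: 5,
      offset: 0,
    },
    requestId
  )
  // result: { rows, rowCount, totalCount, limit, offset }
  return result.rows.map((r) => r.data)
}
```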

/**
 * Gets a single row by ID.
 *
 * @param tableId - Table ID
 * @param rowId - Row ID to fetch
 * @param workspaceId - Workspace ID for access control
 * @returns Row or null if not found
 */
export async function getRowById(
  tableId: string,
  rowId: string,
  workspaceId: string
): Promise<TableRow | null> {
  const results = await db
    .select()
    .from(userTableRows)
    .where(
      and(
        eq(userTableRows.id, rowId),
        eq(userTableRows.tableId, tableId),
        eq(userTableRows.workspaceId, workspaceId)
      )
    )
    .limit(1)

  if (results.length === 0) return null

  const row = results[0]
  return {
    id: row.id,
    data: row.data as RowData,
    createdAt: row.createdAt,
    updatedAt: row.updatedAt,
  }
}

/**
 * Updates a single row.
 *
 * @param data - Update data
 * @param table - Table definition
 * @param requestId - Request ID for logging
 * @returns Updated row
 * @throws Error if row not found or validation fails
 */
export async function updateRow(
  data: UpdateRowData,
  table: TableDefinition,
  requestId: string
): Promise<TableRow> {
  // Get existing row
  const existingRow = await getRowById(data.tableId, data.rowId, data.workspaceId)
  if (!existingRow) {
    throw new Error('Row not found')
  }

  // Validate size
  const sizeValidation = validateRowSize(data.data)
  if (!sizeValidation.valid) {
    throw new Error(sizeValidation.errors.join(', '))
  }

  // Validate against schema
  const schemaValidation = validateRowAgainstSchema(data.data, table.schema)
  if (!schemaValidation.valid) {
    throw new Error(`Schema validation failed: ${schemaValidation.errors.join(', ')}`)
  }

  // Check unique constraints using optimized database query
  const uniqueColumns = getUniqueColumns(table.schema)
  if (uniqueColumns.length > 0) {
    const uniqueValidation = await checkUniqueConstraintsDb(
      data.tableId,
      data.data,
      table.schema,
      data.rowId // Exclude current row
    )
    if (!uniqueValidation.valid) {
      throw new Error(uniqueValidation.errors.join(', '))
    }
  }

  const now = new Date()

  await db
    .update(userTableRows)
    .set({ data: data.data, updatedAt: now })
    .where(eq(userTableRows.id, data.rowId))

  logger.info(`[${requestId}] Updated row ${data.rowId} in table ${data.tableId}`)

  return {
    id: data.rowId,
    data: data.data,
    createdAt: existingRow.createdAt,
    updatedAt: now,
  }
}

/**
 * Deletes a single row (hard delete).
 *
 * @param tableId - Table ID
 * @param rowId - Row ID to delete
 * @param workspaceId - Workspace ID for access control
 * @param requestId - Request ID for logging
 * @throws Error if row not found
 */
export async function deleteRow(
  tableId: string,
  rowId: string,
  workspaceId: string,
  requestId: string
): Promise<void> {
  const existingRow = await getRowById(tableId, rowId, workspaceId)
  if (!existingRow) {
    throw new Error('Row not found')
  }

  await db.delete(userTableRows).where(eq(userTableRows.id, rowId))

  logger.info(`[${requestId}] Deleted row ${rowId} from table ${tableId}`)
}

/**
 * Updates multiple rows matching a filter.
 *
 * @param data - Bulk update data
 * @param table - Table definition
 * @param requestId - Request ID for logging
 * @returns Bulk operation result
 */
export async function updateRowsByFilter(
  data: BulkUpdateData,
  table: TableDefinition,
  requestId: string
): Promise<BulkOperationResult> {
  const tableName = USER_TABLE_ROWS_SQL_NAME

  // Build filter clause
  const filterClause = buildFilterClause(data.filter, tableName)
  if (!filterClause) {
    throw new Error('Filter is required for bulk update')
  }

  // Find matching rows
  const baseConditions = and(
    eq(userTableRows.tableId, data.tableId),
    eq(userTableRows.workspaceId, data.workspaceId)
  )

  let query = db
    .select({ id: userTableRows.id, data: userTableRows.data })
    .from(userTableRows)
    .where(and(baseConditions, filterClause))

  if (data.limit) {
    query = query.limit(data.limit) as typeof query
  }

  const matchingRows = await query

  if (matchingRows.length === 0) {
    return { affectedCount: 0, affectedRowIds: [] }
  }

  // Validate merged data for each row
  for (const row of matchingRows) {
    const existingData = row.data as RowData
    const mergedData = { ...existingData, ...data.data }

    const sizeValidation = validateRowSize(mergedData)
    if (!sizeValidation.valid) {
      throw new Error(`Row ${row.id}: ${sizeValidation.errors.join(', ')}`)
    }

    const schemaValidation = validateRowAgainstSchema(mergedData, table.schema)
    if (!schemaValidation.valid) {
      throw new Error(`Row ${row.id}: ${schemaValidation.errors.join(', ')}`)
    }
  }

  // Update in batches
  const now = new Date()

  await db.transaction(async (trx) => {
    for (let i = 0; i < matchingRows.length; i += TABLE_LIMITS.UPDATE_BATCH_SIZE) {
      const batch = matchingRows.slice(i, i + TABLE_LIMITS.UPDATE_BATCH_SIZE)
      const updatePromises = batch.map((row) => {
        const existingData = row.data as RowData
        return trx
          .update(userTableRows)
          .set({
            data: { ...existingData, ...data.data },
            updatedAt: now,
          })
          .where(eq(userTableRows.id, row.id))
      })
      await Promise.all(updatePromises)
    }
  })

  logger.info(`[${requestId}] Updated ${matchingRows.length} rows in table ${data.tableId}`)

  return {
    affectedCount: matchingRows.length,
    affectedRowIds: matchingRows.map((r) => r.id),
  }
}

/**
 * Deletes multiple rows matching a filter.
 *
 * @param data - Bulk delete data
 * @param requestId - Request ID for logging
 * @returns Bulk operation result
 */
export async function deleteRowsByFilter(
  data: BulkDeleteData,
  requestId: string
): Promise<BulkOperationResult> {
  const tableName = USER_TABLE_ROWS_SQL_NAME

  // Build filter clause
  const filterClause = buildFilterClause(data.filter, tableName)
  if (!filterClause) {
    throw new Error('Filter is required for bulk delete')
  }

  // Find matching rows
  const baseConditions = and(
    eq(userTableRows.tableId, data.tableId),
    eq(userTableRows.workspaceId, data.workspaceId)
  )

  let query = db
    .select({ id: userTableRows.id })
    .from(userTableRows)
    .where(and(baseConditions, filterClause))

  if (data.limit) {
    query = query.limit(data.limit) as typeof query
  }

  const matchingRows = await query

  if (matchingRows.length === 0) {
    return { affectedCount: 0, affectedRowIds: [] }
  }

  const rowIds = matchingRows.map((r) => r.id)

  // Delete in batches
  await db.transaction(async (trx) => {
    for (let i = 0; i < rowIds.length; i += TABLE_LIMITS.DELETE_BATCH_SIZE) {
      const batch = rowIds.slice(i, i + TABLE_LIMITS.DELETE_BATCH_SIZE)
      await trx.delete(userTableRows).where(
        and(
          eq(userTableRows.tableId, data.tableId),
          eq(userTableRows.workspaceId, data.workspaceId),
          sql`${userTableRows.id} = ANY(ARRAY[${sql.join(
            batch.map((id) => sql`${id}`),
            sql`, `
          )}])`
        )
      )
    }
  })

  logger.info(`[${requestId}] Deleted ${matchingRows.length} rows from table ${data.tableId}`)

  return {
    affectedCount: matchingRows.length,
    affectedRowIds: rowIds,
  }
}
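
// Usage sketch (illustrative, not part of the diff): bulk update and bulk delete by filter.
// `table` is a TableDefinition loaded elsewhere; IDs are hypothetical. Both helpers refuse
// to run without a filter, so a bare "update/delete everything" call is rejected.
declare const table: TableDefinition

const updated = await updateRowsByFilter(
  {
    tableId: 'tbl_123',
    workspaceId: 'ws_456',
    filter: { status: 'pending' },
    data: { status: 'active' }, // merged over each matching row's existing data
    limit: 500,
  },
  table,
  'req_789'
)

const deleted = await deleteRowsByFilter(
  { tableId: 'tbl_123', workspaceId: 'ws_456', filter: { status: 'archived' } },
  'req_789'
)
// updated.affectedCount and deleted.affectedRowIds report what was touched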

369  apps/sim/lib/table/sql.ts  Normal file
@@ -0,0 +1,369 @@
/**
 * SQL query builder utilities for user-defined tables.
 *
 * Uses JSONB containment operator (@>) for equality to leverage GIN index.
 * Uses text extraction (->>) for comparisons and pattern matching.
 */

import type { SQL } from 'drizzle-orm'
import { sql } from 'drizzle-orm'
import { NAME_PATTERN } from './constants'
import type { ColumnDefinition, ConditionOperators, Filter, JsonValue, Sort } from './types'

/**
 * Whitelist of allowed operators for query filtering.
 * Only these operators can be used in filter conditions.
 */
const ALLOWED_OPERATORS = new Set([
  '$eq',
  '$ne',
  '$gt',
  '$gte',
  '$lt',
  '$lte',
  '$in',
  '$nin',
  '$contains',
])

/**
 * Builds a WHERE clause from a filter object.
 * Recursively processes logical operators ($or, $and) and field conditions.
 *
 * @param filter - Filter object with field conditions and logical operators
 * @param tableName - Table name for the query (e.g., 'user_table_rows')
 * @returns SQL WHERE clause or undefined if no filter specified
 * @throws Error if field name is invalid or operator is not allowed
 *
 * @example
 * // Simple equality
 * buildFilterClause({ name: 'John' }, 'user_table_rows')
 *
 * // Complex filter with operators
 * buildFilterClause({ age: { $gte: 18 }, status: { $in: ['active', 'pending'] } }, 'user_table_rows')
 *
 * // Logical operators
 * buildFilterClause({ $or: [{ status: 'active' }, { verified: true }] }, 'user_table_rows')
 */
export function buildFilterClause(filter: Filter, tableName: string): SQL | undefined {
  const conditions: SQL[] = []

  for (const [field, condition] of Object.entries(filter)) {
    if (condition === undefined) {
      continue
    }

    // This represents a case where the filter is a logical OR of multiple filters
    // e.g. { $or: [{ status: 'active' }, { status: 'pending' }] }
    if (field === '$or' && Array.isArray(condition)) {
      const orClause = buildLogicalClause(condition as Filter[], tableName, 'OR')
      if (orClause) {
        conditions.push(orClause)
      }
      continue
    }

    // This represents a case where the filter is a logical AND of multiple filters
    // e.g. { $and: [{ status: 'active' }, { status: 'pending' }] }
    if (field === '$and' && Array.isArray(condition)) {
      const andClause = buildLogicalClause(condition as Filter[], tableName, 'AND')
      if (andClause) {
        conditions.push(andClause)
      }
      continue
    }

    // Skip arrays for regular fields - arrays are only valid for $or and $and.
    // If we encounter an array here, it's likely malformed input (e.g., { name: [filter1, filter2] })
    // which doesn't have a clear semantic meaning, so we skip it.
    if (Array.isArray(condition)) {
      continue
    }

    // Build SQL conditions for this field. Returns array of SQL fragments for each operator.
    const fieldConditions = buildFieldCondition(
      tableName,
      field,
      condition as JsonValue | ConditionOperators
    )
    conditions.push(...fieldConditions)
  }

  if (conditions.length === 0) return undefined
  if (conditions.length === 1) return conditions[0]

  return sql.join(conditions, sql.raw(' AND '))
}

/**
 * Builds an ORDER BY clause from a sort object.
 *
 * @param sort - Sort object with field names and directions
 * @param tableName - Table name for the query (e.g., 'user_table_rows')
 * @param columns - Optional column definitions for type-aware sorting
 * @returns SQL ORDER BY clause or undefined if no sort specified
 * @throws Error if field name is invalid
 *
 * @example
 * buildSortClause({ name: 'asc', age: 'desc' }, 'user_table_rows')
 * // Returns: ORDER BY data->>'name' ASC, data->>'age' DESC
 *
 * @example
 * // With column types for proper numeric sorting
 * buildSortClause({ salary: 'desc' }, 'user_table_rows', [{ name: 'salary', type: 'number' }])
 * // Returns: ORDER BY (data->>'salary')::numeric DESC NULLS LAST
 */
export function buildSortClause(
  sort: Sort,
  tableName: string,
  columns?: ColumnDefinition[]
): SQL | undefined {
  const clauses: SQL[] = []
  const columnTypeMap = new Map(columns?.map((col) => [col.name, col.type]))

  for (const [field, direction] of Object.entries(sort)) {
    validateFieldName(field)

    if (direction !== 'asc' && direction !== 'desc') {
      throw new Error(`Invalid sort direction "${direction}". Must be "asc" or "desc".`)
    }

    const columnType = columnTypeMap.get(field)
    clauses.push(buildSortFieldClause(tableName, field, direction, columnType))
  }

  return clauses.length > 0 ? sql.join(clauses, sql.raw(', ')) : undefined
}

/**
 * Validates a field name to prevent SQL injection.
 * Field names must match the NAME_PATTERN (alphanumeric + underscore, starting with letter/underscore).
 *
 * @param field - The field name to validate
 * @throws Error if field name is invalid
 */
function validateFieldName(field: string): void {
  if (!field || typeof field !== 'string') {
    throw new Error('Field name must be a non-empty string')
  }

  if (!NAME_PATTERN.test(field)) {
    throw new Error(
      `Invalid field name "${field}". Field names must start with a letter or underscore, followed by alphanumeric characters or underscores.`
    )
  }
}

/**
 * Validates an operator to ensure it's in the allowed list.
 *
 * @param operator - The operator to validate
 * @throws Error if operator is not allowed
 */
function validateOperator(operator: string): void {
  if (!ALLOWED_OPERATORS.has(operator)) {
    throw new Error(
      `Invalid operator "${operator}". Allowed operators: ${Array.from(ALLOWED_OPERATORS).join(', ')}`
    )
  }
}

/**
 * Builds SQL conditions for a single field based on the provided condition.
 *
 * Supports both simple equality checks (using JSONB containment) and complex
 * operators like comparison, membership, and pattern matching. Field names are
 * validated to prevent SQL injection, and operators are validated against an
 * allowed whitelist.
 *
 * @param tableName - The name of the table to query (used for SQL table reference)
 * @param field - The field name to filter on (must match NAME_PATTERN)
 * @param condition - Either a simple value (for equality) or a ConditionOperators
 *   object with operators like $eq, $gt, $in, etc.
 * @returns Array of SQL condition fragments. Multiple conditions are returned
 *   when the condition object contains multiple operators.
 * @throws Error if field name is invalid or operator is not allowed
 */
function buildFieldCondition(
  tableName: string,
  field: string,
  condition: JsonValue | ConditionOperators
): SQL[] {
  validateFieldName(field)

  const conditions: SQL[] = []

  if (typeof condition === 'object' && condition !== null && !Array.isArray(condition)) {
    for (const [op, value] of Object.entries(condition)) {
      // Validate operator to ensure only allowed operators are used
      validateOperator(op)

      switch (op) {
        case '$eq':
          conditions.push(buildContainmentClause(tableName, field, value as JsonValue))
          break

        case '$ne':
          conditions.push(
            sql`NOT (${buildContainmentClause(tableName, field, value as JsonValue)})`
          )
          break

        case '$gt':
          conditions.push(buildComparisonClause(tableName, field, '>', value as number))
          break

        case '$gte':
          conditions.push(buildComparisonClause(tableName, field, '>=', value as number))
          break

        case '$lt':
          conditions.push(buildComparisonClause(tableName, field, '<', value as number))
          break

        case '$lte':
          conditions.push(buildComparisonClause(tableName, field, '<=', value as number))
          break

        case '$in':
          if (Array.isArray(value) && value.length > 0) {
            if (value.length === 1) {
              // Single value: use a plain containment clause
              conditions.push(buildContainmentClause(tableName, field, value[0]))
            } else {
              // Multiple values: OR the containment clauses together
              const inConditions = value.map((v) => buildContainmentClause(tableName, field, v))
              conditions.push(sql`(${sql.join(inConditions, sql.raw(' OR '))})`)
            }
          }
          break

        case '$nin':
          if (Array.isArray(value) && value.length > 0) {
            const ninConditions = value.map(
              (v) => sql`NOT (${buildContainmentClause(tableName, field, v)})`
            )
            conditions.push(sql`(${sql.join(ninConditions, sql.raw(' AND '))})`)
          }
          break

        case '$contains':
          conditions.push(buildContainsClause(tableName, field, value as string))
          break

        default:
          // This should never happen due to validateOperator, but added for completeness
          throw new Error(`Unsupported operator: ${op}`)
      }
    }
  } else {
    // Simple value (primitive or null) - shorthand for equality.
    // Example: { name: 'John' } is equivalent to { name: { $eq: 'John' } }
    conditions.push(buildContainmentClause(tableName, field, condition))
  }

  return conditions
}

/**
 * Builds SQL clauses from nested filters and joins them with the specified operator.
 *
 * @example
 * // OR operator
 * buildLogicalClause(
 *   [{ status: 'active' }, { status: 'pending' }],
 *   'user_table_rows',
 *   'OR'
 * )
 * // Returns: (data @> '{"status":"active"}'::jsonb OR data @> '{"status":"pending"}'::jsonb)
 *
 * @example
 * // AND operator
 * buildLogicalClause(
 *   [{ age: { $gte: 18 } }, { verified: true }],
 *   'user_table_rows',
 *   'AND'
 * )
 * // Returns: ((data->>'age')::numeric >= 18 AND data @> '{"verified":true}'::jsonb)
 */
function buildLogicalClause(
  subFilters: Filter[],
  tableName: string,
  operator: 'OR' | 'AND'
): SQL | undefined {
  const clauses: SQL[] = []
  for (const subFilter of subFilters) {
    const clause = buildFilterClause(subFilter, tableName)
    if (clause) {
      clauses.push(clause)
    }
  }

  if (clauses.length === 0) return undefined
  if (clauses.length === 1) return clauses[0]

  return sql`(${sql.join(clauses, sql.raw(` ${operator} `))})`
}

/** Builds JSONB containment clause: `data @> '{"field": value}'::jsonb` (uses GIN index) */
function buildContainmentClause(tableName: string, field: string, value: JsonValue): SQL {
  const jsonObj = JSON.stringify({ [field]: value })
  return sql`${sql.raw(`${tableName}.data`)} @> ${jsonObj}::jsonb`
}

/** Builds numeric comparison: `(data->>'field')::numeric <op> value` (cannot use GIN index) */
function buildComparisonClause(
  tableName: string,
  field: string,
  operator: '>' | '>=' | '<' | '<=',
  value: number
): SQL {
  const escapedField = field.replace(/'/g, "''")
  return sql`(${sql.raw(`${tableName}.data->>'${escapedField}'`)})::numeric ${sql.raw(operator)} ${value}`
}

/** Builds case-insensitive pattern match: `data->>'field' ILIKE '%value%'` */
function buildContainsClause(tableName: string, field: string, value: string): SQL {
  const escapedField = field.replace(/'/g, "''")
  return sql`${sql.raw(`${tableName}.data->>'${escapedField}'`)} ILIKE ${`%${value}%`}`
}

/**
 * Builds a single ORDER BY clause for a field.
 * Timestamp fields use direct column access, others use JSONB text extraction.
 * Numeric and date columns are cast to appropriate types for correct sorting.
 *
 * @param tableName - The table name
 * @param field - The field name to sort by
 * @param direction - Sort direction ('asc' or 'desc')
 * @param columnType - Optional column type for type-aware sorting
 */
function buildSortFieldClause(
  tableName: string,
  field: string,
  direction: 'asc' | 'desc',
  columnType?: string
): SQL {
  const escapedField = field.replace(/'/g, "''")
  const directionSql = direction.toUpperCase()

  if (field === 'createdAt' || field === 'updatedAt') {
    return sql.raw(`${tableName}.${escapedField} ${directionSql}`)
  }

  const jsonbExtract = `${tableName}.data->>'${escapedField}'`

  // Cast to appropriate type for correct sorting
  if (columnType === 'number') {
    // Cast to numeric, with NULLS LAST to handle null/invalid values
    return sql.raw(`(${jsonbExtract})::numeric ${directionSql} NULLS LAST`)
  }

  if (columnType === 'date') {
    // Cast to timestamp for chronological sorting
    return sql.raw(`(${jsonbExtract})::timestamp ${directionSql} NULLS LAST`)
  }

  // Default: sort as text (for string, boolean, json, or unknown types)
  return sql.raw(`${jsonbExtract} ${directionSql}`)
}
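
// Behavior sketch (illustrative, not part of the diff): the builders above validate field names,
// operators, and sort directions before any SQL is produced, so untrusted filter input fails fast.
const hostileField = JSON.parse(`{"name'; DROP TABLE users;--": "x"}`) as Filter
// buildFilterClause(hostileField, 'user_table_rows')
//   -> throws: Invalid field name "name'; DROP TABLE users;--". ...

const unknownOperator = JSON.parse('{"age": {"$regex": ".*"}}') as Filter
// buildFilterClause(unknownOperator, 'user_table_rows')
//   -> throws: Invalid operator "$regex". Allowed operators: $eq, $ne, $gt, ...

const badDirection = JSON.parse('{"name": "upward"}') as Sort
// buildSortClause(badDirection, 'user_table_rows')
//   -> throws: Invalid sort direction "upward". Must be "asc" or "desc".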

188  apps/sim/lib/table/types.ts  Normal file
@@ -0,0 +1,188 @@
/**
 * Type definitions for user-defined tables.
 */

import type { COLUMN_TYPES } from './constants'

export type ColumnValue = string | number | boolean | null | Date
export type JsonValue = ColumnValue | JsonValue[] | { [key: string]: JsonValue }

/** Row data mapping column names to values. */
export type RowData = Record<string, JsonValue>

export type SortDirection = 'asc' | 'desc'

/** Sort specification mapping column names to direction. */
export type Sort = Record<string, SortDirection>

/** Option for dropdown/select components. */
export interface ColumnOption {
  value: string
  label: string
}

export interface ColumnDefinition {
  name: string
  type: (typeof COLUMN_TYPES)[number]
  required?: boolean
  unique?: boolean
}

export interface TableSchema {
  columns: ColumnDefinition[]
}

export interface TableDefinition {
  id: string
  name: string
  description?: string | null
  schema: TableSchema
  rowCount: number
  maxRows: number
  workspaceId: string
  createdBy: string
  createdAt: Date | string
  updatedAt: Date | string
}

/** Minimal table info for UI components. */
export type TableInfo = Pick<TableDefinition, 'id' | 'name' | 'schema'>

/** Simplified table summary for LLM enrichment and display contexts. */
export interface TableSummary {
  name: string
  columns: Array<Pick<ColumnDefinition, 'name' | 'type'>>
}

export interface TableRow {
  id: string
  data: RowData
  createdAt: Date | string
  updatedAt: Date | string
}

/**
 * MongoDB-style query operators for field comparisons.
 *
 * @example
 * { $eq: 'John' }
 * { $gte: 18, $lt: 65 }
 * { $in: ['active', 'pending'] }
 */
export interface ConditionOperators {
  $eq?: ColumnValue
  $ne?: ColumnValue
  $gt?: number
  $gte?: number
  $lt?: number
  $lte?: number
  $in?: ColumnValue[]
  $nin?: ColumnValue[]
  $contains?: string
}

/**
 * Filter object for querying table rows. Supports direct equality shorthand,
 * operator objects, and logical $or/$and combinators.
 *
 * @example
 * { name: 'John' }
 * { age: { $gte: 18 } }
 * { $or: [{ status: 'active' }, { status: 'pending' }] }
 */
export interface Filter {
  $or?: Filter[]
  $and?: Filter[]
  [key: string]: ColumnValue | ConditionOperators | Filter[] | undefined
}

export interface ValidationResult {
  valid: boolean
  errors: string[]
}

/**
 * UI builder state for a single filter rule.
 * Includes an `id` field for React keys and string values for form inputs.
 */
export interface FilterRule {
  id: string
  logicalOperator: 'and' | 'or'
  column: string
  operator: string
  value: string
}

/**
 * UI builder state for a single sort rule.
 * Includes an `id` field for React keys.
 */
export interface SortRule {
  id: string
  column: string
  direction: SortDirection
}

export interface QueryOptions {
  filter?: Filter
  sort?: Sort
  limit?: number
  offset?: number
}

export interface QueryResult {
  rows: TableRow[]
  rowCount: number
  totalCount: number
  limit: number
  offset: number
}

export interface BulkOperationResult {
  affectedCount: number
  affectedRowIds: string[]
}

export interface CreateTableData {
  name: string
  description?: string
  schema: TableSchema
  workspaceId: string
  userId: string
  /** Optional max rows override based on billing plan. Defaults to TABLE_LIMITS.MAX_ROWS_PER_TABLE. */
  maxRows?: number
}

export interface InsertRowData {
  tableId: string
  data: RowData
  workspaceId: string
}

export interface BatchInsertData {
  tableId: string
  rows: RowData[]
  workspaceId: string
}

export interface UpdateRowData {
  tableId: string
  rowId: string
  data: RowData
  workspaceId: string
}

export interface BulkUpdateData {
  tableId: string
  filter: Filter
  data: RowData
  limit?: number
  workspaceId: string
}

export interface BulkDeleteData {
  tableId: string
  filter: Filter
  limit?: number
  workspaceId: string
}
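
// Illustrative sketch (not part of the diff): Filter and QueryOptions values exercising the
// shorthand, operator, and logical forms defined above. Field names are hypothetical.
const exampleFilter: Filter = {
  status: 'active',                           // shorthand equality
  age: { $gte: 18, $lt: 65 },                 // operator object
  $or: [{ plan: 'pro' }, { verified: true }], // logical combinator
}

const exampleQuery: QueryOptions = {
  filter: exampleFilter,
  sort: { createdAt: 'desc' },
  limit: 25,
  offset: 50,
}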

533  apps/sim/lib/table/validation.ts  Normal file
@@ -0,0 +1,533 @@
/**
 * Validation utilities for table schemas and row data.
 */

import { db } from '@sim/db'
import { userTableRows } from '@sim/db/schema'
import { and, eq, or, sql } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { COLUMN_TYPES, NAME_PATTERN, TABLE_LIMITS } from './constants'
import type { ColumnDefinition, RowData, TableSchema, ValidationResult } from './types'

export type { ColumnDefinition, TableSchema, ValidationResult }

type ValidationSuccess = { valid: true }
type ValidationFailure = { valid: false; response: NextResponse }

/** Options for validating a single row. */
export interface ValidateRowOptions {
  rowData: RowData
  schema: TableSchema
  tableId: string
  excludeRowId?: string
  checkUnique?: boolean
}

/** Error information for a single row in batch validation. */
export interface BatchRowError {
  row: number
  errors: string[]
}

/** Options for validating multiple rows in batch. */
export interface ValidateBatchRowsOptions {
  rows: RowData[]
  schema: TableSchema
  tableId: string
  checkUnique?: boolean
}

/**
 * Validates a single row (size, schema, unique constraints) and returns a formatted response on failure.
 * Uses optimized database queries for unique constraint checks to avoid loading all rows into memory.
 */
export async function validateRowData(
  options: ValidateRowOptions
): Promise<ValidationSuccess | ValidationFailure> {
  const { rowData, schema, tableId, excludeRowId, checkUnique = true } = options

  const sizeValidation = validateRowSize(rowData)
  if (!sizeValidation.valid) {
    return {
      valid: false,
      response: NextResponse.json(
        { error: 'Invalid row data', details: sizeValidation.errors },
        { status: 400 }
      ),
    }
  }

  const schemaValidation = validateRowAgainstSchema(rowData, schema)
  if (!schemaValidation.valid) {
    return {
      valid: false,
      response: NextResponse.json(
        { error: 'Row data does not match schema', details: schemaValidation.errors },
        { status: 400 }
      ),
    }
  }

  if (checkUnique) {
    // Use optimized database query instead of loading all rows
    const uniqueValidation = await checkUniqueConstraintsDb(tableId, rowData, schema, excludeRowId)

    if (!uniqueValidation.valid) {
      return {
        valid: false,
        response: NextResponse.json(
          { error: 'Unique constraint violation', details: uniqueValidation.errors },
          { status: 400 }
        ),
      }
    }
  }

  return { valid: true }
}

/**
 * Validates multiple rows for batch insert (size, schema, unique constraints including within batch).
 * Uses optimized database queries for unique constraint checks to avoid loading all rows into memory.
 */
export async function validateBatchRows(
  options: ValidateBatchRowsOptions
): Promise<ValidationSuccess | ValidationFailure> {
  const { rows, schema, tableId, checkUnique = true } = options
  const errors: BatchRowError[] = []

  for (let i = 0; i < rows.length; i++) {
    const rowData = rows[i]

    const sizeValidation = validateRowSize(rowData)
    if (!sizeValidation.valid) {
      errors.push({ row: i, errors: sizeValidation.errors })
      continue
    }

    const schemaValidation = validateRowAgainstSchema(rowData, schema)
    if (!schemaValidation.valid) {
      errors.push({ row: i, errors: schemaValidation.errors })
    }
  }

  if (errors.length > 0) {
    return {
      valid: false,
      response: NextResponse.json(
        { error: 'Validation failed for some rows', details: errors },
        { status: 400 }
      ),
    }
  }

  if (checkUnique) {
    const uniqueColumns = getUniqueColumns(schema)
    if (uniqueColumns.length > 0) {
      // Use optimized batch unique constraint check
      const uniqueResult = await checkBatchUniqueConstraintsDb(tableId, rows, schema)

      if (!uniqueResult.valid) {
        return {
          valid: false,
          response: NextResponse.json(
            { error: 'Unique constraint violations in batch', details: uniqueResult.errors },
            { status: 400 }
          ),
        }
      }
    }
  }

  return { valid: true }
}

/** Validates table name format and length. */
export function validateTableName(name: string): ValidationResult {
  const errors: string[] = []

  if (!name || typeof name !== 'string') {
    errors.push('Table name is required')
    return { valid: false, errors }
  }

  if (name.length > TABLE_LIMITS.MAX_TABLE_NAME_LENGTH) {
    errors.push(
      `Table name exceeds maximum length (${TABLE_LIMITS.MAX_TABLE_NAME_LENGTH} characters)`
    )
  }

  if (!NAME_PATTERN.test(name)) {
    errors.push(
      'Table name must start with letter or underscore, followed by alphanumeric or underscore'
    )
  }

  return { valid: errors.length === 0, errors }
}

/** Validates table schema structure and column definitions. */
export function validateTableSchema(schema: TableSchema): ValidationResult {
  const errors: string[] = []

  if (!schema || typeof schema !== 'object') {
    errors.push('Schema is required')
    return { valid: false, errors }
  }

  if (!Array.isArray(schema.columns)) {
    errors.push('Schema must have columns array')
    return { valid: false, errors }
  }

  if (schema.columns.length === 0) {
    errors.push('Schema must have at least one column')
  }

  if (schema.columns.length > TABLE_LIMITS.MAX_COLUMNS_PER_TABLE) {
    errors.push(`Schema exceeds maximum columns (${TABLE_LIMITS.MAX_COLUMNS_PER_TABLE})`)
  }

  for (const column of schema.columns) {
    const columnResult = validateColumnDefinition(column)
    errors.push(...columnResult.errors)
  }

  const columnNames = schema.columns.map((c) => c.name.toLowerCase())
  const uniqueNames = new Set(columnNames)
  if (uniqueNames.size !== columnNames.length) {
    errors.push('Duplicate column names found')
  }

  return { valid: errors.length === 0, errors }
}

/** Validates row data matches schema column types and required fields. */
export function validateRowAgainstSchema(data: RowData, schema: TableSchema): ValidationResult {
  const errors: string[] = []

  for (const column of schema.columns) {
    const value = data[column.name]

    if (column.required && (value === undefined || value === null)) {
      errors.push(`Missing required field: ${column.name}`)
      continue
    }

    if (value === null || value === undefined) continue

    switch (column.type) {
      case 'string':
        if (typeof value !== 'string') {
          errors.push(`${column.name} must be string, got ${typeof value}`)
        } else if (value.length > TABLE_LIMITS.MAX_STRING_VALUE_LENGTH) {
          errors.push(`${column.name} exceeds max string length`)
        }
        break
      case 'number':
        if (typeof value !== 'number' || Number.isNaN(value)) {
          errors.push(`${column.name} must be number`)
        }
        break
      case 'boolean':
        if (typeof value !== 'boolean') {
          errors.push(`${column.name} must be boolean`)
        }
        break
      case 'date':
        if (
          !(value instanceof Date) &&
          (typeof value !== 'string' || Number.isNaN(Date.parse(value)))
        ) {
          errors.push(`${column.name} must be valid date`)
        }
        break
      case 'json':
        try {
          JSON.stringify(value)
        } catch {
          errors.push(`${column.name} must be valid JSON`)
        }
        break
    }
  }

  return { valid: errors.length === 0, errors }
}
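
// Illustrative sketch (not part of the diff): validating rows against a small hypothetical schema.
const contactSchema: TableSchema = {
  columns: [
    { name: 'email', type: 'string', required: true, unique: true },
    { name: 'age', type: 'number' },
    { name: 'subscribed', type: 'boolean' },
  ],
}

validateRowAgainstSchema({ email: 'a@example.com', age: 30 }, contactSchema)
// -> { valid: true, errors: [] }

validateRowAgainstSchema({ age: 'thirty' }, contactSchema)
// -> { valid: false, errors: ['Missing required field: email', 'age must be number'] }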

/** Validates row data size is within limits. */
export function validateRowSize(data: RowData): ValidationResult {
  const size = JSON.stringify(data).length
  if (size > TABLE_LIMITS.MAX_ROW_SIZE_BYTES) {
    return {
      valid: false,
      errors: [`Row size exceeds limit (${size} bytes > ${TABLE_LIMITS.MAX_ROW_SIZE_BYTES} bytes)`],
    }
  }
  return { valid: true, errors: [] }
}

/** Returns columns with unique constraint. */
export function getUniqueColumns(schema: TableSchema): ColumnDefinition[] {
  return schema.columns.filter((col) => col.unique === true)
}

/** Validates unique constraints against existing rows (in-memory version, used to check values within a batch). */
export function validateUniqueConstraints(
  data: RowData,
  schema: TableSchema,
  existingRows: { id: string; data: RowData }[],
  excludeRowId?: string
): ValidationResult {
  const errors: string[] = []
  const uniqueColumns = getUniqueColumns(schema)

  for (const column of uniqueColumns) {
    const value = data[column.name]
    if (value === null || value === undefined) continue

    const duplicate = existingRows.find((row) => {
      if (excludeRowId && row.id === excludeRowId) return false

      const existingValue = row.data[column.name]
      if (typeof value === 'string' && typeof existingValue === 'string') {
        return value.toLowerCase() === existingValue.toLowerCase()
      }
      return value === existingValue
    })

    if (duplicate) {
      errors.push(
        `Column "${column.name}" must be unique. Value "${value}" already exists in row ${duplicate.id}`
      )
    }
  }

  return { valid: errors.length === 0, errors }
}

/**
 * Checks unique constraints using targeted database queries.
 * Only queries for specific conflicting values instead of loading all rows.
 * This reduces memory usage from O(n) to O(1) where n is the number of rows.
 */
export async function checkUniqueConstraintsDb(
  tableId: string,
  data: RowData,
  schema: TableSchema,
  excludeRowId?: string
): Promise<ValidationResult> {
  const errors: string[] = []
  const uniqueColumns = getUniqueColumns(schema)

  if (uniqueColumns.length === 0) {
    return { valid: true, errors: [] }
  }

  // Build conditions for each unique column value
  const conditions = []

  for (const column of uniqueColumns) {
    const value = data[column.name]
    if (value === null || value === undefined) continue

    // Use JSONB operators to check for existing values
    // For strings, use case-insensitive comparison
    if (typeof value === 'string') {
      conditions.push({
        column,
        value,
        sql: sql`lower(${userTableRows.data}->>${sql.raw(`'${column.name}'`)}) = ${value.toLowerCase()}`,
      })
    } else {
      // For other types, use direct JSONB comparison
      conditions.push({
        column,
        value,
        sql: sql`(${userTableRows.data}->${sql.raw(`'${column.name}'`)})::jsonb = ${JSON.stringify(value)}::jsonb`,
      })
    }
  }

  if (conditions.length === 0) {
    return { valid: true, errors: [] }
  }

  // Query for each unique column separately to provide specific error messages
  for (const condition of conditions) {
    const baseCondition = and(eq(userTableRows.tableId, tableId), condition.sql)

    const whereClause = excludeRowId
      ? and(baseCondition, sql`${userTableRows.id} != ${excludeRowId}`)
      : baseCondition

    const conflictingRow = await db
      .select({ id: userTableRows.id })
      .from(userTableRows)
      .where(whereClause)
      .limit(1)

    if (conflictingRow.length > 0) {
      errors.push(
        `Column "${condition.column.name}" must be unique. Value "${condition.value}" already exists in row ${conflictingRow[0].id}`
      )
    }
  }

  return { valid: errors.length === 0, errors }
}

/**
 * Checks unique constraints for a batch of rows using targeted database queries.
 * Validates both against existing database rows and within the batch itself.
 */
export async function checkBatchUniqueConstraintsDb(
  tableId: string,
  rows: RowData[],
  schema: TableSchema
): Promise<{ valid: boolean; errors: Array<{ row: number; errors: string[] }> }> {
  const uniqueColumns = getUniqueColumns(schema)
  const rowErrors: Array<{ row: number; errors: string[] }> = []

  if (uniqueColumns.length === 0) {
    return { valid: true, errors: [] }
  }

  // Build a set of all unique values for each column to check against DB
  const valuesByColumn = new Map<string, { values: Set<string>; column: ColumnDefinition }>()

  for (const column of uniqueColumns) {
    valuesByColumn.set(column.name, { values: new Set(), column })
  }

  // Collect all unique values from the batch and check for duplicates within the batch
  const batchValueMap = new Map<string, Map<string, number>>() // columnName -> (normalizedValue -> firstRowIndex)

  for (const column of uniqueColumns) {
    batchValueMap.set(column.name, new Map())
  }

  for (let i = 0; i < rows.length; i++) {
    const rowData = rows[i]
    const currentRowErrors: string[] = []

    for (const column of uniqueColumns) {
      const value = rowData[column.name]
      if (value === null || value === undefined) continue

      const normalizedValue =
        typeof value === 'string' ? value.toLowerCase() : JSON.stringify(value)

      // Check for duplicate within batch
      const columnValueMap = batchValueMap.get(column.name)!
      if (columnValueMap.has(normalizedValue)) {
        const firstRowIndex = columnValueMap.get(normalizedValue)!
        currentRowErrors.push(
          `Column "${column.name}" must be unique. Value "${value}" duplicates row ${firstRowIndex + 1} in batch`
        )
      } else {
        columnValueMap.set(normalizedValue, i)
        valuesByColumn.get(column.name)!.values.add(normalizedValue)
      }
    }

    if (currentRowErrors.length > 0) {
      rowErrors.push({ row: i, errors: currentRowErrors })
    }
  }

  // Now check against database for all unique values at once
  for (const [columnName, { values, column }] of valuesByColumn) {
    if (values.size === 0) continue

    // Build OR conditions for all values of this column
    const valueArray = Array.from(values)
    const valueConditions = valueArray.map((normalizedValue) => {
      // Check if the original values are strings (normalized values for strings are lowercase)
      // We need to determine the type from the column definition or the first row that has this value
      const isStringColumn = column.type === 'string'

      if (isStringColumn) {
        return sql`lower(${userTableRows.data}->>${sql.raw(`'${columnName}'`)}) = ${normalizedValue}`
      }
      return sql`(${userTableRows.data}->${sql.raw(`'${columnName}'`)})::jsonb = ${normalizedValue}::jsonb`
    })

    const conflictingRows = await db
      .select({
        id: userTableRows.id,
        data: userTableRows.data,
      })
      .from(userTableRows)
      .where(and(eq(userTableRows.tableId, tableId), or(...valueConditions)))
      .limit(valueArray.length) // We only need up to one conflict per value

    // Map conflicts back to batch rows
    for (const conflict of conflictingRows) {
      const conflictData = conflict.data as RowData
      const conflictValue = conflictData[columnName]
      const normalizedConflictValue =
        typeof conflictValue === 'string'
          ? conflictValue.toLowerCase()
          : JSON.stringify(conflictValue)

      // Find which batch rows have this conflicting value
      for (let i = 0; i < rows.length; i++) {
        const rowValue = rows[i][columnName]
        if (rowValue === null || rowValue === undefined) continue

        const normalizedRowValue =
          typeof rowValue === 'string' ? rowValue.toLowerCase() : JSON.stringify(rowValue)

        if (normalizedRowValue === normalizedConflictValue) {
          // Check if this row already has errors for this column
          let rowError = rowErrors.find((e) => e.row === i)
          if (!rowError) {
            rowError = { row: i, errors: [] }
            rowErrors.push(rowError)
          }

          const errorMsg = `Column "${columnName}" must be unique. Value "${rowValue}" already exists in row ${conflict.id}`
          if (!rowError.errors.includes(errorMsg)) {
            rowError.errors.push(errorMsg)
          }
        }
      }
    }
  }

  // Sort errors by row index
  rowErrors.sort((a, b) => a.row - b.row)

  return { valid: rowErrors.length === 0, errors: rowErrors }
}

/** Validates column definition format and type. */
export function validateColumnDefinition(column: ColumnDefinition): ValidationResult {
  const errors: string[] = []

  if (!column.name || typeof column.name !== 'string') {
    errors.push('Column name is required')
    return { valid: false, errors }
  }

  if (column.name.length > TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH) {
    errors.push(
      `Column name "${column.name}" exceeds maximum length (${TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH} characters)`
    )
  }

  if (!NAME_PATTERN.test(column.name)) {
    errors.push(
      `Column name "${column.name}" must start with letter or underscore, followed by alphanumeric or underscore`
    )
  }

  if (!COLUMN_TYPES.includes(column.type)) {
    errors.push(
      `Column "${column.name}" has invalid type "${column.type}". Valid types: ${COLUMN_TYPES.join(', ')}`
    )
  }

  return { valid: errors.length === 0, errors }
}

@@ -488,11 +488,14 @@ export async function transformBlockTool(

const userProvidedParams = block.params || {}

const llmSchema = await createLLMToolSchema(toolConfig, userProvidedParams)
const { schema: llmSchema, enrichedDescription } = await createLLMToolSchema(
toolConfig,
userProvidedParams
)

let uniqueToolId = toolConfig.id
let toolName = toolConfig.name
let toolDescription = toolConfig.description
let toolDescription = enrichedDescription || toolConfig.description

if (toolId === 'workflow_executor' && userProvidedParams.workflowId) {
uniqueToolId = `${toolConfig.id}_${userProvidedParams.workflowId}`
@@ -509,6 +512,8 @@ export async function transformBlockTool(
}
} else if (toolId.startsWith('knowledge_') && userProvidedParams.knowledgeBaseId) {
uniqueToolId = `${toolConfig.id}_${userProvidedParams.knowledgeBaseId}`
} else if (toolId.startsWith('table_') && userProvidedParams.tableId) {
uniqueToolId = `${toolConfig.id}_${userProvidedParams.tableId}`
}

return {

@@ -57,6 +57,54 @@ const ERROR_EXTRACTORS: ErrorExtractorConfig[] = [
examples: ['Various REST APIs'],
extract: (errorInfo) => errorInfo?.data?.details?.[0]?.message,
},
{
id: 'details-string-array',
description: 'Details array containing strings (validation errors)',
examples: ['Table API', 'Validation APIs'],
extract: (errorInfo) => {
const details = errorInfo?.data?.details
if (!Array.isArray(details) || details.length === 0) return undefined

// Check if it's an array of strings
if (details.every((d) => typeof d === 'string')) {
const errorMessage = errorInfo?.data?.error || 'Validation failed'
return `${errorMessage}: ${details.join('; ')}`
}

return undefined
},
},
{
id: 'batch-validation-errors',
description: 'Batch validation errors with row numbers and error arrays',
examples: ['Table Batch Insert'],
extract: (errorInfo) => {
const details = errorInfo?.data?.details
if (!Array.isArray(details) || details.length === 0) return undefined

// Check if it's an array of objects with row numbers and errors
if (
details.every(
(d) =>
typeof d === 'object' &&
d !== null &&
'row' in d &&
'errors' in d &&
Array.isArray(d.errors)
)
) {
const errorMessage = errorInfo?.data?.error || 'Validation failed'
const rowErrors = details
.map((detail: { row: number; errors: string[] }) => {
return `Row ${detail.row}: ${detail.errors.join(', ')}`
})
.join('; ')
return `${errorMessage}: ${rowErrors}`
}

return undefined
},
},
{
id: 'hunter-errors',
description: 'Hunter API error details',
@@ -176,6 +224,8 @@ export const ErrorExtractorId = {
GRAPHQL_ERRORS: 'graphql-errors',
TWITTER_ERRORS: 'twitter-errors',
DETAILS_ARRAY: 'details-array',
DETAILS_STRING_ARRAY: 'details-string-array',
BATCH_VALIDATION_ERRORS: 'batch-validation-errors',
HUNTER_ERRORS: 'hunter-errors',
ERRORS_ARRAY_STRING: 'errors-array-string',
TELEGRAM_DESCRIPTION: 'telegram-description',

@@ -19,22 +19,41 @@ import {
const logger = createLogger('Tools')

/**
* Normalizes a tool ID by stripping resource ID suffix (UUID).
* Normalizes a tool ID by stripping resource ID suffix (UUID/tableId).
* Workflow tools: 'workflow_executor_<uuid>' -> 'workflow_executor'
* Knowledge tools: 'knowledge_search_<uuid>' -> 'knowledge_search'
* Table tools: 'table_query_rows_<tableId>' -> 'table_query_rows'
*/
function normalizeToolId(toolId: string): string {
// Check for workflow_executor_<uuid> pattern
if (toolId.startsWith('workflow_executor_') && toolId.length > 'workflow_executor_'.length) {
return 'workflow_executor'
}
// Check for knowledge_<operation>_<uuid> pattern

const knowledgeOps = ['knowledge_search', 'knowledge_upload_chunk', 'knowledge_create_document']
for (const op of knowledgeOps) {
if (toolId.startsWith(`${op}_`) && toolId.length > op.length + 1) {
return op
}
}

const tableOps = [
'table_query_rows',
'table_insert_row',
'table_batch_insert_rows',
'table_update_row',
'table_update_rows_by_filter',
'table_delete_rows_by_filter',
'table_upsert_row',
'table_get_row',
'table_delete_row',
'table_get_schema',
]
for (const op of tableOps) {
if (toolId.startsWith(`${op}_`) && toolId.length > op.length + 1) {
return op
}
}

return toolId
}
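
// Behavior sketch (illustrative, not part of the diff): how suffixed tool IDs normalize.
// The suffix values are made up.
normalizeToolId('table_query_rows_tbl_8f2c91')    // -> 'table_query_rows'
normalizeToolId('knowledge_search_550e8400-e29b') // -> 'knowledge_search'
normalizeToolId('workflow_executor_42')           // -> 'workflow_executor'
normalizeToolId('table_query_rows')               // -> 'table_query_rows' (no suffix, unchanged)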
|
||||
|
||||
|
||||
@@ -89,7 +89,7 @@ describe('Tool Parameters Utils', () => {
|
||||
channel: '#general',
|
||||
}
|
||||
|
||||
const schema = await createLLMToolSchema(mockToolConfig, userProvidedParams)
|
||||
const { schema } = await createLLMToolSchema(mockToolConfig, userProvidedParams)
|
||||
|
||||
expect(schema.properties).not.toHaveProperty('apiKey') // user-only, excluded
|
||||
expect(schema.properties).not.toHaveProperty('channel') // user-provided, excluded
|
||||
@@ -100,7 +100,7 @@ describe('Tool Parameters Utils', () => {
|
||||
})
|
||||
|
||||
it.concurrent('should include all parameters when none are user-provided', async () => {
|
||||
const schema = await createLLMToolSchema(mockToolConfig, {})
|
||||
const { schema } = await createLLMToolSchema(mockToolConfig, {})
|
||||
|
||||
expect(schema.properties).not.toHaveProperty('apiKey') // user-only, never shown to LLM
|
||||
expect(schema.properties).toHaveProperty('message') // user-or-llm, shown to LLM
|
||||
@@ -332,7 +332,10 @@ describe('Tool Parameters Utils', () => {
|
||||
inputMapping: '{}',
|
||||
}
|
||||
|
||||
const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
|
||||
const { schema } = await createLLMToolSchema(
|
||||
mockWorkflowExecutorConfig,
|
||||
userProvidedParams
|
||||
)
|
||||
|
||||
expect(schema.properties).toHaveProperty('inputMapping')
|
||||
expect(schema.properties.inputMapping.type).toBe('object')
|
||||
@@ -347,7 +350,10 @@ describe('Tool Parameters Utils', () => {
|
||||
inputMapping: '{"query": "", "limit": ""}',
|
||||
}
|
||||
|
||||
const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
|
||||
const { schema } = await createLLMToolSchema(
|
||||
mockWorkflowExecutorConfig,
|
||||
userProvidedParams
|
||||
)
|
||||
|
||||
expect(schema.properties).toHaveProperty('inputMapping')
|
||||
}
|
||||
@@ -360,7 +366,10 @@ describe('Tool Parameters Utils', () => {
|
||||
workflowId: 'workflow-123',
|
||||
}
|
||||
|
||||
const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
|
||||
const { schema } = await createLLMToolSchema(
|
||||
mockWorkflowExecutorConfig,
|
||||
userProvidedParams
|
||||
)
|
||||
|
||||
expect(schema.properties).toHaveProperty('inputMapping')
|
||||
}
|
||||
@@ -371,7 +380,7 @@ describe('Tool Parameters Utils', () => {
|
||||
workflowId: 'workflow-123',
|
||||
}
|
||||
|
||||
const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
|
||||
const { schema } = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
|
||||
|
||||
expect(schema.properties).not.toHaveProperty('workflowId')
|
||||
expect(schema.properties).toHaveProperty('inputMapping')
|
||||
@@ -545,7 +554,7 @@ describe('Tool Parameters Utils', () => {
|
||||
|
||||
describe('Type Interface Validation', () => {
|
||||
it.concurrent('should have properly typed ToolSchema', async () => {
|
||||
const schema: ToolSchema = await createLLMToolSchema(mockToolConfig, {})
|
||||
const { schema } = await createLLMToolSchema(mockToolConfig, {})
|
||||
|
||||
expect(schema.type).toBe('object')
|
||||
expect(typeof schema.properties).toBe('object')
|
||||
|
||||
@@ -109,6 +109,11 @@ export interface ToolSchema {
|
||||
required: string[]
|
||||
}
|
||||
|
||||
export interface LLMToolSchemaResult {
|
||||
schema: ToolSchema
|
||||
enrichedDescription?: string
|
||||
}
|
||||
|
||||
export interface ValidationResult {
|
||||
valid: boolean
|
||||
missingParams: string[]
|
||||
@@ -419,27 +424,19 @@ export function createUserToolSchema(toolConfig: ToolConfig): ToolSchema {
|
||||
export async function createLLMToolSchema(
|
||||
toolConfig: ToolConfig,
|
||||
userProvidedParams: Record<string, unknown>
|
||||
): Promise<ToolSchema> {
|
||||
): Promise<LLMToolSchemaResult> {
|
||||
const schema: ToolSchema = {
|
||||
type: 'object',
|
||||
properties: {},
|
||||
required: [],
|
||||
}
|
||||
|
||||
// Only include parameters that the LLM should/can provide
|
||||
for (const [paramId, param] of Object.entries(toolConfig.params)) {
|
||||
// Check if this param has schema enrichment config
|
||||
const enrichmentConfig = toolConfig.schemaEnrichment?.[paramId]
|
||||
|
||||
// Special handling for workflow_executor's inputMapping parameter
|
||||
// Always include in LLM schema so LLM can provide dynamic input values
|
||||
// even if user has configured empty/partial inputMapping in the UI
|
||||
const isWorkflowInputMapping =
|
||||
toolConfig.id === 'workflow_executor' && paramId === 'inputMapping'
|
||||
|
||||
// Parameters with enrichment config are treated specially:
|
||||
// - Include them if dependency value is available (even if normally hidden)
|
||||
// - Skip them if dependency value is not available
|
||||
if (enrichmentConfig) {
|
||||
const dependencyValue = userProvidedParams[enrichmentConfig.dependsOn] as string
|
||||
if (!dependencyValue) {
|
||||
@@ -461,26 +458,21 @@ export async function createLLMToolSchema(
    }

    if (!isWorkflowInputMapping) {
      // Skip parameters that user has already provided
      if (isNonEmpty(userProvidedParams[paramId])) {
        continue
      }

      // Skip parameters that are user-only (never shown to LLM)
      if (param.visibility === 'user-only') {
        continue
      }

      // Skip hidden parameters
      if (param.visibility === 'hidden') {
        continue
      }
    }

    // Add parameter to LLM schema
    const propertySchema = buildParameterSchema(toolConfig.id, paramId, param)

    // Apply dynamic schema enrichment for workflow_executor's inputMapping
    if (isWorkflowInputMapping) {
      const workflowId = userProvidedParams.workflowId as string
      if (workflowId) {
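To make the skip rules in this hunk concrete, here is a hedged sketch of how a mixed set of parameter visibilities would be filtered (field shapes beyond `visibility` and `required` are simplified, and the param ids are invented):

```ts
// Illustrative params map for a hypothetical tool.
const params = {
  apiKey: { visibility: 'user-only', required: true }, // never exposed to the LLM
  channel: { visibility: 'user-or-llm', required: true }, // included unless the user already set it
  query: { visibility: 'llm-only', required: true }, // always left for the LLM to fill in
  internalFlag: { visibility: 'hidden', required: false }, // skipped entirely
}

// With userProvidedParams = { channel: '#general' }, the loop above would:
//   - skip apiKey (user-only), internalFlag (hidden), and channel (already provided)
//   - add query to schema.properties and to schema.required (llm-only and required)
```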
@@ -490,13 +482,29 @@ export async function createLLMToolSchema(

    schema.properties[paramId] = propertySchema

    // Add to required if LLM must provide it and it's originally required
    if ((param.visibility === 'user-or-llm' || param.visibility === 'llm-only') && param.required) {
      schema.required.push(paramId)
    }
  }

  return schema
  if (toolConfig.toolEnrichment) {
    const dependencyValue = userProvidedParams[toolConfig.toolEnrichment.dependsOn] as string
    if (dependencyValue) {
      const enriched = await toolConfig.toolEnrichment.enrichTool(
        dependencyValue,
        schema,
        toolConfig.description
      )
      if (enriched) {
        return {
          schema: enriched.parameters as ToolSchema,
          enrichedDescription: enriched.description,
        }
      }
    }
  }

  return { schema }
}

/**
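For context on the new `toolEnrichment` branch: it is a whole-tool hook that, once its dependency value is available, may replace both the parameter schema and the description; a falsy result falls through to the default `{ schema }` return. A hypothetical config sketch, with the enrichment shape inferred from the calls above and `fetchWorkflowInputs` as an invented helper:

```ts
import type { ToolSchema } from '@/tools/params' // assumed path

// Hypothetical lookup of a workflow's input properties; not part of this diff.
declare function fetchWorkflowInputs(workflowId: string): Promise<Record<string, unknown> | null>

// Fragment of a tool config; the toolEnrichment shape is inferred from the calls in the diff above.
const exampleTool = {
  id: 'workflow_executor',
  description: 'Execute another workflow',
  toolEnrichment: {
    // The user-provided param that unlocks enrichment.
    dependsOn: 'workflowId',
    // Given that value, optionally return a replacement schema and description.
    enrichTool: async (workflowId: string, schema: ToolSchema, description: string) => {
      const inputs = await fetchWorkflowInputs(workflowId)
      if (!inputs) return null // keeps the default { schema } result
      return {
        parameters: { ...schema, properties: { ...schema.properties, ...inputs } },
        description: `${description} (inputs resolved for workflow ${workflowId})`,
      }
    },
  },
}
```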
@@ -1391,6 +1391,7 @@ import {
  spotifyUnfollowPlaylistTool,
  spotifyUpdatePlaylistTool,
} from '@/tools/spotify'
import { sqsSendTool } from '@/tools/sqs'
import {
  sshCheckCommandExistsTool,
  sshCheckFileExistsTool,
@@ -1490,6 +1491,20 @@ import {
  supabaseUpsertTool,
  supabaseVectorSearchTool,
} from '@/tools/supabase'
import {
  tableBatchInsertRowsTool,
  tableCreateTool,
  tableDeleteRowsByFilterTool,
  tableDeleteRowTool,
  tableGetRowTool,
  tableGetSchemaTool,
  tableInsertRowTool,
  tableListTool,
  tableQueryRowsTool,
  tableUpdateRowsByFilterTool,
  tableUpdateRowTool,
  tableUpsertRowTool,
} from '@/tools/table'
import { tavilyCrawlTool, tavilyExtractTool, tavilyMapTool, tavilySearchTool } from '@/tools/tavily'
import {
  telegramDeleteMessageTool,
@@ -1653,7 +1668,6 @@ import {
  zoomListRecordingsTool,
  zoomUpdateMeetingTool,
} from '@/tools/zoom'
import { sqsSendTool } from './sqs'

// Registry of all available tools
export const tools: Record<string, ToolConfig> = {
@@ -2957,6 +2971,18 @@ export const tools: Record<string, ToolConfig> = {
  salesforce_describe_object: salesforceDescribeObjectTool,
  salesforce_list_objects: salesforceListObjectsTool,
  sqs_send: sqsSendTool,
  table_create: tableCreateTool,
  table_list: tableListTool,
  table_insert_row: tableInsertRowTool,
  table_batch_insert_rows: tableBatchInsertRowsTool,
  table_upsert_row: tableUpsertRowTool,
  table_update_row: tableUpdateRowTool,
  table_update_rows_by_filter: tableUpdateRowsByFilterTool,
  table_delete_row: tableDeleteRowTool,
  table_delete_rows_by_filter: tableDeleteRowsByFilterTool,
  table_query_rows: tableQueryRowsTool,
  table_get_row: tableGetRowTool,
  table_get_schema: tableGetSchemaTool,
  mailchimp_get_audiences: mailchimpGetAudiencesTool,
  mailchimp_get_audience: mailchimpGetAudienceTool,
  mailchimp_create_audience: mailchimpCreateAudienceTool,
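With the table tools registered above, a caller can resolve one by its registry key and feed it straight into `createLLMToolSchema`. A small usage sketch (import paths and the `tableId` parameter are illustrative):

```ts
import { createLLMToolSchema } from '@/tools/params' // assumed path
import { tools } from '@/tools/registry' // assumed path for the registry shown above

async function inspectTableQueryTool() {
  // Look up one of the newly registered table tools by its registry key.
  const tableQueryTool = tools['table_query_rows']

  // Build its LLM-facing schema; tableId stands in for a value the user already configured.
  const { schema, enrichedDescription } = await createLLMToolSchema(tableQueryTool, {
    tableId: 'tbl_123',
  })

  console.log(Object.keys(schema.properties), schema.required, enrichedDescription)
}
```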