Compare commits

...

103 Commits

Author SHA1 Message Date
waleed
f1938f008a update table-selector 2026-02-19 12:16:57 -08:00
waleed
592dd46dd8 migrate to use tanstack query for all server state 2026-02-19 11:54:06 -08:00
waleed
b476b8d9e0 reran migrations 2026-02-19 11:14:11 -08:00
waleed
c18fbcf9f2 Merge branch 'staging' into lakees/db
# Conflicts:
#	apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/tool-input.tsx
#	apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block.tsx
#	packages/db/migrations/meta/0154_snapshot.json
#	packages/db/migrations/meta/_journal.json
2026-02-19 11:13:20 -08:00
Vikhyath Mondreti
c4986a96e5 fix lint 2026-02-11 00:59:54 -08:00
Vikhyath Mondreti
8bf3370481 fix permissive auth 2026-02-11 00:59:25 -08:00
Vikhyath Mondreti
ca70280ba1 fix tables auth 2026-02-11 00:56:51 -08:00
Vikhyath Mondreti
0ffcce87ea add back migrations 2026-02-11 00:50:19 -08:00
Vikhyath Mondreti
f0a8d82f62 Merge remote-tracking branch 'origin/staging' into lakees/db 2026-02-11 00:48:55 -08:00
Vikhyath Mondreti
d9dbe93d6b remove conflicting migration 2026-02-11 00:34:59 -08:00
Vikhyath Mondreti
be757a4f1e adhere to size limits for tables 2026-01-21 17:20:33 -08:00
Vikhyath Mondreti
1938818027 address bugbot concerns 2026-01-21 17:13:58 -08:00
Vikhyath Mondreti
2818b745d1 migrate enrichment logic to general abstraction 2026-01-21 17:08:20 -08:00
Vikhyath Mondreti
2d49de76ea add back missed code 2026-01-21 16:37:28 -08:00
Vikhyath Mondreti
1f682eb343 readd migrations 2026-01-21 16:34:32 -08:00
Vikhyath Mondreti
8d43947eb5 Merge staging into lakees/db
- Resolve merge conflicts in input-format.tsx, workflow-block.tsx, providers/utils.ts
- Fix tests to use blockData/blockNameMapping for tag variable resolution
- Add getBlockOutputs mock to block.test.ts for schema validation tests
- Fix normalizeName import path in utils.test.ts
- Add sql.raw and sql.join to drizzle-orm mock for sql.test.ts
- Add new subBlock types (table-selector, filter-builder, sort-builder) to blocks.test.ts
2026-01-21 16:33:31 -08:00
Vikhyath Mondreti
107679bf41 prepare merge 2026-01-21 16:25:13 -08:00
Lakee Sivaraya
a8e413a999 fix 2026-01-17 13:04:07 -08:00
Lakee Sivaraya
f05f5bbc6d fix 2026-01-17 12:57:37 -08:00
Lakee Sivaraya
87f8fcdbf2 fix 2026-01-17 12:49:36 -08:00
Lakee Sivaraya
6e8dc771fe fix 2026-01-17 10:16:45 -08:00
Lakee Sivaraya
d0c3c6aec7 updates 2026-01-17 10:02:52 -08:00
Lakee Sivaraya
8574d66aac uncook 2026-01-17 09:58:48 -08:00
Lakee Sivaraya
e79e9e7367 Merge origin/main into lakees/db
Resolved conflicts:
- workflow-block.tsx: Kept both table types and schedule hooks
- types.ts: Kept both filter-builder/sort-builder and deprecated comment
- icons.tsx (both apps): Kept TableIcon and added ReductoIcon/PulseIcon
- Migration files: Accepted main branch versions
2026-01-17 09:11:51 -08:00
Lakee Sivaraya
4b6de03a62 revert 2026-01-16 18:40:03 -08:00
Lakee Sivaraya
37b50cbce6 dedupe 2026-01-16 18:40:03 -08:00
Lakee Sivaraya
7ca628db13 rename 2026-01-16 18:40:03 -08:00
Lakee Sivaraya
118e4f65f0 updates 2026-01-16 18:40:02 -08:00
Lakee Sivaraya
292cd39cfb docs 2026-01-16 18:40:02 -08:00
Lakee Sivaraya
ea77790484 docs 2026-01-16 18:40:02 -08:00
Lakee Sivaraya
895591514a updates 2026-01-16 18:40:02 -08:00
Lakee Sivaraya
0e1133fc42 fix error handling 2026-01-16 18:40:02 -08:00
Lakee Sivaraya
4357230a9d fix 2026-01-16 18:40:02 -08:00
Lakee Sivaraya
e7f45166af type fix 2026-01-16 18:40:02 -08:00
Lakee Sivaraya
c662a31ac8 db updates 2026-01-16 18:40:02 -08:00
Lakee Sivaraya
51d1b958e2 updates 2026-01-16 18:39:17 -08:00
Lakee Sivaraya
3d81c1cc14 revert 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
94c6795efc updates 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
86c5e1b4ff updates 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
cca1772ae1 simplify 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
e4dd14df7a undo 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
448b8f056c undo changes 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
abb671e61b rename 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
f90c9c7593 undo 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
2e624c20b5 reduced type confusion 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
7093209bce refactor 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
897891ee1e updates 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
42aa794713 updates 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
ea72ab5aa9 simplicifcaiton 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
5173320bb5 clean comments 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
26d96624af comments 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
271375df9b rename 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
a940dd6351 updates 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
e69500726b rm 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
c94bb5acda updating prompt to make it user sort 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
fef2d2cc82 fix appearnce 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
44909964b7 fix sorting 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
1a13762617 updates 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
cfffd050a2 updates 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
d00997c5ea updates 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
466559578e validation 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
0a6312dbac better comments 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
e503408825 renames 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
ed543a71f9 u[dates 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
7f894ec023 simplify comments 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
57fbd2aa1c fixes 2026-01-16 18:38:49 -08:00
Lakee Sivaraya
80270ce7b2 fix comments 2026-01-16 18:38:49 -08:00
Lakee Sivaraya
fdc3af994c updates 2026-01-16 18:38:49 -08:00
Lakee Sivaraya
5a69d16e65 wand 2026-01-16 18:38:49 -08:00
Lakee Sivaraya
c3afbaebce update db 2026-01-16 18:38:49 -08:00
Lakee Sivaraya
793c888808 undo 2026-01-16 18:37:59 -08:00
Lakee Sivaraya
ffad20efc5 updates 2026-01-16 18:37:59 -08:00
Lakee Sivaraya
b08ce03409 refactoring 2026-01-16 18:37:58 -08:00
Lakee Sivaraya
c9373c7b3e renames & refactors 2026-01-16 18:37:58 -08:00
Lakee Sivaraya
cbb93c65b6 refactoring 2026-01-16 18:37:58 -08:00
Lakee Sivaraya
96a3fe59ff updates 2026-01-16 18:37:57 -08:00
Lakee Sivaraya
df3e869f22 updates 2026-01-16 18:37:57 -08:00
Lakee Sivaraya
b3ca0c947c updates 2026-01-16 18:37:54 -08:00
Lakee Sivaraya
cfbc8d7211 dedupe 2026-01-16 18:37:54 -08:00
Lakee Sivaraya
15bef489f2 updates 2026-01-16 18:37:53 -08:00
Lakee Sivaraya
4422a69a17 revert 2026-01-16 18:37:52 -08:00
Lakee Sivaraya
8f9cf93231 changes 2026-01-16 18:37:51 -08:00
Lakee Sivaraya
22f89cf67d comments 2026-01-16 18:37:51 -08:00
Lakee Sivaraya
dfa018f2d4 updates 2026-01-16 18:37:51 -08:00
Lakee Sivaraya
e287388b03 update comments with ai 2026-01-16 18:37:51 -08:00
Lakee Sivaraya
4d176c0717 breaking down file 2026-01-16 18:37:51 -08:00
Lakee Sivaraya
c155d8ac6c doc strings 2026-01-16 18:37:50 -08:00
Lakee Sivaraya
48250f5ed8 chages 2026-01-16 18:37:50 -08:00
Lakee Sivaraya
fc6dbcf066 updates 2026-01-16 18:37:50 -08:00
Lakee Sivaraya
a537ca7ebe updates 2026-01-16 18:37:50 -08:00
Lakee Sivaraya
c1eef30578 improved errors 2026-01-16 18:37:50 -08:00
Lakee Sivaraya
6605c887ed fix lints 2026-01-16 18:37:49 -08:00
Lakee Sivaraya
a919816bff format 2026-01-16 18:37:49 -08:00
Lakee Sivaraya
8a8589e18d one input mode 2026-01-16 18:37:49 -08:00
Lakee Sivaraya
ed807bebf2 updates 2026-01-16 18:37:49 -08:00
Lakee Sivaraya
48ecb19af7 updates 2026-01-16 18:37:49 -08:00
Lakee Sivaraya
9a3d5631f2 updates 2026-01-16 18:37:47 -08:00
Lakee Sivaraya
0872314fbf filtering ui 2026-01-16 18:37:45 -08:00
Lakee Sivaraya
7e4fc32d82 updates 2026-01-16 18:37:45 -08:00
Lakee Sivaraya
4316f45175 updates 2026-01-16 18:37:45 -08:00
Lakee Sivaraya
e80660f218 trashy table viewer 2026-01-16 18:37:45 -08:00
Lakee Sivaraya
5dddb03eac required 2026-01-16 18:37:44 -08:00
Lakee Sivaraya
6386e6b437 updates 2026-01-16 18:37:44 -08:00
119 changed files with 23292 additions and 100 deletions

View File

@@ -4964,6 +4964,26 @@ export function BedrockIcon(props: SVGProps<SVGSVGElement>) {
)
}
/**
 * 3x3 grid "table" icon rendered as an inline SVG (24x24 viewBox).
 * Stroke color follows `currentColor`; any extra SVG props passed in
 * (className, width, height, …) are spread onto the root <svg> element.
 */
export function TableIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
xmlns='http://www.w3.org/2000/svg'
viewBox='0 0 24 24'
fill='none'
stroke='currentColor'
strokeWidth={2}
strokeLinecap='round'
strokeLinejoin='round'
{...props}
>
{/* Outer rounded frame plus two horizontal and two vertical grid lines */}
<rect width='18' height='18' x='3' y='3' rx='2' />
<path d='M3 9h18' />
<path d='M3 15h18' />
<path d='M9 3v18' />
<path d='M15 3v18' />
</svg>
)
}
export function ReductoIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg

View File

@@ -114,6 +114,7 @@
"stripe",
"stt",
"supabase",
"table",
"tavily",
"telegram",
"textract",

View File

@@ -0,0 +1,351 @@
---
title: Table
description: User-defined data tables for storing and querying structured data
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="table"
color="#10B981"
/>
Tables allow you to create and manage custom data tables directly within Sim. Store, query, and manipulate structured data within your workflows without needing external database integrations.
**Why Use Tables?**
- **No external setup**: Create tables instantly without configuring external databases
- **Workflow-native**: Data persists across workflow executions and is accessible from any workflow in your workspace
- **Flexible schema**: Define columns with types (string, number, boolean, date, json) and constraints (required, unique)
- **Powerful querying**: Filter, sort, and paginate data using MongoDB-style operators
- **Agent-friendly**: Tables can be used as tools by AI agents for dynamic data storage and retrieval
**Key Features:**
- Create tables with custom schemas
- Insert, update, upsert, and delete rows
- Query with filters and sorting
- Batch operations for bulk inserts
- Bulk updates and deletes by filter
- Up to 10,000 rows per table, 100 tables per workspace
## Creating Tables
Tables are created from the **Tables** section in the sidebar. Each table requires:
- **Name**: Alphanumeric with underscores (e.g., `customer_leads`)
- **Description**: Optional description of the table's purpose
- **Schema**: Define columns with name, type, and optional constraints
### Column Types
| Type | Description | Example Values |
|------|-------------|----------------|
| `string` | Text data | `"John Doe"`, `"active"` |
| `number` | Numeric data | `42`, `99.99` |
| `boolean` | True/false values | `true`, `false` |
| `date` | Date/time values | `"2024-01-15T10:30:00Z"` |
| `json` | Complex nested data | `{"address": {"city": "NYC"}}` |
### Column Constraints
- **Required**: Column must have a value (cannot be null)
- **Unique**: Values must be unique across all rows (enables upsert matching)
## Usage Instructions
Create and manage custom data tables. Store, query, and manipulate structured data within workflows.
## Tools
### `table_query_rows`
Query rows from a table with filtering, sorting, and pagination
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | No | Filter conditions using MongoDB-style operators |
| `sort` | object | No | Sort order as \{column: "asc"\|"desc"\} |
| `limit` | number | No | Maximum rows to return \(default: 100, max: 1000\) |
| `offset` | number | No | Number of rows to skip \(default: 0\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether query succeeded |
| `rows` | array | Query result rows |
| `rowCount` | number | Number of rows returned |
| `totalCount` | number | Total rows matching filter |
| `limit` | number | Limit used in query |
| `offset` | number | Offset used in query |
### `table_insert_row`
Insert a new row into a table
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `data` | object | Yes | Row data as JSON object matching the table schema |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was inserted |
| `row` | object | Inserted row data including generated ID |
| `message` | string | Status message |
### `table_upsert_row`
Insert or update a row based on unique column constraints. If a row with a matching unique field exists, it is updated; otherwise a new row is inserted.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `data` | object | Yes | Row data to insert or update |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was upserted |
| `row` | object | Upserted row data |
| `operation` | string | Operation performed: "insert" or "update" |
| `message` | string | Status message |
### `table_batch_insert_rows`
Insert multiple rows at once (up to 1000 rows per batch)
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rows` | array | Yes | Array of row data objects to insert |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether batch insert succeeded |
| `rows` | array | Array of inserted rows with IDs |
| `insertedCount` | number | Number of rows inserted |
| `message` | string | Status message |
### `table_update_row`
Update a specific row by its ID
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to update |
| `data` | object | Yes | Data to update \(partial update supported\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was updated |
| `row` | object | Updated row data |
| `message` | string | Status message |
### `table_update_rows_by_filter`
Update multiple rows matching a filter condition
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | Yes | Filter to match rows for update |
| `data` | object | Yes | Data to apply to matching rows |
| `limit` | number | No | Maximum rows to update \(default: 1000\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether update succeeded |
| `updatedCount` | number | Number of rows updated |
| `updatedRowIds` | array | IDs of updated rows |
| `message` | string | Status message |
### `table_delete_row`
Delete a specific row by its ID
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to delete |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was deleted |
| `deletedCount` | number | Number of rows deleted \(1 or 0\) |
| `message` | string | Status message |
### `table_delete_rows_by_filter`
Delete multiple rows matching a filter condition
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | Yes | Filter to match rows for deletion |
| `limit` | number | No | Maximum rows to delete \(default: 1000\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether delete succeeded |
| `deletedCount` | number | Number of rows deleted |
| `deletedRowIds` | array | IDs of deleted rows |
| `message` | string | Status message |
### `table_get_row`
Get a single row by its ID
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to retrieve |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was found |
| `row` | object | Row data |
| `message` | string | Status message |
### `table_get_schema`
Get the schema definition for a table
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether schema was retrieved |
| `name` | string | Table name |
| `columns` | array | Array of column definitions |
| `message` | string | Status message |
## Filter Operators
Filters use MongoDB-style operators for flexible querying:
| Operator | Description | Example |
|----------|-------------|---------|
| `$eq` | Equals | `{"status": {"$eq": "active"}}` or `{"status": "active"}` |
| `$ne` | Not equals | `{"status": {"$ne": "deleted"}}` |
| `$gt` | Greater than | `{"age": {"$gt": 18}}` |
| `$gte` | Greater than or equal | `{"score": {"$gte": 80}}` |
| `$lt` | Less than | `{"price": {"$lt": 100}}` |
| `$lte` | Less than or equal | `{"quantity": {"$lte": 10}}` |
| `$in` | In array | `{"status": {"$in": ["active", "pending"]}}` |
| `$nin` | Not in array | `{"type": {"$nin": ["spam", "blocked"]}}` |
| `$contains` | String contains | `{"email": {"$contains": "@gmail.com"}}` |
### Combining Filters
Multiple field conditions are combined with AND logic:
```json
{
"status": "active",
"age": {"$gte": 18}
}
```
Use `$or` for OR logic:
```json
{
"$or": [
{"status": "active"},
{"status": "pending"}
]
}
```
## Sort Specification
Specify sort order with column names and direction:
```json
{
"createdAt": "desc"
}
```
Multi-column sorting:
```json
{
"priority": "desc",
"name": "asc"
}
```
## Built-in Columns
Every row automatically includes:
| Column | Type | Description |
|--------|------|-------------|
| `id` | string | Unique row identifier |
| `createdAt` | date | When the row was created |
| `updatedAt` | date | When the row was last modified |
These can be used in filters and sorting.
## Limits
| Resource | Limit |
|----------|-------|
| Tables per workspace | 100 |
| Rows per table | 10,000 |
| Columns per table | 50 |
| Max row size | 100KB |
| String value length | 10,000 characters |
| Query limit | 1,000 rows |
| Batch insert size | 1,000 rows |
| Bulk update/delete | 1,000 rows |
## Notes
- Category: `blocks`
- Type: `table`
- Tables are scoped to workspaces and accessible from any workflow within that workspace
- Data persists across workflow executions
- Use unique constraints to enable upsert functionality
- The visual filter/sort builder provides an easy way to construct queries without writing JSON

View File

@@ -163,17 +163,18 @@ export async function checkKnowledgeBaseAccess(
const kbData = kb[0]
// Case 1: User owns the knowledge base directly
if (kbData.userId === userId) {
return { hasAccess: true, knowledgeBase: kbData }
}
// Case 2: Knowledge base belongs to a workspace the user has permissions for
if (kbData.workspaceId) {
// Workspace KB: use workspace permissions only
const userPermission = await getUserEntityPermissions(userId, 'workspace', kbData.workspaceId)
if (userPermission !== null) {
return { hasAccess: true, knowledgeBase: kbData }
}
return { hasAccess: false }
}
// Legacy non-workspace KB: allow owner access
if (kbData.userId === userId) {
return { hasAccess: true, knowledgeBase: kbData }
}
return { hasAccess: false }
@@ -182,8 +183,8 @@ export async function checkKnowledgeBaseAccess(
/**
* Check if a user has write access to a knowledge base
* Write access is granted if:
* 1. User owns the knowledge base directly, OR
* 2. User has write or admin permissions on the knowledge base's workspace
* 1. KB has a workspace: user has write or admin permissions on that workspace
* 2. KB has no workspace (legacy): user owns the KB directly
*/
export async function checkKnowledgeBaseWriteAccess(
knowledgeBaseId: string,
@@ -206,17 +207,18 @@ export async function checkKnowledgeBaseWriteAccess(
const kbData = kb[0]
// Case 1: User owns the knowledge base directly
if (kbData.userId === userId) {
return { hasAccess: true, knowledgeBase: kbData }
}
// Case 2: Knowledge base belongs to a workspace and user has write/admin permissions
if (kbData.workspaceId) {
// Workspace KB: use workspace permissions only
const userPermission = await getUserEntityPermissions(userId, 'workspace', kbData.workspaceId)
if (userPermission === 'write' || userPermission === 'admin') {
return { hasAccess: true, knowledgeBase: kbData }
}
return { hasAccess: false }
}
// Legacy non-workspace KB: allow owner access
if (kbData.userId === userId) {
return { hasAccess: true, knowledgeBase: kbData }
}
return { hasAccess: false }

View File

@@ -0,0 +1,138 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { deleteTable, type TableSchema } from '@/lib/table'
import { accessError, checkAccess, normalizeColumn, verifyTableWorkspace } from '../utils'
const logger = createLogger('TableDetailAPI')
// Query-string validation: both GET and DELETE require the caller to name the
// workspace the table is expected to belong to; handlers cross-check it
// against the table's actual workspace via verifyTableWorkspace.
const GetTableSchema = z.object({
workspaceId: z.string().min(1, 'Workspace ID is required'),
})
// Route `params` is a Promise and is awaited inside each handler.
interface TableRouteParams {
params: Promise<{ tableId: string }>
}
/**
 * GET /api/table/[tableId] - Retrieves a single table's details.
 *
 * Requires an authenticated session (or internal auth) plus read access to
 * the table. The `workspaceId` query parameter must match the table's actual
 * workspace, otherwise a 400 is returned.
 */
export async function GET(request: NextRequest, { params }: TableRouteParams) {
  const requestId = generateRequestId()
  const { tableId } = await params
  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      logger.warn(`[${requestId}] Unauthorized table access attempt`)
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }

    const query = GetTableSchema.parse({
      workspaceId: new URL(request.url).searchParams.get('workspaceId'),
    })

    const access = await checkAccess(tableId, auth.userId, 'read')
    if (!access.ok) return accessError(access, requestId, tableId)
    const { table } = access

    // Reject callers that name a workspace the table does not belong to.
    if (!(await verifyTableWorkspace(tableId, query.workspaceId))) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${query.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }

    logger.info(`[${requestId}] Retrieved table ${tableId} for user ${auth.userId}`)

    const schemaData = table.schema as TableSchema
    // Timestamps may come back as Date or as a raw driver value; normalize both.
    const toIso = (value: unknown) =>
      value instanceof Date ? value.toISOString() : String(value)

    return NextResponse.json({
      success: true,
      data: {
        table: {
          id: table.id,
          name: table.name,
          description: table.description,
          schema: { columns: schemaData.columns.map(normalizeColumn) },
          rowCount: table.rowCount,
          maxRows: table.maxRows,
          createdAt: toIso(table.createdAt),
          updatedAt: toIso(table.updatedAt),
        },
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error getting table:`, error)
    return NextResponse.json({ error: 'Failed to get table' }, { status: 500 })
  }
}
/**
 * DELETE /api/table/[tableId] - Deletes a table and all its rows.
 *
 * Requires write access to the table; the `workspaceId` query parameter must
 * match the table's actual workspace. Row deletion is delegated to deleteTable.
 */
export async function DELETE(request: NextRequest, { params }: TableRouteParams) {
  const requestId = generateRequestId()
  const { tableId } = await params
  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      logger.warn(`[${requestId}] Unauthorized table delete attempt`)
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }

    const query = GetTableSchema.parse({
      workspaceId: new URL(request.url).searchParams.get('workspaceId'),
    })

    // Destructive operation: write access required (read is not enough).
    const access = await checkAccess(tableId, auth.userId, 'write')
    if (!access.ok) return accessError(access, requestId, tableId)
    const { table } = access

    if (!(await verifyTableWorkspace(tableId, query.workspaceId))) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${query.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }

    await deleteTable(tableId, requestId)

    return NextResponse.json({
      success: true,
      data: { message: 'Table deleted successfully' },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error deleting table:`, error)
    return NextResponse.json({ error: 'Failed to delete table' }, { status: 500 })
  }
}

View File

@@ -0,0 +1,276 @@
import { db } from '@sim/db'
import { userTableRows } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import type { RowData, TableSchema } from '@/lib/table'
import { validateRowData } from '@/lib/table'
import { accessError, checkAccess, verifyTableWorkspace } from '../../../utils'
const logger = createLogger('TableRowAPI')
// Request validation schemas. Every operation must name the workspace the
// table is expected to live in; handlers cross-check it via verifyTableWorkspace.
const GetRowSchema = z.object({
workspaceId: z.string().min(1, 'Workspace ID is required'),
})
const UpdateRowSchema = z.object({
workspaceId: z.string().min(1, 'Workspace ID is required'),
// Arbitrary key/value row data; values are validated against the table schema later.
data: z.record(z.unknown(), { required_error: 'Row data is required' }),
})
const DeleteRowSchema = z.object({
workspaceId: z.string().min(1, 'Workspace ID is required'),
})
// Route `params` is a Promise and is awaited inside each handler.
interface RowRouteParams {
params: Promise<{ tableId: string; rowId: string }>
}
/**
 * GET /api/table/[tableId]/rows/[rowId] - Retrieves a single row.
 *
 * Requires read access to the table; the `workspaceId` query parameter must
 * match the table's actual workspace. Returns 404 when no row matches the
 * (rowId, tableId, workspaceId) scope.
 */
export async function GET(request: NextRequest, { params }: RowRouteParams) {
  const requestId = generateRequestId()
  const { tableId, rowId } = await params
  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }

    const query = GetRowSchema.parse({
      workspaceId: new URL(request.url).searchParams.get('workspaceId'),
    })

    const access = await checkAccess(tableId, auth.userId, 'read')
    if (!access.ok) return accessError(access, requestId, tableId)
    const { table } = access

    if (!(await verifyTableWorkspace(tableId, query.workspaceId))) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${query.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }

    // Scope the lookup to the table AND workspace so a row ID belonging to
    // another table or workspace can never be read.
    const rowScope = and(
      eq(userTableRows.id, rowId),
      eq(userTableRows.tableId, tableId),
      eq(userTableRows.workspaceId, query.workspaceId)
    )
    const [row] = await db
      .select({
        id: userTableRows.id,
        data: userTableRows.data,
        createdAt: userTableRows.createdAt,
        updatedAt: userTableRows.updatedAt,
      })
      .from(userTableRows)
      .where(rowScope)
      .limit(1)

    if (!row) {
      return NextResponse.json({ error: 'Row not found' }, { status: 404 })
    }

    logger.info(`[${requestId}] Retrieved row ${rowId} from table ${tableId}`)

    return NextResponse.json({
      success: true,
      data: {
        row: {
          id: row.id,
          data: row.data,
          createdAt: row.createdAt.toISOString(),
          updatedAt: row.updatedAt.toISOString(),
        },
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error getting row:`, error)
    return NextResponse.json({ error: 'Failed to get row' }, { status: 500 })
  }
}
/**
 * PATCH /api/table/[tableId]/rows/[rowId] - Updates a single row (supports partial updates).
 *
 * Requires write access to the table. The incoming `data` object is shallow-merged
 * over the row's existing data (incoming keys win), and the merged result is
 * re-validated against the table schema (including unique constraints, excluding
 * this row) before being written back.
 */
export async function PATCH(request: NextRequest, { params }: RowRouteParams) {
const requestId = generateRequestId()
const { tableId, rowId } = await params
try {
const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
if (!authResult.success || !authResult.userId) {
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
const body: unknown = await request.json()
const validated = UpdateRowSchema.parse(body)
// Mutation: write access required.
const result = await checkAccess(tableId, authResult.userId, 'write')
if (!result.ok) return accessError(result, requestId, tableId)
const { table } = result
// Caller-supplied workspaceId must match the table's actual workspace.
const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
if (!isValidWorkspace) {
logger.warn(
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
)
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
}
// Fetch existing row to support partial updates
// (scoped to table + workspace so foreign row IDs 404).
const [existingRow] = await db
.select({ data: userTableRows.data })
.from(userTableRows)
.where(
and(
eq(userTableRows.id, rowId),
eq(userTableRows.tableId, tableId),
eq(userTableRows.workspaceId, validated.workspaceId)
)
)
.limit(1)
if (!existingRow) {
return NextResponse.json({ error: 'Row not found' }, { status: 404 })
}
// Merge existing data with incoming partial data (incoming takes precedence)
const mergedData = {
...(existingRow.data as RowData),
...(validated.data as RowData),
}
// Validate the full merged row, not just the patch; excludeRowId keeps the
// row's own current values from tripping unique-constraint checks.
const validation = await validateRowData({
rowData: mergedData,
schema: table.schema as TableSchema,
tableId,
excludeRowId: rowId,
})
if (!validation.valid) return validation.response
const now = new Date()
const [updatedRow] = await db
.update(userTableRows)
.set({
data: mergedData,
updatedAt: now,
})
.where(
and(
eq(userTableRows.id, rowId),
eq(userTableRows.tableId, tableId),
eq(userTableRows.workspaceId, validated.workspaceId)
)
)
.returning()
// Row may have been deleted between the read and the update.
if (!updatedRow) {
return NextResponse.json({ error: 'Row not found' }, { status: 404 })
}
logger.info(`[${requestId}] Updated row ${rowId} in table ${tableId}`)
return NextResponse.json({
success: true,
data: {
row: {
id: updatedRow.id,
data: updatedRow.data,
createdAt: updatedRow.createdAt.toISOString(),
updatedAt: updatedRow.updatedAt.toISOString(),
},
message: 'Row updated successfully',
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: 'Validation error', details: error.errors },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error updating row:`, error)
return NextResponse.json({ error: 'Failed to update row' }, { status: 500 })
}
}
/**
 * DELETE /api/table/[tableId]/rows/[rowId] - Deletes a single row.
 *
 * Requires write access to the table. The workspace ID is read from the JSON
 * body (original behavior, still supported) or — when the request carries no
 * body or an unparseable one — from the `workspaceId` query parameter, since
 * many HTTP clients cannot attach a body to a DELETE request. A missing
 * workspace ID in both places yields a 400 validation error (previously a
 * body-less DELETE surfaced as a 500 from request.json() throwing).
 */
export async function DELETE(request: NextRequest, { params }: RowRouteParams) {
  const requestId = generateRequestId()
  const { tableId, rowId } = await params
  try {
    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    // Prefer the JSON body; fall back to the query string so a body-less
    // DELETE fails validation (400) instead of crashing into the 500 path.
    let body: unknown
    try {
      body = await request.json()
    } catch {
      body = { workspaceId: new URL(request.url).searchParams.get('workspaceId') }
    }
    const validated = DeleteRowSchema.parse(body)
    // Destructive operation: write access required.
    const result = await checkAccess(tableId, authResult.userId, 'write')
    if (!result.ok) return accessError(result, requestId, tableId)
    const { table } = result
    // Caller-supplied workspaceId must match the table's actual workspace.
    const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
    if (!isValidWorkspace) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }
    // Scope the delete to table + workspace so foreign row IDs 404.
    const [deletedRow] = await db
      .delete(userTableRows)
      .where(
        and(
          eq(userTableRows.id, rowId),
          eq(userTableRows.tableId, tableId),
          eq(userTableRows.workspaceId, validated.workspaceId)
        )
      )
      .returning()
    if (!deletedRow) {
      return NextResponse.json({ error: 'Row not found' }, { status: 404 })
    }
    logger.info(`[${requestId}] Deleted row ${rowId} from table ${tableId}`)
    return NextResponse.json({
      success: true,
      data: {
        message: 'Row deleted successfully',
        deletedCount: 1,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error deleting row:`, error)
    return NextResponse.json({ error: 'Failed to delete row' }, { status: 500 })
  }
}

View File

@@ -0,0 +1,725 @@
import { db } from '@sim/db'
import { userTableRows } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import type { Filter, RowData, Sort, TableSchema } from '@/lib/table'
import {
checkUniqueConstraintsDb,
getUniqueColumns,
TABLE_LIMITS,
USER_TABLE_ROWS_SQL_NAME,
validateBatchRows,
validateRowAgainstSchema,
validateRowData,
validateRowSize,
} from '@/lib/table'
import { buildFilterClause, buildSortClause } from '@/lib/table/sql'
import { accessError, checkAccess } from '../../utils'
const logger = createLogger('TableRowsAPI')
// Single-row insert payload: workspaceId scopes the operation, data is the row.
const InsertRowSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
  data: z.record(z.unknown(), { required_error: 'Row data is required' }),
})
// Batch insert payload: 1–1000 row objects per request.
const BatchInsertRowsSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
  rows: z
    .array(z.record(z.unknown()), { required_error: 'Rows array is required' })
    .min(1, 'At least one row is required')
    .max(1000, 'Cannot insert more than 1000 rows per batch'),
})
// Query parameters for GET: optional filter/sort plus coerced pagination
// (values arrive as URL strings, hence z.coerce).
const QueryRowsSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
  filter: z.record(z.unknown()).optional(),
  sort: z.record(z.enum(['asc', 'desc'])).optional(),
  limit: z.coerce
    .number({ required_error: 'Limit must be a number' })
    .int('Limit must be an integer')
    .min(1, 'Limit must be at least 1')
    .max(TABLE_LIMITS.MAX_QUERY_LIMIT, `Limit cannot exceed ${TABLE_LIMITS.MAX_QUERY_LIMIT}`)
    .optional()
    .default(100),
  offset: z.coerce
    .number({ required_error: 'Offset must be a number' })
    .int('Offset must be an integer')
    .min(0, 'Offset must be 0 or greater')
    .optional()
    .default(0),
})
// PUT payload: update `data` merged into every row matching `filter`,
// optionally capped at `limit` rows.
const UpdateRowsByFilterSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
  filter: z.record(z.unknown(), { required_error: 'Filter criteria is required' }),
  data: z.record(z.unknown(), { required_error: 'Update data is required' }),
  limit: z.coerce
    .number({ required_error: 'Limit must be a number' })
    .int('Limit must be an integer')
    .min(1, 'Limit must be at least 1')
    .max(1000, 'Cannot update more than 1000 rows per operation')
    .optional(),
})
// DELETE payload, filter variant: delete rows matching `filter`.
const DeleteRowsByFilterSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
  filter: z.record(z.unknown(), { required_error: 'Filter criteria is required' }),
  limit: z.coerce
    .number({ required_error: 'Limit must be a number' })
    .int('Limit must be an integer')
    .min(1, 'Limit must be at least 1')
    .max(1000, 'Cannot delete more than 1000 rows per operation')
    .optional(),
})
// DELETE payload, explicit-ids variant: delete up to 1000 rows by id.
const DeleteRowsByIdsSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
  rowIds: z
    .array(z.string().min(1), { required_error: 'Row IDs are required' })
    .min(1, 'At least one row ID is required')
    .max(1000, 'Cannot delete more than 1000 rows per operation'),
})
// Accept either DELETE variant; the handler branches on the presence of rowIds.
const DeleteRowsRequestSchema = z.union([DeleteRowsByFilterSchema, DeleteRowsByIdsSchema])
// Next.js dynamic-segment params for /api/table/[tableId]/rows.
interface TableRowsRouteParams {
  params: Promise<{ tableId: string }>
}
/**
 * Handles the batch-insert path of POST /api/table/[tableId]/rows.
 *
 * The raw request body is accepted as `unknown` and validated here with
 * BatchInsertRowsSchema — previously the parameter was typed as the parsed
 * shape even though the caller only performed a cast, so the type was a lie.
 * Checks write access, workspace ownership, remaining row capacity, and
 * per-row schema conformance, then inserts all rows in one statement.
 */
async function handleBatchInsert(
  requestId: string,
  tableId: string,
  body: unknown,
  userId: string
): Promise<NextResponse> {
  const validated = BatchInsertRowsSchema.parse(body)
  const accessResult = await checkAccess(tableId, userId, 'write')
  if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
  const { table } = accessResult
  if (validated.workspaceId !== table.workspaceId) {
    logger.warn(
      `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
    )
    return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
  }
  const workspaceId = validated.workspaceId
  // Refuse the whole batch if it would exceed the table's configured capacity.
  const remainingCapacity = table.maxRows - table.rowCount
  if (remainingCapacity < validated.rows.length) {
    return NextResponse.json(
      {
        error: `Insufficient capacity. Can only insert ${remainingCapacity} more rows (table has ${table.rowCount}/${table.maxRows} rows)`,
      },
      { status: 400 }
    )
  }
  const validation = await validateBatchRows({
    rows: validated.rows as RowData[],
    schema: table.schema as TableSchema,
    tableId,
  })
  if (!validation.valid) return validation.response
  const now = new Date()
  // All rows share the same timestamps; ids are UUID-derived and hyphen-free.
  const rowsToInsert = validated.rows.map((data) => ({
    id: `row_${crypto.randomUUID().replace(/-/g, '')}`,
    tableId,
    workspaceId,
    data,
    createdAt: now,
    updatedAt: now,
    createdBy: userId,
  }))
  const insertedRows = await db.insert(userTableRows).values(rowsToInsert).returning()
  logger.info(`[${requestId}] Batch inserted ${insertedRows.length} rows into table ${tableId}`)
  return NextResponse.json({
    success: true,
    data: {
      rows: insertedRows.map((r) => ({
        id: r.id,
        data: r.data,
        createdAt: r.createdAt.toISOString(),
        updatedAt: r.updatedAt.toISOString(),
      })),
      insertedCount: insertedRows.length,
      message: `Successfully inserted ${insertedRows.length} rows`,
    },
  })
}
/** POST /api/table/[tableId]/rows - Inserts row(s). Supports single or batch insert. */
export async function POST(request: NextRequest, { params }: TableRowsRouteParams) {
  const requestId = generateRequestId()
  const { tableId } = await params
  try {
    // Accepts either a user session or an internal service token.
    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    const body: unknown = await request.json()
    // A body carrying a `rows` array is routed to the batch-insert path;
    // handleBatchInsert re-validates the body with BatchInsertRowsSchema.
    if (
      typeof body === 'object' &&
      body !== null &&
      'rows' in body &&
      Array.isArray((body as Record<string, unknown>).rows)
    ) {
      return handleBatchInsert(
        requestId,
        tableId,
        body as z.infer<typeof BatchInsertRowsSchema>,
        authResult.userId
      )
    }
    const validated = InsertRowSchema.parse(body)
    // Inserting rows requires write permission on the table.
    const accessResult = await checkAccess(tableId, authResult.userId, 'write')
    if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
    const { table } = accessResult
    if (validated.workspaceId !== table.workspaceId) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }
    const workspaceId = validated.workspaceId
    const rowData = validated.data as RowData
    // Validate the row payload against the table's declared schema.
    const validation = await validateRowData({
      rowData,
      schema: table.schema as TableSchema,
      tableId,
    })
    if (!validation.valid) return validation.response
    // Enforce the table's row capacity before inserting.
    if (table.rowCount >= table.maxRows) {
      return NextResponse.json(
        { error: `Table row limit reached (${table.maxRows} rows max)` },
        { status: 400 }
      )
    }
    const rowId = `row_${crypto.randomUUID().replace(/-/g, '')}`
    const now = new Date()
    const [row] = await db
      .insert(userTableRows)
      .values({
        id: rowId,
        tableId,
        workspaceId,
        data: validated.data,
        createdAt: now,
        updatedAt: now,
        createdBy: authResult.userId,
      })
      .returning()
    logger.info(`[${requestId}] Inserted row ${rowId} into table ${tableId}`)
    return NextResponse.json({
      success: true,
      data: {
        row: {
          id: row.id,
          data: row.data,
          createdAt: row.createdAt.toISOString(),
          updatedAt: row.updatedAt.toISOString(),
        },
        message: 'Row inserted successfully',
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error inserting row:`, error)
    return NextResponse.json({ error: 'Failed to insert row' }, { status: 500 })
  }
}
/**
 * GET /api/table/[tableId]/rows - Queries rows with filtering, sorting, and
 * pagination. `filter` and `sort` are JSON-encoded query-string parameters;
 * `limit`/`offset` are coerced and bounded by QueryRowsSchema.
 */
export async function GET(request: NextRequest, { params }: TableRowsRouteParams) {
  const requestId = generateRequestId()
  const { tableId } = await params
  try {
    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    const { searchParams } = new URL(request.url)
    const workspaceId = searchParams.get('workspaceId')
    const filterParam = searchParams.get('filter')
    const sortParam = searchParams.get('sort')
    const limit = searchParams.get('limit')
    const offset = searchParams.get('offset')
    let filter: Record<string, unknown> | undefined
    let sort: Sort | undefined
    // Decode the JSON query parameters before zod validation; bad JSON is a 400.
    try {
      if (filterParam) {
        filter = JSON.parse(filterParam) as Record<string, unknown>
      }
      if (sortParam) {
        sort = JSON.parse(sortParam) as Sort
      }
    } catch {
      return NextResponse.json({ error: 'Invalid filter or sort JSON' }, { status: 400 })
    }
    const validated = QueryRowsSchema.parse({
      workspaceId,
      filter,
      sort,
      limit,
      offset,
    })
    // Reading rows requires at least read permission on the table.
    const accessResult = await checkAccess(tableId, authResult.userId, 'read')
    if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
    const { table } = accessResult
    if (validated.workspaceId !== table.workspaceId) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }
    // Every condition is scoped to table + workspace; the user filter is ANDed on.
    const baseConditions = [
      eq(userTableRows.tableId, tableId),
      eq(userTableRows.workspaceId, validated.workspaceId),
    ]
    if (validated.filter) {
      const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME)
      if (filterClause) {
        baseConditions.push(filterClause)
      }
    }
    let query = db
      .select({
        id: userTableRows.id,
        data: userTableRows.data,
        createdAt: userTableRows.createdAt,
        updatedAt: userTableRows.updatedAt,
      })
      .from(userTableRows)
      .where(and(...baseConditions))
    if (validated.sort) {
      const schema = table.schema as TableSchema
      const sortClause = buildSortClause(validated.sort, USER_TABLE_ROWS_SQL_NAME, schema.columns)
      if (sortClause) {
        query = query.orderBy(sortClause) as typeof query
      }
    } else {
      // Default ordering: insertion time.
      query = query.orderBy(userTableRows.createdAt) as typeof query
    }
    const countQuery = db
      .select({ count: sql<number>`count(*)` })
      .from(userTableRows)
      .where(and(...baseConditions))
    // The total-count query and the page query are independent reads over the
    // same conditions, so run them concurrently instead of sequentially.
    const [[{ count: totalCount }], rows] = await Promise.all([
      countQuery,
      query.limit(validated.limit).offset(validated.offset),
    ])
    logger.info(
      `[${requestId}] Queried ${rows.length} rows from table ${tableId} (total: ${totalCount})`
    )
    return NextResponse.json({
      success: true,
      data: {
        rows: rows.map((r) => ({
          id: r.id,
          data: r.data,
          createdAt: r.createdAt.toISOString(),
          updatedAt: r.updatedAt.toISOString(),
        })),
        rowCount: rows.length,
        // count(*) may arrive as a string from the driver; normalize to number.
        totalCount: Number(totalCount),
        limit: validated.limit,
        offset: validated.offset,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error querying rows:`, error)
    return NextResponse.json({ error: 'Failed to query rows' }, { status: 500 })
  }
}
/** PUT /api/table/[tableId]/rows - Updates rows matching filter criteria. */
export async function PUT(request: NextRequest, { params }: TableRowsRouteParams) {
  const requestId = generateRequestId()
  const { tableId } = await params
  try {
    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    const body: unknown = await request.json()
    const validated = UpdateRowsByFilterSchema.parse(body)
    const accessResult = await checkAccess(tableId, authResult.userId, 'write')
    if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
    const { table } = accessResult
    if (validated.workspaceId !== table.workspaceId) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }
    const updateData = validated.data as RowData
    // Reject oversized update payloads before touching any rows.
    const sizeValidation = validateRowSize(updateData)
    if (!sizeValidation.valid) {
      return NextResponse.json(
        { error: 'Invalid row data', details: sizeValidation.errors },
        { status: 400 }
      )
    }
    const baseConditions = [
      eq(userTableRows.tableId, tableId),
      eq(userTableRows.workspaceId, validated.workspaceId),
    ]
    const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME)
    if (filterClause) {
      baseConditions.push(filterClause)
    }
    // Materialize the matching rows first: their current data is needed for
    // the merge and for per-row schema/uniqueness validation below.
    let matchingRowsQuery = db
      .select({
        id: userTableRows.id,
        data: userTableRows.data,
      })
      .from(userTableRows)
      .where(and(...baseConditions))
    if (validated.limit) {
      matchingRowsQuery = matchingRowsQuery.limit(validated.limit) as typeof matchingRowsQuery
    }
    const matchingRows = await matchingRowsQuery
    // No match is a successful no-op, not an error.
    if (matchingRows.length === 0) {
      return NextResponse.json(
        {
          success: true,
          data: {
            message: 'No rows matched the filter criteria',
            updatedCount: 0,
          },
        },
        { status: 200 }
      )
    }
    if (matchingRows.length > TABLE_LIMITS.MAX_BULK_OPERATION_SIZE) {
      logger.warn(`[${requestId}] Updating ${matchingRows.length} rows. This may take some time.`)
    }
    // Validate every merged row up-front so the operation is all-or-nothing.
    for (const row of matchingRows) {
      const existingData = row.data as RowData
      const mergedData = { ...existingData, ...updateData }
      const rowValidation = validateRowAgainstSchema(mergedData, table.schema as TableSchema)
      if (!rowValidation.valid) {
        return NextResponse.json(
          {
            error: 'Updated data does not match schema',
            details: rowValidation.errors,
            affectedRowId: row.id,
          },
          { status: 400 }
        )
      }
    }
    const uniqueColumns = getUniqueColumns(table.schema as TableSchema)
    if (uniqueColumns.length > 0) {
      // If updating multiple rows, check that updateData doesn't set any unique column
      // (would cause all rows to have the same value, violating uniqueness)
      if (matchingRows.length > 1) {
        const uniqueColumnsInUpdate = uniqueColumns.filter((col) => col.name in updateData)
        if (uniqueColumnsInUpdate.length > 0) {
          return NextResponse.json(
            {
              error: 'Cannot set unique column values when updating multiple rows',
              details: [
                `Columns with unique constraint: ${uniqueColumnsInUpdate.map((c) => c.name).join(', ')}. ` +
                `Updating ${matchingRows.length} rows with the same value would violate uniqueness.`,
              ],
            },
            { status: 400 }
          )
        }
      }
      // Check unique constraints against database for each row
      for (const row of matchingRows) {
        const existingData = row.data as RowData
        const mergedData = { ...existingData, ...updateData }
        const uniqueValidation = await checkUniqueConstraintsDb(
          tableId,
          mergedData,
          table.schema as TableSchema,
          row.id
        )
        if (!uniqueValidation.valid) {
          return NextResponse.json(
            {
              error: 'Unique constraint violation',
              details: uniqueValidation.errors,
              affectedRowId: row.id,
            },
            { status: 400 }
          )
        }
      }
    }
    const now = new Date()
    // Apply the updates in batches inside one transaction; each row gets its
    // own merged data but they all share the same updatedAt timestamp.
    await db.transaction(async (trx) => {
      let totalUpdated = 0
      for (let i = 0; i < matchingRows.length; i += TABLE_LIMITS.UPDATE_BATCH_SIZE) {
        const batch = matchingRows.slice(i, i + TABLE_LIMITS.UPDATE_BATCH_SIZE)
        const updatePromises = batch.map((row) => {
          const existingData = row.data as RowData
          return trx
            .update(userTableRows)
            .set({
              data: { ...existingData, ...updateData },
              updatedAt: now,
            })
            .where(eq(userTableRows.id, row.id))
        })
        await Promise.all(updatePromises)
        totalUpdated += batch.length
        logger.info(
          `[${requestId}] Updated batch ${Math.floor(i / TABLE_LIMITS.UPDATE_BATCH_SIZE) + 1} (${totalUpdated}/${matchingRows.length} rows)`
        )
      }
    })
    logger.info(`[${requestId}] Updated ${matchingRows.length} rows in table ${tableId}`)
    return NextResponse.json({
      success: true,
      data: {
        message: 'Rows updated successfully',
        updatedCount: matchingRows.length,
        updatedRowIds: matchingRows.map((r) => r.id),
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error updating rows by filter:`, error)
    // NOTE(review): this exposes the raw error message to the client, unlike
    // the single-row handlers which return a generic message — confirm intent.
    const errorMessage = error instanceof Error ? error.message : String(error)
    const detailedError = `Failed to update rows: ${errorMessage}`
    return NextResponse.json({ error: detailedError }, { status: 500 })
  }
}
/** DELETE /api/table/[tableId]/rows - Deletes rows matching filter criteria. */
export async function DELETE(request: NextRequest, { params }: TableRowsRouteParams) {
  const requestId = generateRequestId()
  const { tableId } = await params
  try {
    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    const body: unknown = await request.json()
    // The body is either { rowIds: [...] } or { filter: {...} } (zod union).
    const validated = DeleteRowsRequestSchema.parse(body)
    const accessResult = await checkAccess(tableId, authResult.userId, 'write')
    if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
    const { table } = accessResult
    if (validated.workspaceId !== table.workspaceId) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }
    const baseConditions = [
      eq(userTableRows.tableId, tableId),
      eq(userTableRows.workspaceId, validated.workspaceId),
    ]
    let rowIds: string[] = []
    let missingRowIds: string[] | undefined
    let requestedCount: number | undefined
    if ('rowIds' in validated) {
      // Id-based delete: dedupe the requested ids, then resolve which of them
      // actually exist within this table + workspace.
      const uniqueRequestedRowIds = Array.from(new Set(validated.rowIds))
      requestedCount = uniqueRequestedRowIds.length
      const matchingRows = await db
        .select({ id: userTableRows.id })
        .from(userTableRows)
        .where(
          and(
            ...baseConditions,
            // Each id is bound as a parameter inside ANY(ARRAY[...]).
            sql`${userTableRows.id} = ANY(ARRAY[${sql.join(
              uniqueRequestedRowIds.map((id) => sql`${id}`),
              sql`, `
            )}])`
          )
        )
      const matchedRowIds = matchingRows.map((r) => r.id)
      const matchedIdSet = new Set(matchedRowIds)
      // Report ids the caller asked for that were not found/deletable.
      missingRowIds = uniqueRequestedRowIds.filter((id) => !matchedIdSet.has(id))
      rowIds = matchedRowIds
    } else {
      // Filter-based delete: resolve matching row ids, optionally capped.
      const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME)
      if (filterClause) {
        baseConditions.push(filterClause)
      }
      let matchingRowsQuery = db
        .select({ id: userTableRows.id })
        .from(userTableRows)
        .where(and(...baseConditions))
      if (validated.limit) {
        matchingRowsQuery = matchingRowsQuery.limit(validated.limit) as typeof matchingRowsQuery
      }
      const matchingRows = await matchingRowsQuery
      rowIds = matchingRows.map((r) => r.id)
    }
    // Nothing to delete is a successful no-op, not an error.
    if (rowIds.length === 0) {
      return NextResponse.json(
        {
          success: true,
          data: {
            message:
              'rowIds' in validated
                ? 'No matching rows found for the provided IDs'
                : 'No rows matched the filter criteria',
            deletedCount: 0,
            deletedRowIds: [],
            ...(requestedCount !== undefined ? { requestedCount } : {}),
            ...(missingRowIds ? { missingRowIds } : {}),
          },
        },
        { status: 200 }
      )
    }
    if (rowIds.length > TABLE_LIMITS.DELETE_BATCH_SIZE) {
      logger.warn(`[${requestId}] Deleting ${rowIds.length} rows. This may take some time.`)
    }
    // Delete in batches inside one transaction, re-scoped to table + workspace.
    await db.transaction(async (trx) => {
      let totalDeleted = 0
      for (let i = 0; i < rowIds.length; i += TABLE_LIMITS.DELETE_BATCH_SIZE) {
        const batch = rowIds.slice(i, i + TABLE_LIMITS.DELETE_BATCH_SIZE)
        await trx.delete(userTableRows).where(
          and(
            eq(userTableRows.tableId, tableId),
            eq(userTableRows.workspaceId, validated.workspaceId),
            sql`${userTableRows.id} = ANY(ARRAY[${sql.join(
              batch.map((id) => sql`${id}`),
              sql`, `
            )}])`
          )
        )
        totalDeleted += batch.length
        logger.info(
          `[${requestId}] Deleted batch ${Math.floor(i / TABLE_LIMITS.DELETE_BATCH_SIZE) + 1} (${totalDeleted}/${rowIds.length} rows)`
        )
      }
    })
    logger.info(`[${requestId}] Deleted ${rowIds.length} rows from table ${tableId}`)
    return NextResponse.json({
      success: true,
      data: {
        message: 'Rows deleted successfully',
        deletedCount: rowIds.length,
        deletedRowIds: rowIds,
        ...(requestedCount !== undefined ? { requestedCount } : {}),
        ...(missingRowIds ? { missingRowIds } : {}),
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error deleting rows by filter:`, error)
    const errorMessage = error instanceof Error ? error.message : String(error)
    const detailedError = `Failed to delete rows: ${errorMessage}`
    return NextResponse.json({ error: detailedError }, { status: 500 })
  }
}

View File

@@ -0,0 +1,182 @@
import { db } from '@sim/db'
import { userTableRows } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, or, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import type { RowData, TableSchema } from '@/lib/table'
import { getUniqueColumns, validateRowData } from '@/lib/table'
import { accessError, checkAccess, verifyTableWorkspace } from '../../../utils'
const logger = createLogger('TableUpsertAPI')
// Upsert payload: workspaceId scopes the operation; data is the full row
// payload (on update it replaces the row's data — see the handler).
const UpsertRowSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
  data: z.record(z.unknown(), { required_error: 'Row data is required' }),
})
// Next.js dynamic-segment params for /api/table/[tableId]/rows/upsert.
interface UpsertRouteParams {
  params: Promise<{ tableId: string }>
}
/** POST /api/table/[tableId]/rows/upsert - Inserts or updates based on unique columns. */
export async function POST(request: NextRequest, { params }: UpsertRouteParams) {
  const requestId = generateRequestId()
  const { tableId } = await params
  try {
    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    const body: unknown = await request.json()
    const validated = UpsertRowSchema.parse(body)
    const result = await checkAccess(tableId, authResult.userId, 'write')
    if (!result.ok) return accessError(result, requestId, tableId)
    const { table } = result
    const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
    if (!isValidWorkspace) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }
    const schema = table.schema as TableSchema
    const rowData = validated.data as RowData
    // Uniqueness is intentionally not checked here (checkUnique: false):
    // a match on a unique column is the update case, not a conflict.
    const validation = await validateRowData({
      rowData,
      schema,
      tableId,
      checkUnique: false,
    })
    if (!validation.valid) return validation.response
    // Upsert is only meaningful when the schema defines a unique column.
    const uniqueColumns = getUniqueColumns(schema)
    if (uniqueColumns.length === 0) {
      return NextResponse.json(
        {
          error:
            'Upsert requires at least one unique column in the schema. Please add a unique constraint to a column or use insert instead.',
        },
        { status: 400 }
      )
    }
    // Build one JSONB equality filter per unique column the payload provides;
    // a row matching ANY of them is treated as the existing row.
    const uniqueFilters = uniqueColumns.map((col) => {
      const value = rowData[col.name]
      if (value === undefined || value === null) {
        return null
      }
      return sql`${userTableRows.data}->>${col.name} = ${String(value)}`
    })
    const validUniqueFilters = uniqueFilters.filter((f): f is Exclude<typeof f, null> => f !== null)
    if (validUniqueFilters.length === 0) {
      return NextResponse.json(
        {
          error: `Upsert requires values for at least one unique field: ${uniqueColumns.map((c) => c.name).join(', ')}`,
        },
        { status: 400 }
      )
    }
    const [existingRow] = await db
      .select()
      .from(userTableRows)
      .where(
        and(
          eq(userTableRows.tableId, tableId),
          eq(userTableRows.workspaceId, validated.workspaceId),
          or(...validUniqueFilters)
        )
      )
      .limit(1)
    const now = new Date()
    // Capacity only matters on the insert path (no existing row to replace).
    if (!existingRow && table.rowCount >= table.maxRows) {
      return NextResponse.json(
        { error: `Table row limit reached (${table.maxRows} rows max)` },
        { status: 400 }
      )
    }
    const upsertResult = await db.transaction(async (trx) => {
      if (existingRow) {
        // Update path: the row's data is REPLACED with the new payload
        // (not merged with the existing data).
        const [updatedRow] = await trx
          .update(userTableRows)
          .set({
            data: validated.data,
            updatedAt: now,
          })
          .where(eq(userTableRows.id, existingRow.id))
          .returning()
        return {
          row: updatedRow,
          operation: 'update' as const,
        }
      }
      const [insertedRow] = await trx
        .insert(userTableRows)
        .values({
          id: `row_${crypto.randomUUID().replace(/-/g, '')}`,
          tableId,
          workspaceId: validated.workspaceId,
          data: validated.data,
          createdAt: now,
          updatedAt: now,
          createdBy: authResult.userId,
        })
        .returning()
      return {
        row: insertedRow,
        operation: 'insert' as const,
      }
    })
    logger.info(
      `[${requestId}] Upserted (${upsertResult.operation}) row ${upsertResult.row.id} in table ${tableId}`
    )
    return NextResponse.json({
      success: true,
      data: {
        row: {
          id: upsertResult.row.id,
          data: upsertResult.row.data,
          createdAt: upsertResult.row.createdAt.toISOString(),
          updatedAt: upsertResult.row.updatedAt.toISOString(),
        },
        operation: upsertResult.operation,
        message: `Row ${upsertResult.operation === 'update' ? 'updated' : 'inserted'} successfully`,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error upserting row:`, error)
    const errorMessage = error instanceof Error ? error.message : String(error)
    const detailedError = `Failed to upsert row: ${errorMessage}`
    return NextResponse.json({ error: detailedError }, { status: 500 })
  }
}

View File

@@ -0,0 +1,258 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import {
canCreateTable,
createTable,
getWorkspaceTableLimits,
listTables,
TABLE_LIMITS,
type TableSchema,
} from '@/lib/table'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { normalizeColumn } from './utils'
const logger = createLogger('TableAPI')
// Column definition: identifier-style name, one of five value types, and
// optional required/unique flags (both default to false).
const ColumnSchema = z.object({
  name: z
    .string()
    .min(1, 'Column name is required')
    .max(
      TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH,
      `Column name must be ${TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH} characters or less`
    )
    .regex(
      /^[a-z_][a-z0-9_]*$/i,
      'Column name must start with a letter or underscore and contain only alphanumeric characters and underscores'
    ),
  type: z.enum(['string', 'number', 'boolean', 'date', 'json'], {
    errorMap: () => ({
      message: 'Column type must be one of: string, number, boolean, date, json',
    }),
  }),
  required: z.boolean().optional().default(false),
  unique: z.boolean().optional().default(false),
})
// Table creation payload: identifier-style name, optional description,
// 1..MAX_COLUMNS_PER_TABLE columns, and the owning workspace.
const CreateTableSchema = z.object({
  name: z
    .string()
    .min(1, 'Table name is required')
    .max(
      TABLE_LIMITS.MAX_TABLE_NAME_LENGTH,
      `Table name must be ${TABLE_LIMITS.MAX_TABLE_NAME_LENGTH} characters or less`
    )
    .regex(
      /^[a-z_][a-z0-9_]*$/i,
      'Table name must start with a letter or underscore and contain only alphanumeric characters and underscores'
    ),
  description: z
    .string()
    .max(
      TABLE_LIMITS.MAX_DESCRIPTION_LENGTH,
      `Description must be ${TABLE_LIMITS.MAX_DESCRIPTION_LENGTH} characters or less`
    )
    .optional(),
  schema: z.object({
    columns: z
      .array(ColumnSchema)
      .min(1, 'Table must have at least one column')
      .max(
        TABLE_LIMITS.MAX_COLUMNS_PER_TABLE,
        `Table cannot have more than ${TABLE_LIMITS.MAX_COLUMNS_PER_TABLE} columns`
      ),
  }),
  workspaceId: z.string().min(1, 'Workspace ID is required'),
})
// GET query parameters: only the workspace to list tables for.
const ListTablesSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
})
// Result of checkWorkspaceAccess: whether the user can see the workspace at
// all, and whether they may create/modify tables in it.
interface WorkspaceAccessResult {
  hasAccess: boolean
  canWrite: boolean
}
/**
 * Resolves a user's access level for a workspace.
 *
 * No permission at all means no access; 'admin' and 'write' permissions
 * additionally grant write access.
 */
async function checkWorkspaceAccess(
  workspaceId: string,
  userId: string
): Promise<WorkspaceAccessResult> {
  const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId)
  if (permission === null) return { hasAccess: false, canWrite: false }
  return {
    hasAccess: true,
    canWrite: permission === 'admin' || permission === 'write',
  }
}
/** POST /api/table - Creates a new user-defined table. */
export async function POST(request: NextRequest) {
  const requestId = generateRequestId()
  try {
    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    const body: unknown = await request.json()
    const params = CreateTableSchema.parse(body)
    // Creating a table requires write (or admin) permission on the workspace.
    const { hasAccess, canWrite } = await checkWorkspaceAccess(
      params.workspaceId,
      authResult.userId
    )
    if (!hasAccess || !canWrite) {
      return NextResponse.json({ error: 'Access denied' }, { status: 403 })
    }
    // Check billing plan limits
    const existingTables = await listTables(params.workspaceId)
    const { canCreate, maxTables } = await canCreateTable(params.workspaceId, existingTables.length)
    if (!canCreate) {
      return NextResponse.json(
        {
          error: `Workspace has reached the maximum table limit (${maxTables}) for your plan. Please upgrade to create more tables.`,
        },
        { status: 403 }
      )
    }
    // Get plan-based row limits
    const planLimits = await getWorkspaceTableLimits(params.workspaceId)
    const maxRowsPerTable = planLimits.maxRowsPerTable
    // Normalize each column definition before persisting the schema.
    const normalizedSchema: TableSchema = {
      columns: params.schema.columns.map(normalizeColumn),
    }
    const table = await createTable(
      {
        name: params.name,
        description: params.description,
        schema: normalizedSchema,
        workspaceId: params.workspaceId,
        userId: authResult.userId,
        maxRows: maxRowsPerTable,
      },
      requestId
    )
    return NextResponse.json({
      success: true,
      data: {
        table: {
          id: table.id,
          name: table.name,
          description: table.description,
          schema: table.schema,
          rowCount: table.rowCount,
          maxRows: table.maxRows,
          // Timestamps may come back as Date or as raw driver values;
          // serialize defensively either way.
          createdAt:
            table.createdAt instanceof Date
              ? table.createdAt.toISOString()
              : String(table.createdAt),
          updatedAt:
            table.updatedAt instanceof Date
              ? table.updatedAt.toISOString()
              : String(table.updatedAt),
        },
        message: 'Table created successfully',
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    // createTable signals known client-correctable failures via message text;
    // surface those as 400 rather than a generic 500.
    if (error instanceof Error) {
      if (
        error.message.includes('Invalid table name') ||
        error.message.includes('Invalid schema') ||
        error.message.includes('already exists') ||
        error.message.includes('maximum table limit')
      ) {
        return NextResponse.json({ error: error.message }, { status: 400 })
      }
    }
    logger.error(`[${requestId}] Error creating table:`, error)
    return NextResponse.json({ error: 'Failed to create table' }, { status: 500 })
  }
}
/** GET /api/table - Lists all tables in a workspace. */
export async function GET(request: NextRequest) {
  const requestId = generateRequestId()
  try {
    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    const { searchParams } = new URL(request.url)
    const workspaceId = searchParams.get('workspaceId')
    // safeParse here (vs parse elsewhere) so a missing workspaceId yields a
    // structured 400 without relying on the catch block.
    const validation = ListTablesSchema.safeParse({ workspaceId })
    if (!validation.success) {
      return NextResponse.json(
        { error: 'Validation error', details: validation.error.errors },
        { status: 400 }
      )
    }
    const params = validation.data
    // Listing only needs any level of workspace access, not write.
    const { hasAccess } = await checkWorkspaceAccess(params.workspaceId, authResult.userId)
    if (!hasAccess) {
      return NextResponse.json({ error: 'Access denied' }, { status: 403 })
    }
    const tables = await listTables(params.workspaceId)
    logger.info(`[${requestId}] Listed ${tables.length} tables in workspace ${params.workspaceId}`)
    return NextResponse.json({
      success: true,
      data: {
        tables: tables.map((t) => {
          const schemaData = t.schema as TableSchema
          return {
            ...t,
            // Re-normalize the stored schema so responses always expose the
            // canonical column shape.
            schema: {
              columns: schemaData.columns.map(normalizeColumn),
            },
            createdAt:
              t.createdAt instanceof Date ? t.createdAt.toISOString() : String(t.createdAt),
            updatedAt:
              t.updatedAt instanceof Date ? t.updatedAt.toISOString() : String(t.updatedAt),
          }
        }),
        totalCount: tables.length,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error listing tables:`, error)
    return NextResponse.json({ error: 'Failed to list tables' }, { status: 500 })
  }
}

View File

@@ -0,0 +1,164 @@
import { createLogger } from '@sim/logger'
import { NextResponse } from 'next/server'
import type { ColumnDefinition, TableDefinition } from '@/lib/table'
import { getTableById } from '@/lib/table'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('TableUtils')

/** Successful access check: caller may use the resolved `table`. */
export interface TableAccessResult {
  hasAccess: true
  table: TableDefinition
}

/** Failed access check; `notFound` distinguishes a missing table (404) from denied access (403). */
export interface TableAccessDenied {
  hasAccess: false
  notFound?: boolean
  reason?: string
}

/** Discriminated union returned by checkTableAccess / checkTableWriteAccess (discriminant: `hasAccess`). */
export type TableAccessCheck = TableAccessResult | TableAccessDenied

/** Compact result used by `checkAccess`; `status` maps directly to the HTTP response code. */
export type AccessResult = { ok: true; table: TableDefinition } | { ok: false; status: 404 | 403 }

/** Standard JSON error body shape for table API responses. */
export interface ApiErrorResponse {
  error: string
  details?: unknown
}
/**
 * Check if a user has read access to a table.
 * Read access requires any workspace permission (read, write, or admin).
 */
export async function checkTableAccess(tableId: string, userId: string): Promise<TableAccessCheck> {
  const table = await getTableById(tableId)
  if (!table) {
    return { hasAccess: false, notFound: true }
  }
  // Any non-null permission on the owning workspace grants read.
  const permission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId)
  return permission === null
    ? { hasAccess: false, reason: 'User does not have access to this table' }
    : { hasAccess: true, table }
}
/**
 * Check if a user has write access to a table.
 * Write access requires write or admin workspace permission.
 */
export async function checkTableWriteAccess(
  tableId: string,
  userId: string
): Promise<TableAccessCheck> {
  const table = await getTableById(tableId)
  if (!table) {
    return { hasAccess: false, notFound: true }
  }
  const permission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId)
  // Read-only members are denied; only write/admin may mutate table data.
  const canWrite = permission === 'write' || permission === 'admin'
  if (canWrite) {
    return { hasAccess: true, table }
  }
  return { hasAccess: false, reason: 'User does not have write access to this table' }
}
/**
 * Access check returning `{ ok, table }` or `{ ok: false, status }`.
 * Uses workspace permissions only.
 *
 * @param level minimum permission required; defaults to 'read'
 */
export async function checkAccess(
  tableId: string,
  userId: string,
  level: 'read' | 'write' | 'admin' = 'read'
): Promise<AccessResult> {
  const table = await getTableById(tableId)
  if (!table) {
    return { ok: false, status: 404 }
  }
  const permission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId)

  // No workspace permission at all always denies; otherwise compare the
  // user's permission against the requested level.
  let allowed = false
  if (permission !== null) {
    if (level === 'read') {
      allowed = true
    } else if (level === 'write') {
      allowed = permission === 'write' || permission === 'admin'
    } else {
      allowed = permission === 'admin'
    }
  }
  return allowed ? { ok: true, table } : { ok: false, status: 403 }
}
/** Converts a failed `checkAccess` result into a 404/403 JSON response, logging a warning. */
export function accessError(
  result: { ok: false; status: 404 | 403 },
  requestId: string,
  context?: string
): NextResponse {
  const message = result.status === 404 ? 'Table not found' : 'Access denied'
  const suffix = context ? `: ${context}` : ''
  logger.warn(`[${requestId}] ${message}${suffix}`)
  return NextResponse.json({ error: message }, { status: result.status })
}
/**
 * Converts a TableAccessDenied result to an appropriate HTTP response.
 * Use with checkTableAccess or checkTableWriteAccess.
 */
export function tableAccessError(
  result: TableAccessDenied,
  requestId: string,
  context?: string
): NextResponse {
  // Missing table wins over any denial reason: report 404 with a fixed message.
  let status: 404 | 403 = 403
  let message = result.reason ?? 'Access denied'
  if (result.notFound) {
    status = 404
    message = 'Table not found'
  }
  logger.warn(`[${requestId}] ${message}${context ? `: ${context}` : ''}`)
  return NextResponse.json({ error: message }, { status })
}
/** Returns true only when the table exists and belongs to the given workspace. */
export async function verifyTableWorkspace(tableId: string, workspaceId: string): Promise<boolean> {
  const table = await getTableById(tableId)
  if (!table) {
    return false
  }
  return table.workspaceId === workspaceId
}
/** Builds a JSON error response, attaching `details` only when provided. */
export function errorResponse(
  message: string,
  status: number,
  details?: unknown
): NextResponse<ApiErrorResponse> {
  // Omit the `details` key entirely when undefined so the body stays minimal.
  const body: ApiErrorResponse =
    details === undefined ? { error: message } : { error: message, details }
  return NextResponse.json(body, { status })
}
/** 400 Bad Request, optionally carrying validation details. */
export function badRequestResponse(message: string, details?: unknown) {
  return errorResponse(message, 400, details)
}
/** 401 Unauthorized. */
export function unauthorizedResponse(message = 'Authentication required') {
  return errorResponse(message, 401)
}
/** 403 Forbidden. */
export function forbiddenResponse(message = 'Access denied') {
  return errorResponse(message, 403)
}
/** 404 Not Found. */
export function notFoundResponse(message = 'Resource not found') {
  return errorResponse(message, 404)
}
/** 500 Internal Server Error. */
export function serverErrorResponse(message = 'Internal server error') {
  return errorResponse(message, 500)
}
/**
 * Normalizes a column definition so the optional `required` and `unique`
 * flags are always present, defaulting to false when unset.
 */
export function normalizeColumn(col: ColumnDefinition): ColumnDefinition {
  const required = col.required ?? false
  const unique = col.unique ?? false
  return { name: col.name, type: col.type, required, unique }
}

View File

@@ -10,6 +10,7 @@ import { checkAndBillOverageThreshold } from '@/lib/billing/threshold-billing'
import { env } from '@/lib/core/config/env'
import { getCostMultiplier, isBillingEnabled } from '@/lib/core/config/feature-flags'
import { generateRequestId } from '@/lib/core/utils/request'
import { enrichTableSchema } from '@/lib/table/llm/wand'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
import { extractResponseText, parseResponsesUsage } from '@/providers/openai/utils'
import { getModelPricing } from '@/providers/utils'
@@ -48,6 +49,7 @@ interface RequestBody {
history?: ChatMessage[]
workflowId?: string
generationType?: string
wandContext?: Record<string, unknown>
}
function safeStringify(value: unknown): string {
@@ -58,6 +60,38 @@ function safeStringify(value: unknown): string {
}
}
/**
* Wand enricher function type.
* Enrichers add context to the system prompt based on generationType.
*/
type WandEnricher = (
workspaceId: string | null,
context: Record<string, unknown>
) => Promise<string | null>
/**
* Registry of wand enrichers by generationType.
* Each enricher returns additional context to append to the system prompt.
*/
const wandEnrichers: Partial<Record<string, WandEnricher>> = {
timestamp: async () => {
const now = new Date()
return `Current date and time context for reference:
- Current UTC timestamp: ${now.toISOString()}
- Current Unix timestamp (seconds): ${Math.floor(now.getTime() / 1000)}
- Current Unix timestamp (milliseconds): ${now.getTime()}
- Current date (UTC): ${now.toISOString().split('T')[0]}
- Current year: ${now.getUTCFullYear()}
- Current month: ${now.getUTCMonth() + 1}
- Current day of month: ${now.getUTCDate()}
- Current day of week: ${['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'][now.getUTCDay()]}
Use this context to calculate relative dates like "yesterday", "last week", "beginning of this month", etc.`
},
'table-schema': enrichTableSchema,
}
async function updateUserStatsForWand(
userId: string,
usage: {
@@ -147,7 +181,15 @@ export async function POST(req: NextRequest) {
try {
const body = (await req.json()) as RequestBody
const { prompt, systemPrompt, stream = false, history = [], workflowId, generationType } = body
const {
prompt,
systemPrompt,
stream = false,
history = [],
workflowId,
generationType,
wandContext = {},
} = body
if (!prompt) {
logger.warn(`[${requestId}] Invalid request: Missing prompt.`)
@@ -222,20 +264,15 @@ export async function POST(req: NextRequest) {
systemPrompt ||
'You are a helpful AI assistant. Generate content exactly as requested by the user.'
if (generationType === 'timestamp') {
const now = new Date()
const currentTimeContext = `\n\nCurrent date and time context for reference:
- Current UTC timestamp: ${now.toISOString()}
- Current Unix timestamp (seconds): ${Math.floor(now.getTime() / 1000)}
- Current Unix timestamp (milliseconds): ${now.getTime()}
- Current date (UTC): ${now.toISOString().split('T')[0]}
- Current year: ${now.getUTCFullYear()}
- Current month: ${now.getUTCMonth() + 1}
- Current day of month: ${now.getUTCDate()}
- Current day of week: ${['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'][now.getUTCDay()]}
Use this context to calculate relative dates like "yesterday", "last week", "beginning of this month", etc.`
finalSystemPrompt += currentTimeContext
// Apply enricher if one exists for this generationType
if (generationType) {
const enricher = wandEnrichers[generationType]
if (enricher) {
const enrichment = await enricher(workspaceId, wandContext)
if (enrichment) {
finalSystemPrompt += `\n\n${enrichment}`
}
}
}
if (generationType === 'cron-expression') {

View File

@@ -0,0 +1,31 @@
'use client'
import { Trash2, X } from 'lucide-react'
import { Button } from '@/components/emcn'
interface ActionBarProps {
  selectedCount: number
  onDelete: () => void
  onClearSelection: () => void
}

/**
 * Selection toolbar shown while one or more rows are checked.
 * Offers clearing the selection and bulk-deleting the selected rows.
 */
export function ActionBar({ selectedCount, onDelete, onClearSelection }: ActionBarProps) {
  // Singular/plural label for the current selection size.
  const rowWord = selectedCount === 1 ? 'row' : 'rows'
  return (
    <div className='flex h-[36px] shrink-0 items-center justify-between border-[var(--border)] border-b bg-[var(--surface-4)] px-[16px]'>
      <div className='flex items-center gap-[12px]'>
        <span className='font-medium text-[12px] text-[var(--text-secondary)]'>
          {selectedCount} {rowWord} selected
        </span>
        <Button variant='ghost' size='sm' onClick={onClearSelection}>
          <X className='mr-[4px] h-[10px] w-[10px]' />
          Clear
        </Button>
      </div>
      <Button variant='destructive' size='sm' onClick={onDelete}>
        <Trash2 className='mr-[4px] h-[10px] w-[10px]' />
        Delete
      </Button>
    </div>
  )
}

View File

@@ -0,0 +1,72 @@
import { Plus } from 'lucide-react'
import { Button, TableCell, TableRow } from '@/components/emcn'
import { Skeleton } from '@/components/ui/skeleton'
import type { ColumnDefinition } from '@/lib/table'
interface LoadingRowsProps {
  // Column definitions determine how many skeleton cells each row gets and their widths.
  columns: ColumnDefinition[]
}

/** Renders 25 skeleton placeholder rows while table data is loading. */
export function LoadingRows({ columns }: LoadingRowsProps) {
  return (
    <>
      {Array.from({ length: 25 }).map((_, rowIndex) => (
        <TableRow key={rowIndex}>
          {/* Leading narrow skeleton — presumably stands in for the selection-checkbox column. */}
          <TableCell>
            <Skeleton className='h-[14px] w-[14px]' />
          </TableCell>
          {columns.map((col, colIndex) => {
            // Base skeleton width per column type: wide for json/string, narrow for boolean.
            const baseWidth =
              col.type === 'json'
                ? 200
                : col.type === 'string'
                  ? 160
                  : col.type === 'number'
                    ? 80
                    : col.type === 'boolean'
                      ? 50
                      : col.type === 'date'
                        ? 100
                        : 120
            // Deterministic 0/20/40px jitter so skeleton rows don't look uniformly identical.
            const variation = ((rowIndex + colIndex) % 3) * 20
            const width = baseWidth + variation
            return (
              <TableCell key={col.name}>
                <Skeleton className='h-[16px]' style={{ width: `${width}px` }} />
              </TableCell>
            )
          })}
        </TableRow>
      ))}
    </>
  )
}
interface EmptyRowsProps {
  // Number of data columns; colSpan adds 1 for the leading selection column.
  columnCount: number
  // True when a filter is active — changes the message and hides the add button.
  hasFilter: boolean
  onAddRow: () => void
}

/** Empty-state row: explains why no rows are visible and offers adding the first one. */
export function EmptyRows({ columnCount, hasFilter, onAddRow }: EmptyRowsProps) {
  return (
    <TableRow>
      <TableCell colSpan={columnCount + 1} className='h-[160px]'>
        {/* NOTE(review): `fixed` + translate centers relative to the viewport, not the
            table cell — confirm this is intended when the table is narrower than the page. */}
        <div className='-translate-x-1/2 fixed left-1/2'>
          <div className='flex flex-col items-center gap-[12px]'>
            <span className='text-[13px] text-[var(--text-tertiary)]'>
              {hasFilter ? 'No rows match your filter' : 'No data'}
            </span>
            {!hasFilter && (
              <Button variant='default' size='sm' onClick={onAddRow}>
                <Plus className='mr-[4px] h-[12px] w-[12px]' />
                Add first row
              </Button>
            )}
          </div>
        </div>
      </TableCell>
    </TableRow>
  )
}

View File

@@ -0,0 +1,99 @@
import type { ColumnDefinition } from '@/lib/table'
import { STRING_TRUNCATE_LENGTH } from '../lib/constants'
import type { CellViewerData } from '../lib/types'
interface CellRendererProps {
  // Raw cell value; may be null/undefined for missing data.
  value: unknown
  // Column definition whose `type` selects the rendering branch.
  column: ColumnDefinition
  // Opens the cell viewer modal for values that warrant a full view (json/date/long text).
  onCellClick: (columnName: string, value: unknown, type: CellViewerData['type']) => void
}

/**
 * Renders a single table cell based on its column type:
 * json and date values become clickable buttons that open the cell viewer,
 * strings longer than STRING_TRUNCATE_LENGTH are truncated and clickable,
 * and everything else renders inline.
 */
export function CellRenderer({ value, column, onCellClick }: CellRendererProps) {
  const isNull = value === null || value === undefined
  if (isNull) {
    // NOTE(review): renders an *empty* italic/muted span for null — the styling suggests
    // placeholder text (e.g. 'null' or '—') may have been intended here; confirm.
    return <span className='text-[var(--text-muted)] italic'></span>
  }
  if (column.type === 'json') {
    // Compact single-line preview; the full pretty-printed JSON opens in the viewer modal.
    const jsonStr = JSON.stringify(value)
    return (
      <button
        type='button'
        className='block max-w-[300px] cursor-pointer select-none truncate rounded-[4px] border border-[var(--border-1)] px-[6px] py-[2px] text-left font-mono text-[11px] text-[var(--text-secondary)] transition-colors hover:border-[var(--text-muted)] hover:text-[var(--text-primary)]'
        onClick={(e) => {
          // Prevent row-level click/selection handlers from also firing.
          e.preventDefault()
          e.stopPropagation()
          onCellClick(column.name, value, 'json')
        }}
        title='Click to view full JSON'
      >
        {jsonStr}
      </button>
    )
  }
  if (column.type === 'boolean') {
    const boolValue = Boolean(value)
    return (
      <span className={boolValue ? 'text-green-500' : 'text-[var(--text-tertiary)]'}>
        {boolValue ? 'true' : 'false'}
      </span>
    )
  }
  if (column.type === 'number') {
    return (
      <span className='font-mono text-[12px] text-[var(--text-secondary)]'>{String(value)}</span>
    )
  }
  if (column.type === 'date') {
    try {
      const date = new Date(String(value))
      const formatted = date.toLocaleDateString('en-US', {
        year: 'numeric',
        month: 'short',
        day: 'numeric',
        hour: '2-digit',
        minute: '2-digit',
      })
      // NOTE(review): new Date(bad input) yields Invalid Date rather than throwing, so this
      // catch likely never fires and 'Invalid Date' text could render — confirm inputs.
      return (
        <button
          type='button'
          className='cursor-pointer select-none text-left text-[12px] text-[var(--text-secondary)] underline decoration-[var(--border-1)] decoration-dotted underline-offset-2 transition-colors hover:text-[var(--text-primary)] hover:decoration-[var(--text-muted)]'
          onClick={(e) => {
            e.preventDefault()
            e.stopPropagation()
            onCellClick(column.name, value, 'date')
          }}
          title='Click to view ISO format'
        >
          {formatted}
        </button>
      )
    } catch {
      return <span className='text-[var(--text-primary)]'>{String(value)}</span>
    }
  }
  // Fallback: treat as text; long strings become a clickable truncated preview.
  const strValue = String(value)
  if (strValue.length > STRING_TRUNCATE_LENGTH) {
    return (
      <button
        type='button'
        className='block max-w-[300px] cursor-pointer select-none truncate text-left text-[var(--text-primary)] underline decoration-[var(--border-1)] decoration-dotted underline-offset-2 transition-colors hover:decoration-[var(--text-muted)]'
        onClick={(e) => {
          e.preventDefault()
          e.stopPropagation()
          onCellClick(column.name, value, 'text')
        }}
        title='Click to view full text'
      >
        {strValue}
      </button>
    )
  }
  return <span className='text-[var(--text-primary)]'>{strValue}</span>
}

View File

@@ -0,0 +1,84 @@
import { Copy, X } from 'lucide-react'
import { Badge, Button, Modal, ModalBody, ModalContent } from '@/components/emcn'
import type { CellViewerData } from '../lib/types'
interface CellViewerModalProps {
  // Cell to display; null means the modal is closed (renders nothing).
  cellViewer: CellViewerData | null
  onClose: () => void
  // Copies the cell value; parent owns the clipboard logic and `copied` feedback flag.
  onCopy: () => void
  copied: boolean
}

/**
 * Modal showing the full contents of a single cell: pretty-printed JSON,
 * a formatted + ISO view for dates, or plain wrapped text. Includes a copy button.
 */
export function CellViewerModal({ cellViewer, onClose, onCopy, copied }: CellViewerModalProps) {
  if (!cellViewer) return null
  return (
    <Modal open={!!cellViewer} onOpenChange={(open) => !open && onClose()}>
      <ModalContent className='w-[640px] duration-100'>
        {/* Header: column name plus a type badge and copy/close actions. */}
        <div className='flex items-center justify-between gap-[8px] px-[16px] py-[10px]'>
          <div className='flex min-w-0 items-center gap-[8px]'>
            <span className='truncate font-medium text-[14px] text-[var(--text-primary)]'>
              {cellViewer.columnName}
            </span>
            <Badge
              variant={
                cellViewer.type === 'json' ? 'blue' : cellViewer.type === 'date' ? 'purple' : 'gray'
              }
              size='sm'
            >
              {cellViewer.type === 'json' ? 'JSON' : cellViewer.type === 'date' ? 'Date' : 'Text'}
            </Badge>
          </div>
          <div className='flex shrink-0 items-center gap-[8px]'>
            <Button variant={copied ? 'tertiary' : 'default'} size='sm' onClick={onCopy}>
              <Copy className='mr-[4px] h-[12px] w-[12px]' />
              {copied ? 'Copied!' : 'Copy'}
            </Button>
            <Button variant='ghost' size='sm' onClick={onClose}>
              <X className='h-[14px] w-[14px]' />
            </Button>
          </div>
        </div>
        <ModalBody className='p-0'>
          {cellViewer.type === 'json' ? (
            <pre className='m-[16px] max-h-[450px] overflow-auto rounded-[6px] border border-[var(--border)] bg-[var(--surface-4)] p-[16px] font-mono text-[12px] text-[var(--text-primary)] leading-[1.6]'>
              {JSON.stringify(cellViewer.value, null, 2)}
            </pre>
          ) : cellViewer.type === 'date' ? (
            // Dates get two panels: a human-readable rendering and the raw ISO string.
            <div className='m-[16px] space-y-[12px]'>
              <div className='rounded-[6px] border border-[var(--border)] bg-[var(--surface-4)] p-[16px]'>
                <div className='mb-[6px] font-medium text-[11px] text-[var(--text-tertiary)] uppercase tracking-wide'>
                  Formatted
                </div>
                <div className='text-[14px] text-[var(--text-primary)]'>
                  {new Date(String(cellViewer.value)).toLocaleDateString('en-US', {
                    weekday: 'long',
                    year: 'numeric',
                    month: 'long',
                    day: 'numeric',
                    hour: '2-digit',
                    minute: '2-digit',
                    second: '2-digit',
                    timeZoneName: 'short',
                  })}
                </div>
              </div>
              <div className='rounded-[6px] border border-[var(--border)] bg-[var(--surface-4)] p-[16px]'>
                <div className='mb-[6px] font-medium text-[11px] text-[var(--text-tertiary)] uppercase tracking-wide'>
                  ISO Format
                </div>
                <div className='font-mono text-[13px] text-[var(--text-secondary)]'>
                  {String(cellViewer.value)}
                </div>
              </div>
            </div>
          ) : (
            <div className='m-[16px] max-h-[450px] overflow-auto whitespace-pre-wrap break-words rounded-[6px] border border-[var(--border)] bg-[var(--surface-4)] p-[16px] text-[13px] text-[var(--text-primary)] leading-[1.7]'>
              {String(cellViewer.value)}
            </div>
          )}
        </ModalBody>
      </ModalContent>
    </Modal>
  )
}

View File

@@ -0,0 +1,49 @@
import { Edit, Trash2 } from 'lucide-react'
import {
Popover,
PopoverAnchor,
PopoverContent,
PopoverDivider,
PopoverItem,
} from '@/components/emcn'
import type { ContextMenuState } from '../lib/types'
interface ContextMenuProps {
  // Open/closed state plus the fixed screen position to anchor the menu at.
  contextMenu: ContextMenuState
  onClose: () => void
  onEdit: () => void
  onDelete: () => void
}

/**
 * Right-click context menu for a table row. Anchored to a 1x1 fixed-position
 * element so the popover opens exactly at the stored cursor coordinates.
 */
export function ContextMenu({ contextMenu, onClose, onEdit, onDelete }: ContextMenuProps) {
  return (
    <Popover
      open={contextMenu.isOpen}
      onOpenChange={(open) => !open && onClose()}
      variant='secondary'
      size='sm'
      colorScheme='inverted'
    >
      {/* Invisible anchor placed at the click position. */}
      <PopoverAnchor
        style={{
          position: 'fixed',
          left: `${contextMenu.position.x}px`,
          top: `${contextMenu.position.y}px`,
          width: '1px',
          height: '1px',
        }}
      />
      <PopoverContent align='start' side='bottom' sideOffset={4}>
        <PopoverItem onClick={onEdit}>
          <Edit className='mr-[8px] h-[12px] w-[12px]' />
          Edit row
        </PopoverItem>
        <PopoverDivider />
        <PopoverItem onClick={onDelete} className='text-[var(--text-error)]'>
          <Trash2 className='mr-[8px] h-[12px] w-[12px]' />
          Delete row
        </PopoverItem>
      </PopoverContent>
    </Popover>
  )
}

View File

@@ -0,0 +1,63 @@
import { Info, RefreshCw } from 'lucide-react'
import { Badge, Button, Tooltip } from '@/components/emcn'
import { Skeleton } from '@/components/ui/skeleton'
interface HeaderBarProps {
  tableName: string
  // Total row count shown in the badge; replaced by a skeleton while loading.
  totalCount: number
  isLoading: boolean
  // Navigates back to the workspace's table list.
  onNavigateBack: () => void
  onShowSchema: () => void
  onRefresh: () => void
}

/**
 * Table-view header: breadcrumb (Tables / name), row-count badge, and
 * schema/refresh action buttons with tooltips.
 */
export function HeaderBar({
  tableName,
  totalCount,
  isLoading,
  onNavigateBack,
  onShowSchema,
  onRefresh,
}: HeaderBarProps) {
  return (
    <div className='flex h-[48px] shrink-0 items-center justify-between border-[var(--border)] border-b px-[16px]'>
      <div className='flex items-center gap-[8px]'>
        {/* NOTE(review): plain <button> defaults to type='submit' inside a form — fine
            here if no ancestor form exists; confirm or add type='button'. */}
        <button
          onClick={onNavigateBack}
          className='text-[13px] text-[var(--text-tertiary)] transition-colors hover:text-[var(--text-primary)]'
        >
          Tables
        </button>
        <span className='text-[var(--text-muted)]'>/</span>
        <span className='font-medium text-[13px] text-[var(--text-primary)]'>{tableName}</span>
        {isLoading ? (
          <Skeleton className='h-[18px] w-[60px] rounded-full' />
        ) : (
          <Badge variant='gray-secondary' size='sm'>
            {totalCount} {totalCount === 1 ? 'row' : 'rows'}
          </Badge>
        )}
      </div>
      <div className='flex items-center gap-[8px]'>
        <Tooltip.Root>
          <Tooltip.Trigger asChild>
            <Button variant='ghost' size='sm' onClick={onShowSchema}>
              <Info className='h-[14px] w-[14px]' />
            </Button>
          </Tooltip.Trigger>
          <Tooltip.Content>View Schema</Tooltip.Content>
        </Tooltip.Root>
        <Tooltip.Root>
          <Tooltip.Trigger asChild>
            <Button variant='ghost' size='sm' onClick={onRefresh}>
              <RefreshCw className='h-[14px] w-[14px]' />
            </Button>
          </Tooltip.Trigger>
          <Tooltip.Content>Refresh</Tooltip.Content>
        </Tooltip.Root>
      </div>
    </div>
  )
}

View File

@@ -0,0 +1,11 @@
export * from './action-bar'
export * from './body-states'
export * from './cell-renderer'
export * from './cell-viewer-modal'
export * from './context-menu'
export * from './header-bar'
export * from './pagination'
export * from './query-builder'
export * from './row-modal'
export * from './schema-modal'
export * from './table-viewer'

View File

@@ -0,0 +1,40 @@
import { Button } from '@/components/emcn'
interface PaginationProps {
  currentPage: number
  totalPages: number
  totalCount: number
  onPreviousPage: () => void
  onNextPage: () => void
}

/**
 * Footer pager with previous/next controls and a page summary.
 * Renders nothing when all rows fit on a single page.
 */
export function Pagination({
  currentPage,
  totalPages,
  totalCount,
  onPreviousPage,
  onNextPage,
}: PaginationProps) {
  if (totalPages <= 1) return null

  // currentPage is zero-based; disable navigation at either boundary.
  const onFirstPage = currentPage === 0
  const onLastPage = currentPage === totalPages - 1

  return (
    <div className='flex h-[40px] shrink-0 items-center justify-between border-[var(--border)] border-t px-[16px]'>
      <span className='text-[11px] text-[var(--text-tertiary)]'>
        Page {currentPage + 1} of {totalPages} ({totalCount} rows)
      </span>
      <div className='flex items-center gap-[4px]'>
        <Button variant='ghost' size='sm' onClick={onPreviousPage} disabled={onFirstPage}>
          Previous
        </Button>
        <Button variant='ghost' size='sm' onClick={onNextPage} disabled={onLastPage}>
          Next
        </Button>
      </div>
    </div>
  )
}

View File

@@ -0,0 +1,89 @@
'use client'
import { X } from 'lucide-react'
import { Button, Combobox, Input } from '@/components/emcn'
import type { FilterRule } from '@/lib/table/query-builder/constants'
interface FilterRowProps {
  rule: FilterRule
  // Position in the rule list; the first row shows a fixed 'where' instead of and/or.
  index: number
  columnOptions: Array<{ value: string; label: string }>
  comparisonOptions: Array<{ value: string; label: string }>
  logicalOptions: Array<{ value: string; label: string }>
  // Field-level updates are keyed by rule id so the parent can patch its rule list.
  onUpdate: (id: string, field: keyof FilterRule, value: string) => void
  onRemove: (id: string) => void
  // Applies the current filter set (also triggered by Enter in the value input).
  onApply: () => void
}

/** One row of the filter builder: [remove] [where|and/or] [column] [operator] [value]. */
export function FilterRow({
  rule,
  index,
  columnOptions,
  comparisonOptions,
  logicalOptions,
  onUpdate,
  onRemove,
  onApply,
}: FilterRowProps) {
  return (
    <div className='flex items-center gap-[8px]'>
      <Button
        variant='ghost'
        size='sm'
        onClick={() => onRemove(rule.id)}
        className='h-[28px] w-[28px] shrink-0 p-0 text-[var(--text-tertiary)] hover:text-[var(--text-primary)]'
      >
        <X className='h-[12px] w-[12px]' />
      </Button>
      <div className='w-[80px] shrink-0'>
        {index === 0 ? (
          // The first rule has no logical operator; show a disabled 'where' placeholder.
          <Combobox
            size='sm'
            options={[{ value: 'where', label: 'where' }]}
            value='where'
            disabled
          />
        ) : (
          <Combobox
            size='sm'
            options={logicalOptions}
            value={rule.logicalOperator}
            onChange={(value) => onUpdate(rule.id, 'logicalOperator', value as 'and' | 'or')}
          />
        )}
      </div>
      <div className='w-[140px] shrink-0'>
        <Combobox
          size='sm'
          options={columnOptions}
          value={rule.column}
          onChange={(value) => onUpdate(rule.id, 'column', value)}
          placeholder='Column'
        />
      </div>
      <div className='w-[130px] shrink-0'>
        <Combobox
          size='sm'
          options={comparisonOptions}
          value={rule.operator}
          onChange={(value) => onUpdate(rule.id, 'operator', value)}
        />
      </div>
      <Input
        className='h-[28px] min-w-[200px] flex-1 text-[12px]'
        value={rule.value}
        onChange={(e) => onUpdate(rule.id, 'value', e.target.value)}
        placeholder='Value'
        onKeyDown={(e) => {
          // Enter applies the whole filter set without needing the Apply button.
          if (e.key === 'Enter') {
            onApply()
          }
        }}
      />
    </div>
  )
}

View File

@@ -0,0 +1,137 @@
'use client'
import { useCallback, useMemo, useState } from 'react'
import { ArrowUpAZ, Loader2, Plus } from 'lucide-react'
import { nanoid } from 'nanoid'
import { Button } from '@/components/emcn'
import type { FilterRule, SortRule } from '@/lib/table/query-builder/constants'
import { filterRulesToFilter, sortRuleToSort } from '@/lib/table/query-builder/converters'
import { useFilterBuilder } from '@/lib/table/query-builder/use-query-builder'
import type { ColumnDefinition } from '@/lib/table/types'
import type { QueryOptions } from '../../lib/types'
import { FilterRow } from './filter-row'
import { SortRow } from './sort-row'
// Minimal column shape needed by the builder (name for options, type reserved for operators).
type Column = Pick<ColumnDefinition, 'name' | 'type'>

interface QueryBuilderProps {
  columns: Column[]
  // Called with the converted filter/sort when the user applies or clears.
  onApply: (options: QueryOptions) => void
  onAddRow: () => void
  isLoading?: boolean
}

/**
 * Interactive filter + sort builder for the table view. Rule editing state lives
 * here; useFilterBuilder supplies the option lists and add/remove/update handlers.
 * Apply converts rules into QueryOptions; Clear resets and re-queries unfiltered.
 */
export function QueryBuilder({ columns, onApply, onAddRow, isLoading = false }: QueryBuilderProps) {
  const [rules, setRules] = useState<FilterRule[]>([])
  // At most one sort rule; null means unsorted.
  const [sortRule, setSortRule] = useState<SortRule | null>(null)

  const columnOptions = useMemo(
    () => columns.map((col) => ({ value: col.name, label: col.name })),
    [columns]
  )

  const {
    comparisonOptions,
    logicalOptions,
    sortDirectionOptions,
    addRule: handleAddRule,
    removeRule: handleRemoveRule,
    updateRule: handleUpdateRule,
  } = useFilterBuilder({
    columns: columnOptions,
    rules,
    setRules,
  })

  // New sort defaults to the first column, ascending.
  const handleAddSort = useCallback(() => {
    setSortRule({
      id: nanoid(),
      column: columns[0]?.name || '',
      direction: 'asc',
    })
  }, [columns])

  const handleRemoveSort = useCallback(() => {
    setSortRule(null)
  }, [])

  // Convert UI rules to the query representation and hand off to the parent.
  const handleApply = useCallback(() => {
    const filter = filterRulesToFilter(rules)
    const sort = sortRuleToSort(sortRule)
    onApply({ filter, sort })
  }, [rules, sortRule, onApply])

  // Clearing also immediately re-applies an empty query.
  const handleClear = useCallback(() => {
    setRules([])
    setSortRule(null)
    onApply({
      filter: null,
      sort: null,
    })
  }, [onApply])

  const hasChanges = rules.length > 0 || sortRule !== null

  return (
    <div className='flex flex-col gap-[8px]'>
      {rules.map((rule, index) => (
        <FilterRow
          key={rule.id}
          rule={rule}
          index={index}
          columnOptions={columnOptions}
          comparisonOptions={comparisonOptions}
          logicalOptions={logicalOptions}
          onUpdate={handleUpdateRule}
          onRemove={handleRemoveRule}
          onApply={handleApply}
        />
      ))}
      {sortRule && (
        <SortRow
          sortRule={sortRule}
          columnOptions={columnOptions}
          sortDirectionOptions={sortDirectionOptions}
          onChange={setSortRule}
          onRemove={handleRemoveSort}
        />
      )}
      <div className='flex items-center gap-[8px]'>
        <Button variant='default' size='sm' onClick={onAddRow}>
          <Plus className='mr-[4px] h-[12px] w-[12px]' />
          Add row
        </Button>
        <Button variant='default' size='sm' onClick={handleAddRule}>
          <Plus className='mr-[4px] h-[12px] w-[12px]' />
          Add filter
        </Button>
        {!sortRule && (
          <Button variant='default' size='sm' onClick={handleAddSort}>
            <ArrowUpAZ className='mr-[4px] h-[12px] w-[12px]' />
            Add sort
          </Button>
        )}
        {hasChanges && (
          <>
            <Button variant='default' size='sm' onClick={handleApply} disabled={isLoading}>
              {isLoading && <Loader2 className='mr-[4px] h-[12px] w-[12px] animate-spin' />}
              {isLoading ? 'Applying...' : 'Apply'}
            </Button>
            <button
              onClick={handleClear}
              className='text-[12px] text-[var(--text-tertiary)] transition-colors hover:text-[var(--text-primary)]'
            >
              Clear all
            </button>
          </>
        )}
      </div>
    </div>
  )
}

View File

@@ -0,0 +1,65 @@
'use client'
import { ArrowDownAZ, ArrowUpAZ, X } from 'lucide-react'
import { Button, Combobox } from '@/components/emcn'
import type { SortRule } from '@/lib/table/query-builder/constants'
interface SortRowProps {
  sortRule: SortRule
  columnOptions: Array<{ value: string; label: string }>
  sortDirectionOptions: Array<{ value: string; label: string }>
  // Replaces the whole rule (or null to remove it).
  onChange: (rule: SortRule | null) => void
  onRemove: () => void
}

/** Sort-rule row of the query builder: [remove] [order] [column] [direction] + icon. */
export function SortRow({
  sortRule,
  columnOptions,
  sortDirectionOptions,
  onChange,
  onRemove,
}: SortRowProps) {
  return (
    <div className='flex items-center gap-[8px]'>
      <Button
        variant='ghost'
        size='sm'
        onClick={onRemove}
        className='h-[28px] w-[28px] shrink-0 p-0 text-[var(--text-tertiary)] hover:text-[var(--text-primary)]'
      >
        <X className='h-[12px] w-[12px]' />
      </Button>
      {/* Fixed 'order' label mirrors FilterRow's leading 'where' combobox. */}
      <div className='w-[80px] shrink-0'>
        <Combobox size='sm' options={[{ value: 'order', label: 'order' }]} value='order' disabled />
      </div>
      <div className='w-[140px] shrink-0'>
        <Combobox
          size='sm'
          options={columnOptions}
          value={sortRule.column}
          onChange={(value) => onChange({ ...sortRule, column: value })}
          placeholder='Column'
        />
      </div>
      <div className='w-[130px] shrink-0'>
        <Combobox
          size='sm'
          options={sortDirectionOptions}
          value={sortRule.direction}
          onChange={(value) => onChange({ ...sortRule, direction: value as 'asc' | 'desc' })}
        />
      </div>
      {/* Visual indicator of the active direction. */}
      <div className='flex items-center text-[12px] text-[var(--text-tertiary)]'>
        {sortRule.direction === 'asc' ? (
          <ArrowUpAZ className='h-[14px] w-[14px]' />
        ) : (
          <ArrowDownAZ className='h-[14px] w-[14px]' />
        )}
      </div>
    </div>
  )
}

View File

@@ -0,0 +1,363 @@
'use client'
import { useState } from 'react'
import { createLogger } from '@sim/logger'
import { AlertCircle } from 'lucide-react'
import { useParams } from 'next/navigation'
import {
Button,
Checkbox,
Input,
Label,
Modal,
ModalBody,
ModalContent,
ModalFooter,
ModalHeader,
Textarea,
} from '@/components/emcn'
import type { ColumnDefinition, TableInfo, TableRow } from '@/lib/table'
import {
useCreateTableRow,
useDeleteTableRow,
useDeleteTableRows,
useUpdateTableRow,
} from '@/hooks/queries/tables'
const logger = createLogger('RowModal')
/** Props for the row create/edit/delete modal. */
export interface RowModalProps {
  // 'add' creates a row, 'edit' updates `row`, 'delete' removes `row` or `rowIds`.
  mode: 'add' | 'edit' | 'delete'
  isOpen: boolean
  onClose: () => void
  table: TableInfo
  // Row being edited or deleted (single-row operations).
  row?: TableRow
  // Explicit id list for bulk delete; takes precedence over `row` when provided.
  rowIds?: string[]
  // Invoked after a successful mutation so the parent can close/refresh.
  onSuccess: () => void
}
/** Builds the blank form state for 'add' mode: booleans start false, everything else ''. */
function createInitialRowData(columns: ColumnDefinition[]): Record<string, unknown> {
  const initial: Record<string, unknown> = {}
  columns.forEach((col) => {
    initial[col.name] = col.type === 'boolean' ? false : ''
  })
  return initial
}

/**
 * Converts raw form values into typed row data matching the table schema.
 * - number: '' becomes null; non-numeric input throws (instead of silently producing NaN)
 * - json: strings are parsed ('' becomes null); non-strings pass through unchanged
 * - boolean: coerced with Boolean()
 * - everything else: falsy values (e.g. '') become null
 *
 * @throws Error when a json or number field cannot be parsed
 */
function cleanRowData(
  columns: ColumnDefinition[],
  rowData: Record<string, unknown>
): Record<string, unknown> {
  const cleanData: Record<string, unknown> = {}
  columns.forEach((col) => {
    const value = rowData[col.name]
    if (col.type === 'number') {
      if (value === '') {
        cleanData[col.name] = null
      } else {
        const num = Number(value)
        // Reject non-numeric input up front; NaN would otherwise serialize to
        // null in the request body and silently drop the user's input.
        if (Number.isNaN(num)) {
          throw new Error(`Invalid number for field: ${col.name}`)
        }
        cleanData[col.name] = num
      }
    } else if (col.type === 'json') {
      if (typeof value === 'string') {
        if (value === '') {
          cleanData[col.name] = null
        } else {
          try {
            cleanData[col.name] = JSON.parse(value)
          } catch {
            throw new Error(`Invalid JSON for field: ${col.name}`)
          }
        }
      } else {
        // Already-structured values (e.g. unedited JSON from an existing row) pass through.
        cleanData[col.name] = value
      }
    } else if (col.type === 'boolean') {
      cleanData[col.name] = Boolean(value)
    } else {
      cleanData[col.name] = value || null
    }
  })
  return cleanData
}

/** Formats a stored value for a form input: JSON pretty-printed, dates as YYYY-MM-DD. */
function formatValueForInput(value: unknown, type: string): string {
  if (value === null || value === undefined) return ''
  if (type === 'json') {
    return typeof value === 'string' ? value : JSON.stringify(value, null, 2)
  }
  if (type === 'date' && value) {
    try {
      const date = new Date(String(value))
      // toISOString throws on an invalid date; fall back to the raw string below.
      return date.toISOString().split('T')[0]
    } catch {
      return String(value)
    }
  }
  return String(value)
}

/**
 * Chooses the initial form state for the modal based on its mode.
 * Edit mode returns a shallow copy of the row's data so in-place form updates
 * can never mutate the row object owned by the table view / query cache.
 */
function getInitialRowData(
  mode: RowModalProps['mode'],
  columns: ColumnDefinition[],
  row?: TableRow
): Record<string, unknown> {
  if (mode === 'add' && columns.length > 0) {
    return createInitialRowData(columns)
  }
  if (mode === 'edit' && row) {
    return { ...row.data }
  }
  return {}
}
/**
 * Modal for creating, editing, or deleting table rows.
 *
 * Behavior by `mode`:
 * - 'add'    — renders a form seeded from column defaults and creates a row.
 * - 'edit'   — renders the same form seeded from `row.data` and updates the row.
 * - 'delete' — renders a confirmation dialog; deletes `rowIds` when provided,
 *              otherwise falls back to the single `row`.
 *
 * Calls `onSuccess` after a successful mutation; failures are logged and
 * surfaced inline via the ErrorMessage banner.
 */
export function RowModal({ mode, isOpen, onClose, table, row, rowIds, onSuccess }: RowModalProps) {
  const params = useParams()
  const workspaceId = params.workspaceId as string
  const tableId = table.id
  const schema = table?.schema
  const columns = schema?.columns || []
  // Form state keyed by column name; initializer runs once on mount.
  const [rowData, setRowData] = useState<Record<string, unknown>>(() =>
    getInitialRowData(mode, columns, row)
  )
  const [error, setError] = useState<string | null>(null)
  const createRowMutation = useCreateTableRow({ workspaceId, tableId })
  const updateRowMutation = useUpdateTableRow({ workspaceId, tableId })
  const deleteRowMutation = useDeleteTableRow({ workspaceId, tableId })
  const deleteRowsMutation = useDeleteTableRows({ workspaceId, tableId })
  // Any in-flight mutation disables the action buttons below.
  const isSubmitting =
    createRowMutation.isPending ||
    updateRowMutation.isPending ||
    deleteRowMutation.isPending ||
    deleteRowsMutation.isPending
  // Create or update the row from the current form state; API errors are
  // shown inline rather than closing the modal.
  const handleFormSubmit = async (e: React.FormEvent) => {
    e.preventDefault()
    setError(null)
    try {
      const cleanData = cleanRowData(columns, rowData)
      if (mode === 'add') {
        await createRowMutation.mutateAsync(cleanData)
      } else if (mode === 'edit' && row) {
        await updateRowMutation.mutateAsync({ rowId: row.id, data: cleanData })
      }
      onSuccess()
    } catch (err) {
      logger.error(`Failed to ${mode} row:`, err)
      setError(err instanceof Error ? err.message : `Failed to ${mode} row`)
    }
  }
  // Delete the targeted rows: bulk ids when provided, otherwise the single
  // `row`; a single id uses the single-row mutation, multiple ids the bulk one.
  const handleDelete = async () => {
    setError(null)
    const idsToDelete = rowIds ?? (row ? [row.id] : [])
    try {
      if (idsToDelete.length === 1) {
        await deleteRowMutation.mutateAsync(idsToDelete[0])
      } else {
        await deleteRowsMutation.mutateAsync(idsToDelete)
      }
      onSuccess()
    } catch (err) {
      logger.error('Failed to delete row(s):', err)
      setError(err instanceof Error ? err.message : 'Failed to delete row(s)')
    }
  }
  // Clear local state before notifying the parent, which unmounts the modal.
  const handleClose = () => {
    setRowData({})
    setError(null)
    onClose()
  }
  // Delete mode UI
  if (mode === 'delete') {
    const deleteCount = rowIds?.length ?? (row ? 1 : 0)
    const isSingleRow = deleteCount === 1
    return (
      <Modal open={isOpen} onOpenChange={handleClose}>
        <ModalContent className='w-[480px]'>
          <ModalHeader>
            <div className='flex items-center gap-[10px]'>
              <div className='flex h-[36px] w-[36px] items-center justify-center rounded-[8px] bg-[var(--bg-error)] text-[var(--text-error)]'>
                <AlertCircle className='h-[18px] w-[18px]' />
              </div>
              <h2 className='font-semibold text-[16px]'>
                Delete {isSingleRow ? 'Row' : `${deleteCount} Rows`}
              </h2>
            </div>
          </ModalHeader>
          <ModalBody>
            <div className='flex flex-col gap-[16px]'>
              <ErrorMessage error={error} />
              <p className='text-[14px] text-[var(--text-secondary)]'>
                Are you sure you want to delete {isSingleRow ? 'this row' : 'these rows'}? This
                action cannot be undone.
              </p>
            </div>
          </ModalBody>
          <ModalFooter className='gap-[10px]'>
            <Button
              type='button'
              variant='default'
              onClick={handleClose}
              className='min-w-[90px]'
              disabled={isSubmitting}
            >
              Cancel
            </Button>
            <Button
              type='button'
              variant='destructive'
              onClick={handleDelete}
              disabled={isSubmitting}
              className='min-w-[120px]'
            >
              {isSubmitting ? 'Deleting...' : 'Delete'}
            </Button>
          </ModalFooter>
        </ModalContent>
      </Modal>
    )
  }
  const isAddMode = mode === 'add'
  return (
    <Modal open={isOpen} onOpenChange={handleClose}>
      <ModalContent className='w-[600px]'>
        <ModalHeader>
          <div className='flex flex-col gap-[4px]'>
            <h2 className='font-semibold text-[16px]'>{isAddMode ? 'Add New Row' : 'Edit Row'}</h2>
            <p className='font-normal text-[13px] text-[var(--text-tertiary)]'>
              {isAddMode ? 'Fill in the values for' : 'Update values for'} {table?.name ?? 'table'}
            </p>
          </div>
        </ModalHeader>
        <ModalBody className='max-h-[60vh] overflow-y-auto'>
          <form onSubmit={handleFormSubmit} className='flex flex-col gap-[16px]'>
            <ErrorMessage error={error} />
            {columns.map((column) => (
              <ColumnField
                key={column.name}
                column={column}
                value={rowData[column.name]}
                onChange={(value) => setRowData((prev) => ({ ...prev, [column.name]: value }))}
              />
            ))}
          </form>
        </ModalBody>
        <ModalFooter className='gap-[10px]'>
          <Button
            type='button'
            variant='default'
            onClick={handleClose}
            className='min-w-[90px]'
            disabled={isSubmitting}
          >
            Cancel
          </Button>
          {/* The form element lives in ModalBody, so this footer button submits
              via onClick rather than type='submit'. */}
          <Button
            type='button'
            variant='tertiary'
            onClick={handleFormSubmit}
            disabled={isSubmitting}
            className='min-w-[120px]'
          >
            {isSubmitting
              ? isAddMode
                ? 'Adding...'
                : 'Updating...'
              : isAddMode
                ? 'Add Row'
                : 'Update Row'}
          </Button>
        </ModalFooter>
      </ModalContent>
    </Modal>
  )
}
function ErrorMessage({ error }: { error: string | null }) {
if (!error) return null
return (
<div className='rounded-[8px] border border-[var(--status-error-border)] bg-[var(--status-error-bg)] px-[14px] py-[12px] text-[13px] text-[var(--status-error-text)]'>
{error}
</div>
)
}
interface ColumnFieldProps {
  // Schema definition for the column being edited.
  column: ColumnDefinition
  // Current form value for this column (may be undefined before first edit).
  value: unknown
  // Called with the raw input value on every change.
  onChange: (value: unknown) => void
}
/**
 * Labeled form input for a single column, choosing the widget by column type:
 * checkbox for booleans, textarea for JSON, and a typed Input for everything
 * else (number/date/text). Shows required/unique markers and a type hint.
 */
function ColumnField({ column, value, onChange }: ColumnFieldProps) {
  return (
    <div className='flex flex-col gap-[8px]'>
      <Label htmlFor={column.name} className='font-medium text-[13px]'>
        {column.name}
        {column.required && <span className='text-[var(--text-error)]'> *</span>}
        {column.unique && (
          <span className='ml-[6px] font-normal text-[11px] text-[var(--text-tertiary)]'>
            (unique)
          </span>
        )}
      </Label>
      {column.type === 'boolean' ? (
        /* Booleans: checkbox plus a True/False caption mirroring its state. */
        <div className='flex items-center gap-[8px]'>
          <Checkbox
            id={column.name}
            checked={Boolean(value)}
            onCheckedChange={(checked) => onChange(checked === true)}
          />
          <Label
            htmlFor={column.name}
            className='font-normal text-[13px] text-[var(--text-tertiary)]'
          >
            {value ? 'True' : 'False'}
          </Label>
        </div>
      ) : column.type === 'json' ? (
        /* JSON: monospace textarea; the string is parsed/validated elsewhere. */
        <Textarea
          id={column.name}
          value={formatValueForInput(value, column.type)}
          onChange={(e) => onChange(e.target.value)}
          placeholder='{"key": "value"}'
          rows={4}
          className='font-mono text-[12px]'
          required={column.required}
        />
      ) : (
        /* number/date get native input types; all other types fall back to text. */
        <Input
          id={column.name}
          type={column.type === 'number' ? 'number' : column.type === 'date' ? 'date' : 'text'}
          value={formatValueForInput(value, column.type)}
          onChange={(e) => onChange(e.target.value)}
          placeholder={`Enter ${column.name}`}
          className='h-[38px]'
          required={column.required}
        />
      )}
      <div className='text-[12px] text-[var(--text-tertiary)]'>
        Type: {column.type}
        {!column.required && ' (optional)'}
      </div>
    </div>
  )
}

View File

@@ -0,0 +1,94 @@
import {
Badge,
Button,
Modal,
ModalBody,
ModalContent,
ModalFooter,
ModalHeader,
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from '@/components/emcn'
import type { ColumnDefinition } from '@/lib/table'
import { getTypeBadgeVariant } from '../lib/utils'
interface SchemaModalProps {
  isOpen: boolean
  onClose: () => void
  columns: ColumnDefinition[]
  tableName?: string
}

/**
 * Read-only modal listing a table's columns alongside their types and
 * required/unique constraints.
 */
export function SchemaModal({ isOpen, onClose, columns, tableName }: SchemaModalProps) {
  const count = columns.length
  const countLabel = `${count} ${count === 1 ? 'column' : 'columns'}`
  return (
    <Modal open={isOpen} onOpenChange={onClose}>
      <ModalContent size='md'>
        <ModalHeader>Table Schema</ModalHeader>
        <ModalBody className='max-h-[60vh] overflow-y-auto'>
          <div className='mb-[10px] flex items-center justify-between gap-[8px]'>
            {tableName ? (
              <span className='truncate font-medium text-[13px] text-[var(--text-primary)]'>
                {tableName}
              </span>
            ) : (
              <div />
            )}
            <Badge variant='gray' size='sm'>
              {countLabel}
            </Badge>
          </div>
          <Table>
            <TableHeader>
              <TableRow>
                <TableHead>Column</TableHead>
                <TableHead>Type</TableHead>
                <TableHead>Constraints</TableHead>
              </TableRow>
            </TableHeader>
            <TableBody>
              {columns.map((column) => {
                const hasConstraints = column.required || column.unique
                return (
                  <TableRow key={column.name}>
                    <TableCell className='font-mono'>{column.name}</TableCell>
                    <TableCell>
                      <Badge variant={getTypeBadgeVariant(column.type)} size='sm'>
                        {column.type}
                      </Badge>
                    </TableCell>
                    <TableCell>
                      <div className='flex items-center gap-[6px]'>
                        {column.required && (
                          <Badge variant='red' size='sm'>
                            required
                          </Badge>
                        )}
                        {column.unique && (
                          <Badge variant='purple' size='sm'>
                            unique
                          </Badge>
                        )}
                        {!hasConstraints && (
                          <span className='text-[var(--text-muted)]'>None</span>
                        )}
                      </div>
                    </TableCell>
                  </TableRow>
                )
              })}
            </TableBody>
          </Table>
        </ModalBody>
        <ModalFooter>
          <Button variant='default' onClick={onClose}>
            Close
          </Button>
        </ModalFooter>
      </ModalContent>
    </Modal>
  )
}

View File

@@ -0,0 +1,308 @@
'use client'
import { useCallback, useState } from 'react'
import { useParams, useRouter } from 'next/navigation'
import {
Badge,
Checkbox,
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import type { TableRow as TableRowType } from '@/lib/table'
import { useContextMenu, useRowSelection, useTableData } from '../hooks'
import type { CellViewerData, QueryOptions } from '../lib/types'
import { ActionBar } from './action-bar'
import { EmptyRows, LoadingRows } from './body-states'
import { CellRenderer } from './cell-renderer'
import { CellViewerModal } from './cell-viewer-modal'
import { ContextMenu } from './context-menu'
import { HeaderBar } from './header-bar'
import { Pagination } from './pagination'
import { QueryBuilder } from './query-builder'
import { RowModal } from './row-modal'
import { SchemaModal } from './schema-modal'
/**
 * Full-page viewer for a single table: header bar, query builder, selectable
 * row grid, pagination, plus the modals for row CRUD, schema display, and
 * cell inspection.
 */
export function TableViewer() {
  const params = useParams()
  const router = useRouter()
  const workspaceId = params.workspaceId as string
  const tableId = params.tableId as string
  const [queryOptions, setQueryOptions] = useState<QueryOptions>({
    filter: null,
    sort: null,
  })
  const [currentPage, setCurrentPage] = useState(0)
  const [showAddModal, setShowAddModal] = useState(false)
  const [editingRow, setEditingRow] = useState<TableRowType | null>(null)
  const [deletingRows, setDeletingRows] = useState<string[]>([])
  const [showSchemaModal, setShowSchemaModal] = useState(false)
  const [cellViewer, setCellViewer] = useState<CellViewerData | null>(null)
  const [copied, setCopied] = useState(false)
  const { tableData, isLoadingTable, rows, totalCount, totalPages, isLoadingRows, refetchRows } =
    useTableData({
      workspaceId,
      tableId,
      queryOptions,
      currentPage,
    })
  const { selectedRows, handleSelectAll, handleSelectRow, clearSelection } = useRowSelection(rows)
  const { contextMenu, handleRowContextMenu, closeContextMenu } = useContextMenu()
  const columns = tableData?.schema?.columns || []
  const selectedCount = selectedRows.size
  const hasSelection = selectedCount > 0
  const isAllSelected = rows.length > 0 && selectedCount === rows.length
  const handleNavigateBack = useCallback(() => {
    router.push(`/workspace/${workspaceId}/tables`)
  }, [router, workspaceId])
  const handleShowSchema = useCallback(() => {
    setShowSchemaModal(true)
  }, [])
  const handleAddRow = useCallback(() => {
    setShowAddModal(true)
  }, [])
  // Applying a new filter/sort always resets pagination to the first page.
  const handleApplyQueryOptions = useCallback((options: QueryOptions) => {
    setQueryOptions(options)
    setCurrentPage(0)
  }, [])
  const handleDeleteSelected = useCallback(() => {
    setDeletingRows(Array.from(selectedRows))
  }, [selectedRows])
  const handleContextMenuEdit = useCallback(() => {
    if (contextMenu.row) {
      setEditingRow(contextMenu.row)
    }
    closeContextMenu()
  }, [contextMenu.row, closeContextMenu])
  const handleContextMenuDelete = useCallback(() => {
    if (contextMenu.row) {
      setDeletingRows([contextMenu.row.id])
    }
    closeContextMenu()
  }, [contextMenu.row, closeContextMenu])
  // Copy the inspected cell to the clipboard and flash a 2s "copied" state.
  // JSON is pretty-printed; every other cell type is copied via String().
  // (The original had separate 'date' and fallback branches with identical
  // bodies — collapsed here.)
  const handleCopyCellValue = useCallback(async () => {
    if (!cellViewer) return
    const text =
      cellViewer.type === 'json'
        ? JSON.stringify(cellViewer.value, null, 2)
        : String(cellViewer.value)
    await navigator.clipboard.writeText(text)
    setCopied(true)
    setTimeout(() => setCopied(false), 2000)
  }, [cellViewer])
  const handleCellClick = useCallback(
    (columnName: string, value: unknown, type: CellViewerData['type']) => {
      setCellViewer({ columnName, value, type })
    },
    []
  )
  if (isLoadingTable) {
    return (
      <div className='flex h-full items-center justify-center'>
        <span className='text-[13px] text-[var(--text-tertiary)]'>Loading table...</span>
      </div>
    )
  }
  if (!tableData) {
    return (
      <div className='flex h-full items-center justify-center'>
        <span className='text-[13px] text-[var(--text-error)]'>Table not found</span>
      </div>
    )
  }
  return (
    <div className='flex h-full flex-col'>
      <HeaderBar
        tableName={tableData.name}
        totalCount={totalCount}
        isLoading={isLoadingRows}
        onNavigateBack={handleNavigateBack}
        onShowSchema={handleShowSchema}
        onRefresh={refetchRows}
      />
      <div className='flex shrink-0 flex-col gap-[8px] border-[var(--border)] border-b px-[16px] py-[10px]'>
        <QueryBuilder
          columns={columns}
          onApply={handleApplyQueryOptions}
          onAddRow={handleAddRow}
          isLoading={isLoadingRows}
        />
        {hasSelection && (
          <span className='text-[11px] text-[var(--text-tertiary)]'>{selectedCount} selected</span>
        )}
      </div>
      {hasSelection && (
        <ActionBar
          selectedCount={selectedCount}
          onDelete={handleDeleteSelected}
          onClearSelection={clearSelection}
        />
      )}
      <div className='flex-1 overflow-auto'>
        <Table>
          <TableHeader className='sticky top-0 z-10 bg-[var(--surface-3)]'>
            <TableRow>
              <TableHead className='w-[40px]'>
                <Checkbox size='sm' checked={isAllSelected} onCheckedChange={handleSelectAll} />
              </TableHead>
              {columns.map((column) => (
                <TableHead key={column.name}>
                  <div className='flex items-center gap-[6px]'>
                    <span className='text-[12px]'>{column.name}</span>
                    <Badge variant='outline' size='sm'>
                      {column.type}
                    </Badge>
                    {column.required && (
                      <span className='text-[10px] text-[var(--text-error)]'>*</span>
                    )}
                  </div>
                </TableHead>
              ))}
            </TableRow>
          </TableHeader>
          <TableBody>
            {isLoadingRows ? (
              <LoadingRows columns={columns} />
            ) : rows.length === 0 ? (
              <EmptyRows
                columnCount={columns.length}
                hasFilter={!!queryOptions.filter}
                onAddRow={handleAddRow}
              />
            ) : (
              rows.map((row) => (
                <TableRow
                  key={row.id}
                  className={cn(
                    'group hover:bg-[var(--surface-4)]',
                    selectedRows.has(row.id) && 'bg-[var(--surface-5)]'
                  )}
                  onContextMenu={(e) => handleRowContextMenu(e, row)}
                >
                  <TableCell>
                    <Checkbox
                      size='sm'
                      checked={selectedRows.has(row.id)}
                      onCheckedChange={() => handleSelectRow(row.id)}
                    />
                  </TableCell>
                  {columns.map((column) => (
                    <TableCell key={column.name}>
                      <div className='max-w-[300px] truncate text-[13px]'>
                        <CellRenderer
                          value={row.data[column.name]}
                          column={column}
                          onCellClick={handleCellClick}
                        />
                      </div>
                    </TableCell>
                  ))}
                </TableRow>
              ))
            )}
          </TableBody>
        </Table>
      </div>
      {/* Clamp next-page to >= 0 so an empty result set (totalPages === 0)
          can never drive currentPage negative. */}
      <Pagination
        currentPage={currentPage}
        totalPages={totalPages}
        totalCount={totalCount}
        onPreviousPage={() => setCurrentPage((p) => Math.max(0, p - 1))}
        onNextPage={() => setCurrentPage((p) => Math.min(Math.max(totalPages - 1, 0), p + 1))}
      />
      {showAddModal && (
        <RowModal
          mode='add'
          isOpen={true}
          onClose={() => setShowAddModal(false)}
          table={tableData}
          onSuccess={() => {
            setShowAddModal(false)
          }}
        />
      )}
      {editingRow && (
        <RowModal
          mode='edit'
          isOpen={true}
          onClose={() => setEditingRow(null)}
          table={tableData}
          row={editingRow}
          onSuccess={() => {
            setEditingRow(null)
          }}
        />
      )}
      {deletingRows.length > 0 && (
        <RowModal
          mode='delete'
          isOpen={true}
          onClose={() => setDeletingRows([])}
          table={tableData}
          rowIds={deletingRows}
          onSuccess={() => {
            setDeletingRows([])
            clearSelection()
          }}
        />
      )}
      <SchemaModal
        isOpen={showSchemaModal}
        onClose={() => setShowSchemaModal(false)}
        columns={columns}
        tableName={tableData.name}
      />
      <CellViewerModal
        cellViewer={cellViewer}
        onClose={() => setCellViewer(null)}
        onCopy={handleCopyCellValue}
        copied={copied}
      />
      <ContextMenu
        contextMenu={contextMenu}
        onClose={closeContextMenu}
        onEdit={handleContextMenuEdit}
        onDelete={handleContextMenuDelete}
      />
    </div>
  )
}

View File

@@ -0,0 +1,71 @@
'use client'
import { useEffect } from 'react'
import { createLogger } from '@sim/logger'
import { AlertTriangle, ArrowLeft, RefreshCw } from 'lucide-react'
import { useParams, useRouter } from 'next/navigation'
import { Button } from '@/components/emcn'
const logger = createLogger('TableViewerError')
interface TableViewerErrorProps {
  error: Error & { digest?: string }
  reset: () => void
}

/**
 * Route-level error UI for the table viewer. Logs the failure (with the
 * Next.js digest when present) and offers navigation back to the tables
 * list or a retry via `reset`.
 */
export default function TableViewerError({ error, reset }: TableViewerErrorProps) {
  const router = useRouter()
  const params = useParams()
  const workspaceId = params.workspaceId as string

  // Shared by the header link and the "Go back" button.
  const goBackToTables = () => router.push(`/workspace/${workspaceId}/tables`)

  useEffect(() => {
    logger.error('Table viewer error:', { error: error.message, digest: error.digest })
  }, [error])

  return (
    <div className='flex h-full flex-1 flex-col'>
      {/* Header */}
      <div className='flex h-[48px] shrink-0 items-center border-[var(--border)] border-b px-[16px]'>
        <button
          onClick={goBackToTables}
          className='flex items-center gap-[6px] text-[13px] text-[var(--text-tertiary)] transition-colors hover:text-[var(--text-primary)]'
        >
          <ArrowLeft className='h-[14px] w-[14px]' />
          Back to Tables
        </button>
      </div>
      {/* Error Content */}
      <div className='flex flex-1 items-center justify-center'>
        <div className='flex flex-col items-center gap-[16px] text-center'>
          <div className='flex h-[48px] w-[48px] items-center justify-center rounded-full bg-[var(--surface-4)]'>
            <AlertTriangle className='h-[24px] w-[24px] text-[var(--text-error)]' />
          </div>
          <div className='flex flex-col gap-[8px]'>
            <h2 className='font-semibold text-[16px] text-[var(--text-primary)]'>
              Failed to load table
            </h2>
            <p className='max-w-[300px] text-[13px] text-[var(--text-tertiary)]'>
              Something went wrong while loading this table. The table may have been deleted or you
              may not have permission to view it.
            </p>
          </div>
          <div className='flex items-center gap-[8px]'>
            <Button variant='ghost' size='sm' onClick={goBackToTables}>
              <ArrowLeft className='mr-[6px] h-[14px] w-[14px]' />
              Go back
            </Button>
            <Button variant='default' size='sm' onClick={reset}>
              <RefreshCw className='mr-[6px] h-[14px] w-[14px]' />
              Try again
            </Button>
          </div>
        </div>
      </div>
    </div>
  )
}

View File

@@ -0,0 +1,3 @@
export * from './use-context-menu'
export * from './use-row-selection'
export * from './use-table-data'

View File

@@ -0,0 +1,37 @@
import { useCallback, useState } from 'react'
import type { TableRow } from '@/lib/table'
import type { ContextMenuState } from '../lib/types'
interface UseContextMenuReturn {
  contextMenu: ContextMenuState
  handleRowContextMenu: (e: React.MouseEvent, row: TableRow) => void
  closeContextMenu: () => void
}

/**
 * Tracks right-click context-menu state for table rows: whether the menu is
 * open, where to render it, and which row it targets.
 */
export function useContextMenu(): UseContextMenuReturn {
  const [contextMenu, setContextMenu] = useState<ContextMenuState>({
    isOpen: false,
    position: { x: 0, y: 0 },
    row: null,
  })

  // Open the menu at the cursor for the row under it; suppress the browser's
  // native context menu and stop the event from bubbling further.
  const handleRowContextMenu = useCallback((event: React.MouseEvent, row: TableRow) => {
    event.preventDefault()
    event.stopPropagation()
    const position = { x: event.clientX, y: event.clientY }
    setContextMenu({ isOpen: true, position, row })
  }, [])

  // Hide the menu while keeping position/row from the last open.
  const closeContextMenu = useCallback(() => {
    setContextMenu((previous) => ({ ...previous, isOpen: false }))
  }, [])

  return { contextMenu, handleRowContextMenu, closeContextMenu }
}

View File

@@ -0,0 +1,65 @@
import { useCallback, useMemo, useState } from 'react'
import type { TableRow } from '@/lib/table'
interface UseRowSelectionReturn {
  selectedRows: Set<string>
  handleSelectAll: () => void
  handleSelectRow: (rowId: string) => void
  clearSelection: () => void
}
/**
 * Manages the set of selected row ids for the table grid.
 *
 * Selection is pruned whenever the visible rows change (pagination, filter,
 * refetch) so that ids of rows no longer rendered cannot linger in state.
 */
export function useRowSelection(rows: TableRow[]): UseRowSelectionReturn {
  const [selectedRows, setSelectedRows] = useState<Set<string>>(new Set())
  const [prevRowsSignature, setPrevRowsSignature] = useState('')
  const currentRowIds = useMemo(() => new Set(rows.map((r) => r.id)), [rows])
  const rowsSignature = useMemo(() => rows.map((r) => r.id).join('|'), [rows])
  // Adjust state during render (React's sanctioned "derived state" pattern,
  // not an effect): when the visible row set changes, drop selections for rows
  // that disappeared. Returning the previous Set when nothing was removed
  // avoids a redundant re-render.
  if (rowsSignature !== prevRowsSignature) {
    setPrevRowsSignature(rowsSignature)
    setSelectedRows((prev) => {
      if (prev.size === 0) return prev
      const filtered = new Set([...prev].filter((id) => currentRowIds.has(id)))
      return filtered.size !== prev.size ? filtered : prev
    })
  }
  // Expose only selections that are actually visible, even in the render
  // before the prune above has committed.
  const visibleSelectedRows = useMemo(
    () => new Set([...selectedRows].filter((id) => currentRowIds.has(id))),
    [selectedRows, currentRowIds]
  )
  // Toggle between "all visible rows selected" and "none selected".
  const handleSelectAll = useCallback(() => {
    if (visibleSelectedRows.size === rows.length) {
      setSelectedRows(new Set())
    } else {
      setSelectedRows(new Set(rows.map((r) => r.id)))
    }
  }, [rows, visibleSelectedRows.size])
  // Toggle one row, pruning any stale (no longer visible) ids along the way.
  const handleSelectRow = useCallback(
    (rowId: string) => {
      setSelectedRows((prev) => {
        const newSet = new Set([...prev].filter((id) => currentRowIds.has(id)))
        if (newSet.has(rowId)) {
          newSet.delete(rowId)
        } else {
          newSet.add(rowId)
        }
        return newSet
      })
    },
    [currentRowIds]
  )
  const clearSelection = useCallback(() => {
    setSelectedRows(new Set())
  }, [])
  return {
    selectedRows: visibleSelectedRows,
    handleSelectAll,
    handleSelectRow,
    clearSelection,
  }
}

View File

@@ -0,0 +1,58 @@
import type { TableDefinition, TableRow } from '@/lib/table'
import { useTable, useTableRows } from '@/hooks/queries/tables'
import { ROWS_PER_PAGE } from '../lib/constants'
import type { QueryOptions } from '../lib/types'
interface UseTableDataParams {
  workspaceId: string
  tableId: string
  queryOptions: QueryOptions
  currentPage: number
}
interface UseTableDataReturn {
  tableData: TableDefinition | undefined
  isLoadingTable: boolean
  rows: TableRow[]
  totalCount: number
  totalPages: number
  isLoadingRows: boolean
  refetchRows: () => void
}
/**
 * Fetches a table's definition plus one page of its rows, applying the
 * current filter/sort and deriving pagination totals from the row count.
 */
export function useTableData({
  workspaceId,
  tableId,
  queryOptions,
  currentPage,
}: UseTableDataParams): UseTableDataReturn {
  const tableQuery = useTable(workspaceId, tableId)
  // Row fetch is paged by ROWS_PER_PAGE and disabled until both ids exist.
  const rowsQuery = useTableRows({
    workspaceId,
    tableId,
    limit: ROWS_PER_PAGE,
    offset: currentPage * ROWS_PER_PAGE,
    filter: queryOptions.filter,
    sort: queryOptions.sort,
    enabled: Boolean(workspaceId && tableId),
  })
  const rows = (rowsQuery.data?.rows || []) as TableRow[]
  const totalCount = rowsQuery.data?.totalCount || 0
  return {
    tableData: tableQuery.data,
    isLoadingTable: tableQuery.isLoading,
    rows,
    totalCount,
    totalPages: Math.ceil(totalCount / ROWS_PER_PAGE),
    isLoadingRows: rowsQuery.isLoading,
    refetchRows: rowsQuery.refetch,
  }
}

View File

@@ -0,0 +1,2 @@
// Page size for the table-rows query: used as both the fetch limit/offset
// multiplier and the divisor for total-page math.
export const ROWS_PER_PAGE = 100
// Character limit for truncating long string cell values — presumably applied
// by the cell renderer; confirm at the call site.
export const STRING_TRUNCATE_LENGTH = 50

View File

@@ -0,0 +1,3 @@
export * from './constants'
export * from './types'
export * from './utils'

View File

@@ -0,0 +1,27 @@
import type { Filter, Sort, TableRow } from '@/lib/table'
/**
 * Query options for filtering and sorting table data
 */
export interface QueryOptions {
  // Active row filter, or null when no filter is applied.
  filter: Filter | null
  // Active sort order, or null for the default ordering.
  sort: Sort | null
}
/**
 * Data for viewing a cell's full content in a modal
 */
export interface CellViewerData {
  // Name of the column the cell belongs to.
  columnName: string
  // Raw cell value; its runtime shape depends on `type`.
  value: unknown
  // Column type of the cell, which controls rendering and copy formatting.
  type: 'json' | 'text' | 'date' | 'boolean' | 'number'
}
/**
 * State for the row context menu (right-click)
 */
export interface ContextMenuState {
  // Whether the menu is currently visible.
  isOpen: boolean
  // Viewport coordinates (from the triggering mouse event) to render at.
  position: { x: number; y: number }
  // Row the menu was opened on, or null before any menu has opened.
  row: TableRow | null
}

View File

@@ -0,0 +1,21 @@
type BadgeVariant = 'green' | 'blue' | 'purple' | 'orange' | 'teal' | 'gray'

// Lookup from column type to badge color. A Map (rather than a plain object)
// keeps inherited keys like 'toString' from matching accidentally.
const TYPE_BADGE_VARIANTS = new Map<string, BadgeVariant>([
  ['string', 'green'],
  ['number', 'blue'],
  ['boolean', 'purple'],
  ['json', 'orange'],
  ['date', 'teal'],
])

/**
 * Returns the appropriate badge color variant for a column type.
 * Unknown types fall back to 'gray'.
 */
export function getTypeBadgeVariant(type: string): BadgeVariant {
  return TYPE_BADGE_VARIANTS.get(type) ?? 'gray'
}

View File

@@ -0,0 +1,5 @@
import { TableViewer } from './components'
/** Page component for a single table route; all logic lives in TableViewer. */
export default function TablePage() {
  return <TableViewer />
}

View File

@@ -0,0 +1,330 @@
'use client'
import { useState } from 'react'
import { createLogger } from '@sim/logger'
import { Plus, Trash2 } from 'lucide-react'
import { nanoid } from 'nanoid'
import { useParams } from 'next/navigation'
import {
Button,
Checkbox,
Combobox,
Input,
Label,
Modal,
ModalBody,
ModalContent,
ModalFooter,
ModalHeader,
Textarea,
} from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import type { ColumnDefinition } from '@/lib/table'
import { useCreateTable } from '@/hooks/queries/tables'
const logger = createLogger('CreateModal')
interface CreateModalProps {
  // Controls modal visibility.
  isOpen: boolean
  // Invoked when the modal should close (cancel, backdrop, or after create).
  onClose: () => void
}
// Selectable column types for the create-table form, in display order.
const COLUMN_TYPE_OPTIONS: Array<{ value: ColumnDefinition['type']; label: string }> = [
  { value: 'string', label: 'String' },
  { value: 'number', label: 'Number' },
  { value: 'boolean', label: 'Boolean' },
  { value: 'date', label: 'Date' },
  { value: 'json', label: 'JSON' },
]
// Column definition extended with a client-only id used as a stable React
// key; the id is stripped before the schema is sent to the API.
interface ColumnWithId extends ColumnDefinition {
  id: string
}
// Returns a fresh, required string column with a unique client-side id.
function createEmptyColumn(): ColumnWithId {
  return { id: nanoid(), name: '', type: 'string', required: true, unique: false }
}
/**
 * Modal form for creating a new table: a name, an optional description, and
 * one or more column definitions (type + required/unique flags).
 *
 * Validates client-side (non-empty name, at least one named column, no
 * case-insensitive duplicate column names) before calling the create
 * mutation; API failures are surfaced inline.
 */
export function CreateModal({ isOpen, onClose }: CreateModalProps) {
  const params = useParams()
  const workspaceId = params.workspaceId as string
  const [tableName, setTableName] = useState('')
  const [description, setDescription] = useState('')
  const [columns, setColumns] = useState<ColumnWithId[]>([createEmptyColumn()])
  const [error, setError] = useState<string | null>(null)
  const createTable = useCreateTable(workspaceId)
  // Append a blank column row to the form.
  const handleAddColumn = () => {
    setColumns([...columns, createEmptyColumn()])
  }
  // Remove a column row; the form always keeps at least one.
  const handleRemoveColumn = (columnId: string) => {
    if (columns.length > 1) {
      setColumns(columns.filter((col) => col.id !== columnId))
    }
  }
  // Patch a single field on the column identified by columnId.
  const handleColumnChange = (
    columnId: string,
    field: keyof ColumnDefinition,
    value: string | boolean
  ) => {
    setColumns(columns.map((col) => (col.id === columnId ? { ...col, [field]: value } : col)))
  }
  // Validate the form, then create the table; resets and closes on success.
  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault()
    setError(null)
    if (!tableName.trim()) {
      setError('Table name is required')
      return
    }
    // Validate column names (columns with blank names are silently dropped)
    const validColumns = columns.filter((col) => col.name.trim())
    if (validColumns.length === 0) {
      setError('At least one column is required')
      return
    }
    // Check for duplicate column names (case-insensitive)
    const columnNames = validColumns.map((col) => col.name.toLowerCase())
    const uniqueNames = new Set(columnNames)
    if (uniqueNames.size !== columnNames.length) {
      setError('Duplicate column names found')
      return
    }
    // Strip internal IDs before sending to API
    const columnsForApi = validColumns.map(({ id: _id, ...col }) => col)
    try {
      await createTable.mutateAsync({
        name: tableName,
        description: description || undefined,
        schema: {
          columns: columnsForApi,
        },
      })
      // Reset form
      resetForm()
      onClose()
    } catch (err) {
      logger.error('Failed to create table:', err)
      setError(err instanceof Error ? err.message : 'Failed to create table')
    }
  }
  // Restore the pristine form state (one empty column, no error).
  const resetForm = () => {
    setTableName('')
    setDescription('')
    setColumns([createEmptyColumn()])
    setError(null)
  }
  // Reset and close without creating.
  const handleClose = () => {
    resetForm()
    onClose()
  }
  return (
    <Modal open={isOpen} onOpenChange={handleClose}>
      <ModalContent size='lg'>
        <ModalHeader>Create Table</ModalHeader>
        <form onSubmit={handleSubmit} className='flex min-h-0 flex-1 flex-col'>
          <ModalBody>
            <div className='min-h-0 flex-1 overflow-y-auto'>
              <div className='space-y-[12px]'>
                <p className='text-[12px] text-[var(--text-tertiary)]'>
                  Define your table schema with columns and constraints.
                </p>
                {error && (
                  <p className='text-[12px] text-[var(--text-error)] leading-tight'>{error}</p>
                )}
                <div className='flex flex-col gap-[8px]'>
                  <Label htmlFor='tableName'>Name</Label>
                  <Input
                    id='tableName'
                    value={tableName}
                    onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
                      setTableName(e.target.value)
                    }
                    placeholder='customers, orders, products'
                    className={cn(
                      error === 'Table name is required' && 'border-[var(--text-error)]'
                    )}
                    required
                  />
                  <p className='text-[11px] text-[var(--text-muted)]'>
                    Use lowercase with underscores (e.g., customer_orders)
                  </p>
                </div>
                <div className='flex flex-col gap-[8px]'>
                  <Label htmlFor='description'>Description</Label>
                  <Textarea
                    id='description'
                    value={description}
                    onChange={(e: React.ChangeEvent<HTMLTextAreaElement>) =>
                      setDescription(e.target.value)
                    }
                    placeholder='Optional description for this table'
                    rows={3}
                    className='resize-none'
                  />
                </div>
                <div className='space-y-[8px]'>
                  <div className='flex items-center justify-between'>
                    <Label>Columns*</Label>
                    <Button
                      type='button'
                      size='sm'
                      variant='default'
                      onClick={handleAddColumn}
                      className='h-[30px] rounded-[6px] px-[12px] text-[12px]'
                    >
                      <Plus className='mr-[4px] h-[14px] w-[14px]' />
                      Add Column
                    </Button>
                  </div>
                  <div className='space-y-[8px]'>
                    {columns.map((column, index) => (
                      <ColumnRow
                        key={column.id}
                        index={index}
                        column={column}
                        isRemovable={columns.length > 1}
                        onChange={handleColumnChange}
                        onRemove={handleRemoveColumn}
                      />
                    ))}
                  </div>
                  <p className='text-[11px] text-[var(--text-muted)]'>
                    Mark columns as <span className='font-medium'>unique</span> to prevent duplicate
                    values (e.g., id, email)
                  </p>
                </div>
              </div>
            </div>
          </ModalBody>
          <ModalFooter>
            <div className='flex w-full items-center justify-end gap-[8px]'>
              <Button
                type='button'
                variant='default'
                onClick={handleClose}
                disabled={createTable.isPending}
              >
                Cancel
              </Button>
              <Button
                type='submit'
                variant='tertiary'
                disabled={createTable.isPending}
                className='min-w-[120px]'
              >
                {createTable.isPending ? 'Creating...' : 'Create Table'}
              </Button>
            </div>
          </ModalFooter>
        </form>
      </ModalContent>
    </Modal>
  )
}
interface ColumnRowProps {
  // Zero-based position in the columns list (used only for the display label).
  index: number
  column: ColumnWithId
  // False when this is the last remaining column; disables the remove button.
  isRemovable: boolean
  onChange: (columnId: string, field: keyof ColumnDefinition, value: string | boolean) => void
  onRemove: (columnId: string) => void
}
/**
 * One editable column definition in the create-table form: name input, type
 * selector, and required/unique checkboxes, plus a remove button.
 */
function ColumnRow({ index, column, isRemovable, onChange, onRemove }: ColumnRowProps) {
  return (
    <div className='rounded-[6px] border border-[var(--border-1)] bg-[var(--surface-1)] p-[10px]'>
      <div className='mb-[8px] flex items-center justify-between'>
        <span className='font-medium text-[11px] text-[var(--text-tertiary)]'>
          Column {index + 1}
        </span>
        {/* Disabled (not hidden) when this is the only column left. */}
        <Button
          type='button'
          size='sm'
          variant='ghost'
          onClick={() => onRemove(column.id)}
          disabled={!isRemovable}
          className='h-[28px] w-[28px] p-0 text-[var(--text-tertiary)] transition-colors hover:bg-[var(--bg-error)] hover:text-[var(--text-error)]'
        >
          <Trash2 className='h-[15px] w-[15px]' />
        </Button>
      </div>
      <div className='grid grid-cols-[minmax(0,1fr)_120px_76px_76px] items-end gap-[10px]'>
        <div className='flex flex-col gap-[6px]'>
          <Label
            htmlFor={`column-name-${column.id}`}
            className='text-[11px] text-[var(--text-muted)]'
          >
            Name
          </Label>
          <Input
            id={`column-name-${column.id}`}
            value={column.name}
            onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
              onChange(column.id, 'name', e.target.value)
            }
            placeholder='column_name'
            className='h-[36px]'
          />
        </div>
        <div className='flex flex-col gap-[6px]'>
          <Label
            htmlFor={`column-type-${column.id}`}
            className='text-[11px] text-[var(--text-muted)]'
          >
            Type
          </Label>
          <Combobox
            options={COLUMN_TYPE_OPTIONS}
            value={column.type}
            selectedValue={column.type}
            onChange={(value) => onChange(column.id, 'type', value as ColumnDefinition['type'])}
            placeholder='Type'
            editable={false}
            filterOptions={false}
            className='h-[36px]'
          />
        </div>
        <div className='flex flex-col items-center gap-[8px]'>
          <span className='text-[11px] text-[var(--text-tertiary)]'>Required</span>
          <Checkbox
            checked={column.required}
            onCheckedChange={(checked) => onChange(column.id, 'required', checked === true)}
          />
        </div>
        <div className='flex flex-col items-center gap-[8px]'>
          <span className='text-[11px] text-[var(--text-tertiary)]'>Unique</span>
          <Checkbox
            checked={column.unique}
            onCheckedChange={(checked) => onChange(column.id, 'unique', checked === true)}
          />
        </div>
      </div>
    </div>
  )
}

View File

@@ -0,0 +1,20 @@
interface EmptyStateProps {
  hasSearchQuery: boolean
}

/**
 * Placeholder for an empty tables grid; the copy adapts to whether an active
 * search produced no matches or there are simply no tables yet.
 */
export function EmptyState({ hasSearchQuery }: EmptyStateProps) {
  const title = hasSearchQuery ? 'No tables found' : 'No tables yet'
  const hint = hasSearchQuery
    ? 'Try a different search term'
    : 'Create your first table to store structured data for your workflows'
  return (
    <div className='col-span-full flex h-64 items-center justify-center rounded-lg border border-muted-foreground/25 bg-muted/20'>
      <div className='text-center'>
        <p className='font-medium text-[var(--text-secondary)] text-sm'>{title}</p>
        <p className='mt-1 text-[var(--text-muted)] text-xs'>{hint}</p>
      </div>
    </div>
  )
}

View File

@@ -0,0 +1,16 @@
interface ErrorStateProps {
  error: unknown
}

/**
 * Fallback card shown when loading the tables list fails; surfaces the
 * error's message when one is available.
 */
export function ErrorState({ error }: ErrorStateProps) {
  const detail = error instanceof Error ? error.message : 'An error occurred'
  return (
    <div className='col-span-full flex h-64 items-center justify-center rounded-[4px] bg-[var(--surface-3)] dark:bg-[var(--surface-4)]'>
      <div className='text-center'>
        <p className='font-medium text-[var(--text-secondary)] text-sm'>Error loading tables</p>
        <p className='mt-1 text-[var(--text-muted)] text-xs'>{detail}</p>
      </div>
    </div>
  )
}

View File

@@ -0,0 +1,6 @@
export * from './create-modal'
export * from './empty-state'
export * from './error-state'
export * from './loading-state'
export * from './table-card'
export * from './tables-view'

View File

@@ -0,0 +1,31 @@
// Number of placeholder cards rendered while the tables list loads.
const SKELETON_CARD_COUNT = 8

/**
 * Pulsing skeleton cards that mirror the table-card layout while data loads.
 */
export function LoadingState() {
  const cards = Array.from({ length: SKELETON_CARD_COUNT }, (_, cardIndex) => (
    <div
      key={cardIndex}
      className='flex h-full flex-col gap-[12px] rounded-[4px] bg-[var(--surface-3)] px-[8px] py-[6px] dark:bg-[var(--surface-4)]'
    >
      <div className='flex items-center justify-between gap-[8px]'>
        <div className='h-[17px] w-[120px] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
        <div className='h-[22px] w-[90px] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
      </div>
      <div className='flex flex-1 flex-col gap-[8px]'>
        <div className='flex items-center justify-between'>
          <div className='flex items-center gap-[12px]'>
            <div className='h-[15px] w-[50px] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
            <div className='h-[15px] w-[50px] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
          </div>
          <div className='h-[15px] w-[60px] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
        </div>
        <div className='h-0 w-full border-[var(--divider)] border-t' />
        <div className='flex h-[36px] flex-col gap-[6px]'>
          <div className='h-[15px] w-full animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
          <div className='h-[15px] w-[75%] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
        </div>
      </div>
    </div>
  ))
  return <>{cards}</>
}

View File

@@ -0,0 +1,171 @@
'use client'
import { useCallback, useState } from 'react'
import { createLogger } from '@sim/logger'
import { Columns, Rows3 } from 'lucide-react'
import { useRouter } from 'next/navigation'
import {
Badge,
Button,
Modal,
ModalBody,
ModalContent,
ModalFooter,
ModalHeader,
Tooltip,
} from '@/components/emcn'
import type { TableDefinition } from '@/lib/table'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { TableContextMenu } from '@/app/workspace/[workspaceId]/tables/components/table-context-menu'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useDeleteTable } from '@/hooks/queries/tables'
import { SchemaModal } from '../[tableId]/components/schema-modal'
import { formatAbsoluteDate, formatRelativeTime } from '../lib/utils'
// Scoped logger so delete failures can be traced back to this component.
const logger = createLogger('TableCard')

interface TableCardProps {
  // Table to render; drives name, schema, row count, description, timestamps.
  table: TableDefinition
  // Owning workspace; used for routing and the delete mutation.
  workspaceId: string
}

/**
 * Card representing a single workspace table in the tables grid.
 *
 * Click (or Enter/Space) navigates to the table detail page. Right-click
 * opens a context menu with view-schema, copy-id, and delete actions;
 * delete is confirmed through a modal and disabled without edit permission.
 */
export function TableCard({ table, workspaceId }: TableCardProps) {
  const router = useRouter()
  const userPermissions = useUserPermissionsContext()
  const [isDeleteDialogOpen, setIsDeleteDialogOpen] = useState(false)
  const [isSchemaModalOpen, setIsSchemaModalOpen] = useState(false)
  const deleteTable = useDeleteTable(workspaceId)
  const {
    isOpen: isContextMenuOpen,
    position: contextMenuPosition,
    menuRef,
    handleContextMenu,
    closeMenu: closeContextMenu,
  } = useContextMenu()

  // Deletes the table, closing the dialog only on success; on failure the
  // dialog stays open and the error is logged (no user-facing toast here).
  const handleDelete = async () => {
    try {
      await deleteTable.mutateAsync(table.id)
      setIsDeleteDialogOpen(false)
    } catch (error) {
      logger.error('Failed to delete table:', error)
    }
  }

  const navigateToTable = useCallback(() => {
    router.push(`/workspace/${workspaceId}/tables/${table.id}`)
  }, [router, workspaceId, table.id])

  const columnCount = table.schema.columns.length
  // Short display id, e.g. "tb-1a2b3c4d" (first 8 chars of the table id).
  const shortId = `tb-${table.id.slice(0, 8)}`

  return (
    <>
      <div
        role='button'
        tabIndex={0}
        data-table-card
        className='h-full cursor-pointer'
        onClick={(e) => {
          // Don't navigate when a right-click menu is open for this card.
          if (isContextMenuOpen) {
            e.preventDefault()
            return
          }
          navigateToTable()
        }}
        onKeyDown={(e) => {
          // Keyboard activation parity with the click handler.
          if (e.key === 'Enter' || e.key === ' ') {
            e.preventDefault()
            navigateToTable()
          }
        }}
        onContextMenu={handleContextMenu}
      >
        <div className='group flex h-full flex-col gap-[12px] rounded-[4px] bg-[var(--surface-3)] px-[8px] py-[6px] transition-colors hover:bg-[var(--surface-4)] dark:bg-[var(--surface-4)] dark:hover:bg-[var(--surface-5)]'>
          <div className='flex items-center justify-between gap-[8px]'>
            <h3 className='min-w-0 flex-1 truncate font-medium text-[14px] text-[var(--text-primary)]'>
              {table.name}
            </h3>
            <Badge className='flex-shrink-0 rounded-[4px] text-[12px]'>{shortId}</Badge>
          </div>
          <div className='flex flex-1 flex-col gap-[8px]'>
            <div className='flex items-center justify-between'>
              {/* Column and row counts with singular/plural labels */}
              <div className='flex items-center gap-[12px] text-[12px] text-[var(--text-tertiary)]'>
                <span className='flex items-center gap-[4px]'>
                  <Columns className='h-[12px] w-[12px]' />
                  {columnCount} {columnCount === 1 ? 'col' : 'cols'}
                </span>
                <span className='flex items-center gap-[4px]'>
                  <Rows3 className='h-[12px] w-[12px]' />
                  {table.rowCount} {table.rowCount === 1 ? 'row' : 'rows'}
                </span>
              </div>
              {/* Relative timestamp with the absolute date in a tooltip */}
              <Tooltip.Root>
                <Tooltip.Trigger asChild>
                  <span className='text-[12px] text-[var(--text-tertiary)]'>
                    {formatRelativeTime(table.updatedAt)}
                  </span>
                </Tooltip.Trigger>
                <Tooltip.Content>{formatAbsoluteDate(table.updatedAt)}</Tooltip.Content>
              </Tooltip.Root>
            </div>
            <div className='h-0 w-full border-[var(--divider)] border-t' />
            <p className='line-clamp-2 h-[36px] text-[12px] text-[var(--text-tertiary)] leading-[18px]'>
              {table.description || 'No description'}
            </p>
          </div>
        </div>
      </div>
      <TableContextMenu
        isOpen={isContextMenuOpen}
        position={contextMenuPosition}
        menuRef={menuRef}
        onClose={closeContextMenu}
        onViewSchema={() => setIsSchemaModalOpen(true)}
        onCopyId={() => navigator.clipboard.writeText(table.id)}
        onDelete={() => setIsDeleteDialogOpen(true)}
        disableDelete={userPermissions.canEdit !== true}
      />
      {/* Delete Confirmation Modal */}
      <Modal open={isDeleteDialogOpen} onOpenChange={setIsDeleteDialogOpen}>
        <ModalContent className='w-[400px]'>
          <ModalHeader>Delete Table</ModalHeader>
          <ModalBody>
            <p className='text-[12px] text-[var(--text-secondary)]'>
              Are you sure you want to delete{' '}
              <span className='font-medium text-[var(--text-primary)]'>{table.name}</span>? This
              will permanently delete all {table.rowCount} rows.{' '}
              <span className='text-[var(--text-error)]'>This action cannot be undone.</span>
            </p>
          </ModalBody>
          <ModalFooter>
            <Button
              variant='default'
              onClick={() => setIsDeleteDialogOpen(false)}
              disabled={deleteTable.isPending}
            >
              Cancel
            </Button>
            <Button variant='default' onClick={handleDelete} disabled={deleteTable.isPending}>
              {deleteTable.isPending ? 'Deleting...' : 'Delete'}
            </Button>
          </ModalFooter>
        </ModalContent>
      </Modal>
      {/* Schema Viewer Modal */}
      <SchemaModal
        isOpen={isSchemaModalOpen}
        onClose={() => setIsSchemaModalOpen(false)}
        columns={table.schema.columns}
        tableName={table.name}
      />
    </>
  )
}

View File

@@ -0,0 +1,85 @@
'use client'
import {
Popover,
PopoverAnchor,
PopoverContent,
PopoverDivider,
PopoverItem,
} from '@/components/emcn'
interface TableContextMenuProps {
  isOpen: boolean
  // Viewport coordinates of the right-click that opened the menu.
  position: { x: number; y: number }
  menuRef: React.RefObject<HTMLDivElement | null>
  onClose: () => void
  // Each action is optional; omitted actions are not rendered at all.
  onViewSchema?: () => void
  onCopyId?: () => void
  onDelete?: () => void
  disableDelete?: boolean
}

/**
 * Context menu for a single table card.
 *
 * Anchored to the cursor position via a fixed 1x1 popover anchor. Dividers
 * are only rendered between two actions that are both present.
 */
export function TableContextMenu({
  isOpen,
  position,
  menuRef,
  onClose,
  onViewSchema,
  onCopyId,
  onDelete,
  disableDelete = false,
}: TableContextMenuProps) {
  // Pin the popover anchor to where the user right-clicked.
  const anchorStyle: React.CSSProperties = {
    position: 'fixed',
    left: `${position.x}px`,
    top: `${position.y}px`,
    width: '1px',
    height: '1px',
  }

  // Wraps an action so the menu dismisses itself after the action runs.
  const runAndClose = (action: () => void) => () => {
    action()
    onClose()
  }

  const handleOpenChange = (open: boolean) => {
    if (!open) onClose()
  }

  return (
    <Popover open={isOpen} onOpenChange={handleOpenChange} variant='secondary' size='sm'>
      <PopoverAnchor style={anchorStyle} />
      <PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
        {onViewSchema && (
          <PopoverItem onClick={runAndClose(onViewSchema)}>View Schema</PopoverItem>
        )}
        {onViewSchema && (onCopyId || onDelete) && <PopoverDivider />}
        {onCopyId && <PopoverItem onClick={runAndClose(onCopyId)}>Copy ID</PopoverItem>}
        {onCopyId && onDelete && <PopoverDivider />}
        {onDelete && (
          <PopoverItem disabled={disableDelete} onClick={runAndClose(onDelete)}>
            Delete
          </PopoverItem>
        )}
      </PopoverContent>
    </Popover>
  )
}

View File

@@ -0,0 +1,53 @@
'use client'
import { Popover, PopoverAnchor, PopoverContent, PopoverItem } from '@/components/emcn'
interface TablesListContextMenuProps {
  isOpen: boolean
  // Viewport coordinates of the right-click that opened the menu.
  position: { x: number; y: number }
  menuRef: React.RefObject<HTMLDivElement | null>
  onClose: () => void
  onCreateTable?: () => void
  disableCreate?: boolean
}

/**
 * Context menu for the tables list background (outside any card).
 * Currently exposes a single "Create table" action.
 */
export function TablesListContextMenu({
  isOpen,
  position,
  menuRef,
  onClose,
  onCreateTable,
  disableCreate = false,
}: TablesListContextMenuProps) {
  // Pin the popover anchor to where the user right-clicked.
  const anchorStyle: React.CSSProperties = {
    position: 'fixed',
    left: `${position.x}px`,
    top: `${position.y}px`,
    width: '1px',
    height: '1px',
  }

  const handleOpenChange = (open: boolean) => {
    if (!open) onClose()
  }

  const handleCreate = () => {
    onCreateTable?.()
    onClose()
  }

  return (
    <Popover open={isOpen} onOpenChange={handleOpenChange} variant='secondary' size='sm'>
      <PopoverAnchor style={anchorStyle} />
      <PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
        {onCreateTable && (
          <PopoverItem disabled={disableCreate} onClick={handleCreate}>
            Create table
          </PopoverItem>
        )}
      </PopoverContent>
    </Popover>
  )
}

View File

@@ -0,0 +1,141 @@
'use client'
import { useCallback, useState } from 'react'
import { Database, Plus, Search } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Button, Input, Tooltip } from '@/components/emcn'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useTablesList } from '@/hooks/queries/tables'
import { useDebounce } from '@/hooks/use-debounce'
import { CreateModal } from './create-modal'
import { EmptyState } from './empty-state'
import { ErrorState } from './error-state'
import { LoadingState } from './loading-state'
import { TableCard } from './table-card'
import { TablesListContextMenu } from './tables-list-context-menu'
/**
 * Main tables page view: header, debounced search, create action, and the
 * table-card grid with loading / error / empty states.
 *
 * Right-clicking empty grid space opens a context menu with a create action;
 * right-clicks on cards or interactive elements are left to their own handlers.
 */
export function TablesView() {
  const params = useParams()
  const workspaceId = params.workspaceId as string
  const userPermissions = useUserPermissionsContext()
  const { data: tables = [], isLoading, error } = useTablesList(workspaceId)
  const [searchQuery, setSearchQuery] = useState('')
  // Debounce so the list isn't re-filtered on every keystroke.
  const debouncedSearchQuery = useDebounce(searchQuery, 300)
  const [isCreateModalOpen, setIsCreateModalOpen] = useState(false)
  const {
    isOpen: isListContextMenuOpen,
    position: listContextMenuPosition,
    menuRef: listMenuRef,
    handleContextMenu: handleListContextMenu,
    closeMenu: closeListContextMenu,
  } = useContextMenu()

  // Only open the list context menu on background right-clicks — not on
  // cards (they have their own menu) or interactive elements.
  const handleContentContextMenu = useCallback(
    (e: React.MouseEvent) => {
      const target = e.target as HTMLElement
      const isOnCard = target.closest('[data-table-card]')
      const isOnInteractive = target.closest('button, input, a, [role="button"]')
      if (!isOnCard && !isOnInteractive) {
        handleListContextMenu(e)
      }
    },
    [handleListContextMenu]
  )

  // Filter tables by the debounced search query (name or description).
  const filteredTables = tables.filter((table) => {
    if (!debouncedSearchQuery) return true
    const query = debouncedSearchQuery.toLowerCase()
    return (
      table.name.toLowerCase().includes(query) || table.description?.toLowerCase().includes(query)
    )
  })

  return (
    <>
      <div className='flex h-full flex-1 flex-col'>
        <div className='flex flex-1 overflow-hidden'>
          <div
            className='flex flex-1 flex-col overflow-auto bg-white px-[24px] pt-[28px] pb-[24px] dark:bg-[var(--bg)]'
            onContextMenu={handleContentContextMenu}
          >
            {/* Header */}
            <div>
              <div className='flex items-start gap-[12px]'>
                <div className='flex h-[26px] w-[26px] items-center justify-center rounded-[6px] border border-[#3B82F6] bg-[#EFF6FF] dark:border-[#1E40AF] dark:bg-[#1E3A5F]'>
                  <Database className='h-[14px] w-[14px] text-[#3B82F6] dark:text-[#60A5FA]' />
                </div>
                <h1 className='font-medium text-[18px]'>Tables</h1>
              </div>
              <p className='mt-[10px] text-[14px] text-[var(--text-tertiary)]'>
                Create and manage data tables for your workflows.
              </p>
            </div>
            {/* Search and Actions */}
            <div className='mt-[14px] flex items-center justify-between'>
              <div className='flex h-[32px] w-[400px] items-center gap-[6px] rounded-[8px] bg-[var(--surface-4)] px-[8px]'>
                <Search className='h-[14px] w-[14px] text-[var(--text-subtle)]' />
                <Input
                  placeholder='Search'
                  value={searchQuery}
                  onChange={(e) => setSearchQuery(e.target.value)}
                  className='flex-1 border-0 bg-transparent px-0 font-medium text-[var(--text-secondary)] text-small leading-none placeholder:text-[var(--text-subtle)] focus-visible:ring-0 focus-visible:ring-offset-0'
                />
              </div>
              <div className='flex items-center gap-[8px]'>
                <Tooltip.Root>
                  <Tooltip.Trigger asChild>
                    <Button
                      onClick={() => setIsCreateModalOpen(true)}
                      disabled={userPermissions.canEdit !== true}
                      variant='tertiary'
                      className='h-[32px] rounded-[6px]'
                    >
                      <Plus className='mr-[6px] h-[14px] w-[14px]' />
                      Create Table
                    </Button>
                  </Tooltip.Trigger>
                  {userPermissions.canEdit !== true && (
                    <Tooltip.Content>Write permission required to create tables</Tooltip.Content>
                  )}
                </Tooltip.Root>
              </div>
            </div>
            {/* Content */}
            <div className='mt-[24px] grid grid-cols-1 gap-[20px] md:grid-cols-2 lg:grid-cols-3 xl:grid-cols-4'>
              {isLoading ? (
                <LoadingState />
              ) : error ? (
                <ErrorState error={error} />
              ) : filteredTables.length === 0 ? (
                // Use the DEBOUNCED query here: the list above is filtered by
                // it, so keying the message to the live input would briefly
                // show the wrong empty-state (e.g. right after clearing the
                // search while the stale filter is still applied).
                <EmptyState hasSearchQuery={!!debouncedSearchQuery} />
              ) : (
                filteredTables.map((table) => (
                  <TableCard key={table.id} table={table} workspaceId={workspaceId} />
                ))
              )}
            </div>
          </div>
        </div>
      </div>
      <TablesListContextMenu
        isOpen={isListContextMenuOpen}
        position={listContextMenuPosition}
        menuRef={listMenuRef}
        onClose={closeListContextMenu}
        onCreateTable={() => setIsCreateModalOpen(true)}
        disableCreate={userPermissions.canEdit !== true}
      />
      <CreateModal isOpen={isCreateModalOpen} onClose={() => setIsCreateModalOpen(false)} />
    </>
  )
}

View File

@@ -0,0 +1,41 @@
'use client'
import { useEffect } from 'react'
import { createLogger } from '@sim/logger'
import { AlertTriangle, RefreshCw } from 'lucide-react'
import { Button } from '@/components/emcn'
// Scoped logger for error-boundary reporting on this route.
const logger = createLogger('TablesError')

interface TablesErrorProps {
  // Error caught by the Next.js route error boundary; `digest` is the
  // server-generated hash Next attaches to server-side errors.
  error: Error & { digest?: string }
  // Re-renders the route segment to retry.
  reset: () => void
}

/**
 * Route-level error boundary UI for the tables page.
 * Logs the error (message + digest) once per error instance and offers a
 * retry button wired to Next's `reset`.
 */
export default function TablesError({ error, reset }: TablesErrorProps) {
  useEffect(() => {
    logger.error('Tables error:', { error: error.message, digest: error.digest })
  }, [error])
  return (
    <div className='flex h-full flex-1 items-center justify-center bg-white dark:bg-[var(--bg)]'>
      <div className='flex flex-col items-center gap-[16px] text-center'>
        <div className='flex h-[48px] w-[48px] items-center justify-center rounded-full bg-[var(--surface-4)]'>
          <AlertTriangle className='h-[24px] w-[24px] text-[var(--text-error)]' />
        </div>
        <div className='flex flex-col gap-[8px]'>
          <h2 className='font-semibold text-[16px] text-[var(--text-primary)]'>
            Failed to load tables
          </h2>
          <p className='max-w-[300px] text-[13px] text-[var(--text-tertiary)]'>
            Something went wrong while loading the tables. Please try again.
          </p>
        </div>
        <Button variant='default' size='sm' onClick={reset}>
          <RefreshCw className='mr-[6px] h-[14px] w-[14px]' />
          Try again
        </Button>
      </div>
    </div>
  )
}

View File

@@ -0,0 +1,7 @@
/**
 * Layout wrapper for the tables section.
 *
 * Reserves room for the fixed workspace sidebar via the `--sidebar-width`
 * CSS variable and clips overflow so the inner view manages its own scroll.
 */
export default function TablesLayout({ children }: { children: React.ReactNode }) {
  return (
    <div className='flex h-full flex-1 flex-col overflow-hidden pl-[var(--sidebar-width)]'>
      {children}
    </div>
  )
}

View File

@@ -0,0 +1 @@
/** Re-export the tables-page utility helpers (date formatting). */
export * from './utils'

View File

@@ -0,0 +1,32 @@
/**
 * Formats a date as relative time (e.g., "5m ago", "2d ago").
 *
 * @param dateValue - A `Date` instance or a string parseable by `new Date()`.
 * @returns Relative-time label; `'just now'` for anything under a minute
 *   (including future dates, whose negative deltas compare below 60), or an
 *   empty string when the input cannot be parsed.
 */
export function formatRelativeTime(dateValue: string | Date): string {
  // Accept both forms directly — the original round-tripped Date inputs
  // through toISOString() only to re-parse them immediately.
  const date = dateValue instanceof Date ? dateValue : new Date(dateValue)
  const time = date.getTime()
  // Guard: an unparseable string previously leaked "NaN y ago" into the UI.
  if (Number.isNaN(time)) return ''
  const diffInSeconds = Math.floor((Date.now() - time) / 1000)
  if (diffInSeconds < 60) return 'just now'
  if (diffInSeconds < 3600) return `${Math.floor(diffInSeconds / 60)}m ago`
  if (diffInSeconds < 86400) return `${Math.floor(diffInSeconds / 3600)}h ago`
  if (diffInSeconds < 604800) return `${Math.floor(diffInSeconds / 86400)}d ago`
  if (diffInSeconds < 2592000) return `${Math.floor(diffInSeconds / 604800)}w ago`
  if (diffInSeconds < 31536000) return `${Math.floor(diffInSeconds / 2592000)}mo ago`
  return `${Math.floor(diffInSeconds / 31536000)}y ago`
}
/**
 * Formats a date as an absolute date string (e.g., "Jan 15, 2024, 10:30 AM").
 *
 * @param dateValue - A `Date` instance or a string parseable by `new Date()`.
 * @returns en-US formatted date-and-time string.
 */
export function formatAbsoluteDate(dateValue: string | Date): string {
  // `new Date()` accepts both a date string and an existing Date
  // (the latter is copied with the same time value).
  const date = new Date(dateValue)
  const format: Intl.DateTimeFormatOptions = {
    year: 'numeric',
    month: 'short',
    day: 'numeric',
    hour: '2-digit',
    minute: '2-digit',
  }
  return date.toLocaleDateString('en-US', format)
}

View File

@@ -0,0 +1,26 @@
import { redirect } from 'next/navigation'
import { getSession } from '@/lib/auth'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
import { TablesView } from './components'
interface TablesPageProps {
  params: Promise<{
    workspaceId: string
  }>
}

/**
 * Server component for the workspace tables page.
 *
 * Redirects unauthenticated users and non-members of the workspace to the
 * root route before rendering the client-side tables view.
 */
export default async function TablesPage({ params }: TablesPageProps) {
  const { workspaceId } = await params
  const session = await getSession()
  const userId = session?.user?.id
  // No session — bounce to the landing page.
  if (!userId) {
    redirect('/')
  }
  // Guard direct URL access by users outside this workspace.
  const isMember = await verifyWorkspaceMembership(userId, workspaceId)
  if (!isMember) {
    redirect('/')
  }
  return <TablesView />
}

View File

@@ -259,6 +259,7 @@ export const Code = memo(function Code({
case 'json-schema':
return 'Describe the JSON schema to generate...'
case 'json-object':
case 'table-schema':
return 'Describe the JSON object to generate...'
default:
return 'Describe the JavaScript code to generate...'
@@ -283,9 +284,14 @@ export const Code = memo(function Code({
return wandConfig
}, [wandConfig, languageValue])
const [tableIdValue] = useSubBlockValue<string>(blockId, 'tableId')
const wandHook = useWand({
wandConfig: dynamicWandConfig || { enabled: false, prompt: '' },
currentValue: code,
contextParams: {
tableId: typeof tableIdValue === 'string' ? tableIdValue : null,
},
onStreamStart: () => handleStreamStartRef.current?.(),
onStreamChunk: (chunk: string) => handleStreamChunkRef.current?.(chunk),
onGeneratedContent: (content: string) => handleGeneratedContentRef.current?.(content),

View File

@@ -0,0 +1,19 @@
import { Plus } from 'lucide-react'
import { Button } from '@/components/emcn'
interface EmptyStateProps {
  // Called when the add button is clicked.
  onAdd: () => void
  // Disables the add button (e.g. preview or read-only mode).
  disabled: boolean
  // Button label, e.g. "Add filter rule".
  label: string
}

/**
 * Dashed placeholder with a single add button, shown when the filter
 * builder has no rules yet.
 */
export function EmptyState({ onAdd, disabled, label }: EmptyStateProps) {
  return (
    <div className='flex items-center justify-center rounded-[4px] border border-[var(--border-1)] border-dashed py-[16px]'>
      <Button variant='ghost' size='sm' onClick={onAdd} disabled={disabled}>
        <Plus className='mr-[4px] h-[12px] w-[12px]' />
        {label}
      </Button>
    </div>
  )
}

View File

@@ -0,0 +1,137 @@
import { X } from 'lucide-react'
import { Button, Combobox, type ComboboxOption, Input } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import type { FilterRule } from '@/lib/table/query-builder/constants'
import { formatDisplayText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/formatted-text'
import { SubBlockInputController } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/sub-block-input-controller'
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
interface FilterRuleRowProps {
  // Workflow block that owns this filter sub-block.
  blockId: string
  subBlockId: string
  // The rule this row edits.
  rule: FilterRule
  // Row position; row 0 shows a fixed "where" instead of and/or.
  index: number
  columns: ComboboxOption[]
  comparisonOptions: ComboboxOption[]
  logicalOptions: ComboboxOption[]
  isReadOnly: boolean
  isPreview: boolean
  disabled: boolean
  onRemove: (id: string) => void
  onUpdate: (id: string, field: keyof FilterRule, value: string) => void
}

/**
 * One row of the filter builder: remove button, logical operator (fixed
 * "where" on the first row), column picker, comparison operator, and a
 * value input that supports workflow references.
 *
 * The value input renders transparent text with a formatted overlay on top
 * so block references can be highlighted while keeping a native <input>.
 */
export function FilterRuleRow({
  blockId,
  subBlockId,
  rule,
  index,
  columns,
  comparisonOptions,
  logicalOptions,
  isReadOnly,
  isPreview,
  disabled,
  onRemove,
  onUpdate,
}: FilterRuleRowProps) {
  // Reference prefixes this block may access, used to highlight valid refs.
  const accessiblePrefixes = useAccessibleReferencePrefixes(blockId)
  return (
    <div className='flex items-center gap-[6px]'>
      {/* Remove-rule button */}
      <Button
        variant='ghost'
        size='sm'
        onClick={() => onRemove(rule.id)}
        disabled={isReadOnly}
        className='h-[24px] w-[24px] shrink-0 p-0 text-[var(--text-tertiary)] hover:text-[var(--text-primary)]'
      >
        <X className='h-[12px] w-[12px]' />
      </Button>
      {/* First row shows a fixed "where"; later rows choose and/or */}
      <div className='w-[80px] shrink-0'>
        {index === 0 ? (
          <Combobox
            size='sm'
            options={[{ value: 'where', label: 'where' }]}
            value='where'
            disabled
          />
        ) : (
          <Combobox
            size='sm'
            options={logicalOptions}
            value={rule.logicalOperator}
            onChange={(v) => onUpdate(rule.id, 'logicalOperator', v as 'and' | 'or')}
            disabled={isReadOnly}
          />
        )}
      </div>
      {/* Column selector */}
      <div className='w-[100px] shrink-0'>
        <Combobox
          size='sm'
          options={columns}
          value={rule.column}
          onChange={(v) => onUpdate(rule.id, 'column', v)}
          placeholder='Column'
          disabled={isReadOnly}
        />
      </div>
      {/* Comparison operator selector */}
      <div className='w-[110px] shrink-0'>
        <Combobox
          size='sm'
          options={comparisonOptions}
          value={rule.operator}
          onChange={(v) => onUpdate(rule.id, 'operator', v)}
          disabled={isReadOnly}
        />
      </div>
      {/* Value input with reference-highlighting overlay */}
      <div className='relative min-w-[80px] flex-1'>
        <SubBlockInputController
          blockId={blockId}
          subBlockId={`${subBlockId}_filter_${rule.id}`}
          config={{ id: `filter_value_${rule.id}`, type: 'short-input' }}
          value={rule.value}
          onChange={(newValue) => onUpdate(rule.id, 'value', newValue)}
          isPreview={isPreview}
          disabled={disabled}
        >
          {({ ref, value: ctrlValue, onChange, onKeyDown, onDrop, onDragOver }) => {
            // When no prefix list is available, highlight every reference.
            const formattedText = formatDisplayText(ctrlValue, {
              accessiblePrefixes,
              highlightAll: !accessiblePrefixes,
            })
            return (
              <div className='relative'>
                {/* Transparent text; the overlay below renders the visible text */}
                <Input
                  ref={ref as React.RefObject<HTMLInputElement>}
                  className='h-[28px] w-full overflow-auto text-[12px] text-transparent caret-foreground [-ms-overflow-style:none] [scrollbar-width:none] placeholder:text-muted-foreground/50 [&::-webkit-scrollbar]:hidden'
                  value={ctrlValue}
                  onChange={onChange as (e: React.ChangeEvent<HTMLInputElement>) => void}
                  onKeyDown={onKeyDown as (e: React.KeyboardEvent<HTMLInputElement>) => void}
                  onDrop={onDrop as (e: React.DragEvent<HTMLInputElement>) => void}
                  onDragOver={onDragOver as (e: React.DragEvent<HTMLInputElement>) => void}
                  placeholder='Value'
                  disabled={isReadOnly}
                  autoComplete='off'
                />
                {/* Formatted overlay aligned on top of the input */}
                <div
                  className={cn(
                    'pointer-events-none absolute inset-0 flex items-center overflow-x-auto bg-transparent px-[8px] py-[6px] font-medium font-sans text-[12px] text-foreground [-ms-overflow-style:none] [scrollbar-width:none] [&::-webkit-scrollbar]:hidden',
                    (isPreview || disabled) && 'opacity-50'
                  )}
                >
                  <div className='min-w-fit whitespace-pre'>{formattedText}</div>
                </div>
              </div>
            )
          }}
        </SubBlockInputController>
      </div>
    </div>
  )
}

View File

@@ -0,0 +1,90 @@
'use client'
import { useMemo } from 'react'
import { Plus } from 'lucide-react'
import { Button } from '@/components/emcn'
import { useTableColumns } from '@/lib/table/hooks'
import type { FilterRule } from '@/lib/table/query-builder/constants'
import { useFilterBuilder } from '@/lib/table/query-builder/use-query-builder'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { EmptyState } from './components/empty-state'
import { FilterRuleRow } from './components/filter-rule-row'
interface FilterBuilderProps {
  blockId: string
  subBlockId: string
  isPreview?: boolean
  previewValue?: FilterRule[] | null
  disabled?: boolean
  // Static column options; when provided (non-empty) they take precedence
  // over columns fetched for the selected table.
  columns?: Array<{ value: string; label: string }>
  // Sub-block id holding the selected table id (defaults to 'tableId').
  tableIdSubBlockId?: string
}

/** Visual builder for table filter rules in workflow blocks. */
export function FilterBuilder({
  blockId,
  subBlockId,
  isPreview = false,
  previewValue,
  disabled = false,
  columns: propColumns,
  tableIdSubBlockId = 'tableId',
}: FilterBuilderProps) {
  const [persistedRules, setPersistedRules] = useSubBlockValue<FilterRule[]>(blockId, subBlockId)
  const [selectedTableId] = useSubBlockValue<string>(blockId, tableIdSubBlockId)
  const fetchedColumns = useTableColumns({ tableId: selectedTableId })

  // Explicit columns from props win over dynamically fetched ones.
  const columns = useMemo(
    () => (propColumns && propColumns.length > 0 ? propColumns : fetchedColumns),
    [propColumns, fetchedColumns]
  )

  // Preview mode renders the supplied snapshot instead of the stored value.
  const activeValue = isPreview ? previewValue : persistedRules
  const rules: FilterRule[] =
    Array.isArray(activeValue) && activeValue.length > 0 ? activeValue : []
  const isReadOnly = isPreview || disabled

  const { comparisonOptions, logicalOptions, addRule, removeRule, updateRule } = useFilterBuilder({
    columns,
    rules,
    setRules: setPersistedRules,
    isReadOnly,
  })

  if (rules.length === 0) {
    return (
      <div className='flex flex-col gap-[8px]'>
        <EmptyState onAdd={addRule} disabled={isReadOnly} label='Add filter rule' />
      </div>
    )
  }

  return (
    <div className='flex flex-col gap-[8px]'>
      {rules.map((rule, index) => (
        <FilterRuleRow
          key={rule.id}
          blockId={blockId}
          subBlockId={subBlockId}
          rule={rule}
          index={index}
          columns={columns}
          comparisonOptions={comparisonOptions}
          logicalOptions={logicalOptions}
          isReadOnly={isReadOnly}
          isPreview={isPreview}
          disabled={disabled}
          onRemove={removeRule}
          onUpdate={updateRule}
        />
      ))}
      <Button
        variant='ghost'
        size='sm'
        onClick={addRule}
        disabled={isReadOnly}
        className='self-start'
      >
        <Plus className='mr-[4px] h-[12px] w-[12px]' />
        Add rule
      </Button>
    </div>
  )
}

View File

@@ -9,6 +9,7 @@ export { Dropdown } from './dropdown/dropdown'
export { EvalInput } from './eval-input/eval-input'
export { FileSelectorInput } from './file-selector/file-selector-input'
export { FileUpload } from './file-upload/file-upload'
export { FilterBuilder } from './filter-builder/filter-builder'
export { FolderSelectorInput } from './folder-selector/components/folder-selector-input'
export { GroupedCheckboxList } from './grouped-checkbox-list/grouped-checkbox-list'
export { InputMapping } from './input-mapping/input-mapping'
@@ -27,10 +28,12 @@ export { ShortInput } from './short-input/short-input'
export { SkillInput } from './skill-input/skill-input'
export { SlackSelectorInput } from './slack-selector/slack-selector-input'
export { SliderInput } from './slider-input/slider-input'
export { SortBuilder } from './sort-builder/sort-builder'
export { InputFormat } from './starter/input-format'
export { SubBlockInputController } from './sub-block-input-controller'
export { Switch } from './switch/switch'
export { Table } from './table/table'
export { TableSelector } from './table-selector/table-selector'
export { Text } from './text/text'
export { TimeInput } from './time-input/time-input'
export { ToolInput } from './tool-input/tool-input'

View File

@@ -0,0 +1,19 @@
import { Plus } from 'lucide-react'
import { Button } from '@/components/emcn'
interface EmptyStateProps {
  // Called when the add button is clicked.
  onAdd: () => void
  // Disables the add button (e.g. preview or read-only mode).
  disabled: boolean
  // Button label, e.g. "Add sort rule".
  label: string
}

/**
 * Dashed placeholder with a single add button, shown when the sort
 * builder has no rules yet.
 */
export function EmptyState({ onAdd, disabled, label }: EmptyStateProps) {
  return (
    <div className='flex items-center justify-center rounded-[4px] border border-[var(--border-1)] border-dashed py-[16px]'>
      <Button variant='ghost' size='sm' onClick={onAdd} disabled={disabled}>
        <Plus className='mr-[4px] h-[12px] w-[12px]' />
        {label}
      </Button>
    </div>
  )
}

View File

@@ -0,0 +1,67 @@
import { X } from 'lucide-react'
import { Button, Combobox, type ComboboxOption } from '@/components/emcn'
import type { SortRule } from '@/lib/table/query-builder/constants'
interface SortRuleRowProps {
  // The sort rule this row edits.
  rule: SortRule
  // Row position; row 0 is labeled "order by", later rows "then by".
  index: number
  columns: ComboboxOption[]
  directionOptions: ComboboxOption[]
  isReadOnly: boolean
  onRemove: (id: string) => void
  onUpdate: (id: string, field: keyof SortRule, value: string) => void
}

/**
 * One row of the sort builder: remove button, fixed precedence label,
 * column picker, and sort-direction picker.
 */
export function SortRuleRow({
  rule,
  index,
  columns,
  directionOptions,
  isReadOnly,
  onRemove,
  onUpdate,
}: SortRuleRowProps) {
  return (
    <div className='flex items-center gap-[6px]'>
      {/* Remove-rule button */}
      <Button
        variant='ghost'
        size='sm'
        onClick={() => onRemove(rule.id)}
        disabled={isReadOnly}
        className='h-[24px] w-[24px] shrink-0 p-0 text-[var(--text-tertiary)] hover:text-[var(--text-primary)]'
      >
        <X className='h-[12px] w-[12px]' />
      </Button>
      {/* Non-interactive precedence label ("order by" / "then by") */}
      <div className='w-[90px] shrink-0'>
        <Combobox
          size='sm'
          options={[{ value: String(index + 1), label: index === 0 ? 'order by' : 'then by' }]}
          value={String(index + 1)}
          disabled
        />
      </div>
      {/* Column selector */}
      <div className='min-w-[120px] flex-1'>
        <Combobox
          size='sm'
          options={columns}
          value={rule.column}
          onChange={(v) => onUpdate(rule.id, 'column', v)}
          placeholder='Column'
          disabled={isReadOnly}
        />
      </div>
      {/* Direction selector (asc/desc) */}
      <div className='w-[110px] shrink-0'>
        <Combobox
          size='sm'
          options={directionOptions}
          value={rule.direction}
          onChange={(v) => onUpdate(rule.id, 'direction', v as 'asc' | 'desc')}
          disabled={isReadOnly}
        />
      </div>
    </div>
  )
}

View File

@@ -0,0 +1,110 @@
'use client'
import { useCallback, useMemo } from 'react'
import { Plus } from 'lucide-react'
import { nanoid } from 'nanoid'
import { Button, type ComboboxOption } from '@/components/emcn'
import { useTableColumns } from '@/lib/table/hooks'
import { SORT_DIRECTIONS, type SortRule } from '@/lib/table/query-builder/constants'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { EmptyState } from './components/empty-state'
import { SortRuleRow } from './components/sort-rule-row'
interface SortBuilderProps {
  blockId: string
  subBlockId: string
  isPreview?: boolean
  previewValue?: SortRule[] | null
  disabled?: boolean
  // Static column options; when provided (non-empty) they take precedence
  // over columns fetched for the selected table.
  columns?: Array<{ value: string; label: string }>
  // Sub-block id holding the selected table id (defaults to 'tableId').
  tableIdSubBlockId?: string
}

// Builds a fresh rule defaulting to the first available column, ascending.
const createDefaultRule = (columns: ComboboxOption[]): SortRule => ({
  id: nanoid(),
  column: columns[0]?.value || '',
  direction: 'asc',
})

/** Visual builder for table sort rules in workflow blocks. */
export function SortBuilder({
  blockId,
  subBlockId,
  isPreview = false,
  previewValue,
  disabled = false,
  columns: propColumns,
  tableIdSubBlockId = 'tableId',
}: SortBuilderProps) {
  const [savedRules, setSavedRules] = useSubBlockValue<SortRule[]>(blockId, subBlockId)
  const [selectedTableId] = useSubBlockValue<string>(blockId, tableIdSubBlockId)
  const fetchedColumns = useTableColumns({ tableId: selectedTableId, includeBuiltIn: true })

  // Explicit columns from props win over dynamically fetched ones.
  const columns = useMemo(
    () => (propColumns && propColumns.length > 0 ? propColumns : fetchedColumns),
    [propColumns, fetchedColumns]
  )
  const directionOptions = useMemo(
    () => SORT_DIRECTIONS.map(({ value, label }) => ({ value, label })),
    []
  )

  // Preview mode renders the supplied snapshot instead of the stored value.
  const activeValue = isPreview ? previewValue : savedRules
  const rules: SortRule[] =
    Array.isArray(activeValue) && activeValue.length > 0 ? activeValue : []
  const isReadOnly = isPreview || disabled

  const addRule = useCallback(() => {
    if (isReadOnly) return
    setSavedRules([...rules, createDefaultRule(columns)])
  }, [isReadOnly, rules, columns, setSavedRules])

  const removeRule = useCallback(
    (id: string) => {
      if (isReadOnly) return
      setSavedRules(rules.filter((r) => r.id !== id))
    },
    [isReadOnly, rules, setSavedRules]
  )

  const updateRule = useCallback(
    (id: string, field: keyof SortRule, newValue: string) => {
      if (isReadOnly) return
      setSavedRules(rules.map((r) => (r.id === id ? { ...r, [field]: newValue } : r)))
    },
    [isReadOnly, rules, setSavedRules]
  )

  if (rules.length === 0) {
    return (
      <div className='flex flex-col gap-[8px]'>
        <EmptyState onAdd={addRule} disabled={isReadOnly} label='Add sort rule' />
      </div>
    )
  }

  return (
    <div className='flex flex-col gap-[8px]'>
      {rules.map((rule, index) => (
        <SortRuleRow
          key={rule.id}
          rule={rule}
          index={index}
          columns={columns}
          directionOptions={directionOptions}
          isReadOnly={isReadOnly}
          onRemove={removeRule}
          onUpdate={updateRule}
        />
      ))}
      <Button
        variant='ghost'
        size='sm'
        onClick={addRule}
        disabled={isReadOnly}
        className='self-start'
      >
        <Plus className='mr-[4px] h-[12px] w-[12px]' />
        Add sort
      </Button>
    </div>
  )
}

View File

@@ -0,0 +1,78 @@
'use client'
import { useCallback, useMemo } from 'react'
import { useParams } from 'next/navigation'
import { Combobox, type ComboboxOption } from '@/components/emcn'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import type { SubBlockConfig } from '@/blocks/types'
import { useTablesList } from '@/hooks/queries/tables'
interface TableSelectorProps {
  blockId: string
  subBlock: SubBlockConfig
  disabled?: boolean
  isPreview?: boolean
  previewValue?: string | null
}

/**
 * Table selector component with dropdown for selecting workspace tables
 *
 * @remarks
 * Provides a dropdown to select workspace tables.
 * Uses React Query for efficient data fetching and caching.
 * The external link to view the table is rendered in the label row by the parent SubBlock.
 */
export function TableSelector({
  blockId,
  subBlock,
  disabled = false,
  isPreview = false,
  previewValue,
}: TableSelectorProps) {
  const params = useParams()
  const workspaceId = params.workspaceId as string
  const [storeValue, setStoreValue] = useSubBlockValue<string>(blockId, subBlock.id)

  // Skip fetching entirely when the control cannot be interacted with.
  const shouldFetch = !isPreview && !disabled
  const {
    data: tables = [],
    isLoading,
    error,
  } = useTablesList(shouldFetch ? workspaceId : undefined)

  const activeValue = isPreview ? previewValue : storeValue
  const tableId = typeof activeValue === 'string' ? activeValue : null

  // NOTE(review): labels are lowercased here — presumably a deliberate UI
  // convention; confirm table names shouldn't keep their original casing.
  const options = useMemo<ComboboxOption[]>(
    () => tables.map((table) => ({ label: table.name.toLowerCase(), value: table.id })),
    [tables]
  )

  const handleChange = useCallback(
    (selectedValue: string) => {
      if (isPreview || disabled) return
      setStoreValue(selectedValue)
    },
    [isPreview, disabled, setStoreValue]
  )

  // Normalize unknown error shapes into a display string.
  let errorMessage: string | undefined
  if (error instanceof Error) {
    errorMessage = error.message
  } else if (error) {
    errorMessage = String(error)
  }

  return (
    <Combobox
      options={options}
      value={tableId ?? undefined}
      onChange={handleChange}
      placeholder={subBlock.placeholder || 'Select a table'}
      disabled={disabled || isPreview}
      editable={false}
      isLoading={isLoading}
      error={errorMessage}
      searchable={options.length > 5}
      searchPlaceholder='Search...'
    />
  )
}

View File

@@ -19,11 +19,11 @@ interface TableProps {
subBlockId: string
columns: string[]
isPreview?: boolean
previewValue?: TableRow[] | null
previewValue?: WorkflowTableRow[] | null
disabled?: boolean
}
interface TableRow {
interface WorkflowTableRow {
id: string
cells: Record<string, string>
}
@@ -38,7 +38,7 @@ export function Table({
}: TableProps) {
const params = useParams()
const workspaceId = params.workspaceId as string
const [storeValue, setStoreValue] = useSubBlockValue<TableRow[]>(blockId, subBlockId)
const [storeValue, setStoreValue] = useSubBlockValue<WorkflowTableRow[]>(blockId, subBlockId)
const accessiblePrefixes = useAccessibleReferencePrefixes(blockId)
// Use the extended hook for field-level management
@@ -73,7 +73,7 @@ export function Table({
*/
useEffect(() => {
if (!isPreview && !disabled && (!Array.isArray(storeValue) || storeValue.length === 0)) {
const initialRow: TableRow = {
const initialRow: WorkflowTableRow = {
id: crypto.randomUUID(),
cells: { ...emptyCellsTemplate },
}
@@ -110,7 +110,7 @@ export function Table({
}
})
return validatedRows as TableRow[]
return validatedRows as WorkflowTableRow[]
}, [value, emptyCellsTemplate])
// Helper to update a cell value
@@ -164,7 +164,12 @@ export function Table({
</thead>
)
const renderCell = (row: TableRow, rowIndex: number, column: string, cellIndex: number) => {
const renderCell = (
row: WorkflowTableRow,
rowIndex: number,
column: string,
cellIndex: number
) => {
// Defensive programming: ensure row.cells exists and has the expected structure
const hasValidCells = row.cells && typeof row.cells === 'object'
if (!hasValidCells) logger.warn('Table row has malformed cells data:', row)

View File

@@ -357,6 +357,7 @@ const BUILT_IN_TOOL_TYPES = new Set([
'tts',
'stt',
'memory',
'table',
'webhook_request',
'workflow',
])
@@ -614,7 +615,8 @@ export const ToolInput = memo(function ToolInput({
block.type === 'workflow' ||
block.type === 'workflow_input' ||
block.type === 'knowledge' ||
block.type === 'function') &&
block.type === 'function' ||
block.type === 'table') &&
block.type !== 'evaluator' &&
block.type !== 'mcp' &&
block.type !== 'file'

View File

@@ -1,8 +1,17 @@
import { type JSX, type MouseEvent, memo, useCallback, useRef, useState } from 'react'
import { type JSX, type MouseEvent, memo, useCallback, useMemo, useRef, useState } from 'react'
import isEqual from 'lodash/isEqual'
import { AlertTriangle, ArrowLeftRight, ArrowUp, Check, Clipboard } from 'lucide-react'
import {
AlertTriangle,
ArrowLeftRight,
ArrowUp,
Check,
Clipboard,
ExternalLink,
} from 'lucide-react'
import { useParams } from 'next/navigation'
import { Button, Input, Label, Tooltip } from '@/components/emcn/components'
import { cn } from '@/lib/core/utils/cn'
import type { FilterRule, SortRule } from '@/lib/table/query-builder/constants'
import {
CheckboxList,
Code,
@@ -15,6 +24,7 @@ import {
EvalInput,
FileSelectorInput,
FileUpload,
FilterBuilder,
FolderSelectorInput,
GroupedCheckboxList,
InputFormat,
@@ -34,8 +44,10 @@ import {
SkillInput,
SlackSelectorInput,
SliderInput,
SortBuilder,
Switch,
Table,
TableSelector,
Text,
TimeInput,
ToolInput,
@@ -202,7 +214,12 @@ const renderLabel = (
copied: boolean
onCopy: () => void
},
labelSuffix?: React.ReactNode
labelSuffix?: React.ReactNode,
externalLink?: {
show: boolean
onClick: () => void
tooltip: string
}
): JSX.Element | null => {
if (config.type === 'switch') return null
if (!config.title) return null
@@ -211,6 +228,7 @@ const renderLabel = (
const showWand = wandState?.isWandEnabled && !wandState.isPreview && !wandState.disabled
const showCanonicalToggle = !!canonicalToggle && !wandState?.isPreview
const showCopy = copyState?.showCopyButton && !wandState?.isPreview
const showExternalLink = externalLink?.show && !wandState?.isPreview
const canonicalToggleDisabledResolved = canonicalToggleIsDisabled ?? canonicalToggle?.disabled
return (
@@ -351,6 +369,23 @@ const renderLabel = (
</Tooltip.Content>
</Tooltip.Root>
)}
{showExternalLink && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<button
type='button'
className='flex h-[12px] w-[12px] flex-shrink-0 items-center justify-center bg-transparent p-0'
onClick={externalLink?.onClick}
aria-label={externalLink?.tooltip}
>
<ExternalLink className='!h-[12px] !w-[12px] text-[var(--text-secondary)]' />
</button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
<p>{externalLink?.tooltip}</p>
</Tooltip.Content>
</Tooltip.Root>
)}
</div>
</div>
)
@@ -415,6 +450,9 @@ function SubBlockComponent({
labelSuffix,
dependencyContext,
}: SubBlockProps): JSX.Element {
const params = useParams()
const workspaceId = params.workspaceId as string
const [isValidJson, setIsValidJson] = useState(true)
const [isSearchActive, setIsSearchActive] = useState(false)
const [searchQuery, setSearchQuery] = useState('')
@@ -451,6 +489,30 @@ function SubBlockComponent({
}
}, [webhookManagement?.webhookUrl])
const tableId =
config.type === 'table-selector' && subBlockValues
? (subBlockValues[config.id]?.value as string | null)
: null
const hasSelectedTable = tableId && !tableId.startsWith('<')
const handleNavigateToTable = useCallback(() => {
if (tableId && workspaceId) {
window.open(`/workspace/${workspaceId}/tables/${tableId}`, '_blank')
}
}, [workspaceId, tableId])
const externalLink = useMemo(
() =>
config.type === 'table-selector' && hasSelectedTable
? {
show: true,
onClick: handleNavigateToTable,
tooltip: 'View table',
}
: undefined,
[config.type, hasSelectedTable, handleNavigateToTable]
)
/**
* Handles wand icon click to activate inline prompt mode.
* Focuses the input after a brief delay to ensure DOM is ready.
@@ -584,6 +646,19 @@ function SubBlockComponent({
</div>
)
case 'table-selector':
return (
<div onMouseDown={handleMouseDown}>
<TableSelector
blockId={blockId}
subBlock={config}
disabled={isDisabled}
isPreview={isPreview}
previewValue={previewValue as string | null}
/>
</div>
)
case 'combobox':
return (
<div onMouseDown={handleMouseDown}>
@@ -944,6 +1019,28 @@ function SubBlockComponent({
/>
)
case 'filter-builder':
return (
<FilterBuilder
blockId={blockId}
subBlockId={config.id}
isPreview={isPreview}
previewValue={previewValue as FilterRule[] | null | undefined}
disabled={isDisabled}
/>
)
case 'sort-builder':
return (
<SortBuilder
blockId={blockId}
subBlockId={config.id}
isPreview={isPreview}
previewValue={previewValue as SortRule[] | null | undefined}
disabled={isDisabled}
/>
)
case 'channel-selector':
case 'user-selector':
return (
@@ -1060,7 +1157,8 @@ function SubBlockComponent({
copied,
onCopy: handleCopy,
},
labelSuffix
labelSuffix,
externalLink
)}
{renderInput()}
</div>

View File

@@ -9,6 +9,7 @@ import { cn } from '@/lib/core/utils/cn'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createMcpToolId } from '@/lib/mcp/shared'
import { getProviderIdFromServiceId } from '@/lib/oauth'
import type { FilterRule, SortRule } from '@/lib/table/types'
import { BLOCK_DIMENSIONS, HANDLE_POSITIONS } from '@/lib/workflows/blocks/block-dimensions'
import {
buildCanonicalIndex,
@@ -41,6 +42,7 @@ import { useMcpServers, useMcpToolsQuery } from '@/hooks/queries/mcp'
import { useCredentialName } from '@/hooks/queries/oauth-credentials'
import { useReactivateSchedule, useScheduleInfo } from '@/hooks/queries/schedules'
import { useSkills } from '@/hooks/queries/skills'
import { useTablesList } from '@/hooks/queries/tables'
import { useDeployChildWorkflow } from '@/hooks/queries/workflows'
import { useSelectorDisplayName } from '@/hooks/use-selector-display-name'
import { useVariablesStore } from '@/stores/panel'
@@ -55,9 +57,9 @@ const logger = createLogger('WorkflowBlock')
const EMPTY_SUBBLOCK_VALUES = {} as Record<string, any>
/**
* Type guard for table row structure
* Type guard for workflow table row structure (sub-block table inputs)
*/
interface TableRow {
interface WorkflowTableRow {
id: string
cells: Record<string, string>
}
@@ -76,7 +78,7 @@ interface FieldFormat {
/**
* Checks if a value is a table row array
*/
const isTableRowArray = (value: unknown): value is TableRow[] => {
const isTableRowArray = (value: unknown): value is WorkflowTableRow[] => {
if (!Array.isArray(value) || value.length === 0) return false
const firstItem = value[0]
return (
@@ -95,7 +97,11 @@ const isFieldFormatArray = (value: unknown): value is FieldFormat[] => {
if (!Array.isArray(value) || value.length === 0) return false
const firstItem = value[0]
return (
typeof firstItem === 'object' && firstItem !== null && 'id' in firstItem && 'name' in firstItem
typeof firstItem === 'object' &&
firstItem !== null &&
'id' in firstItem &&
'name' in firstItem &&
typeof firstItem.name === 'string'
)
}
@@ -161,7 +167,8 @@ const isTagFilterArray = (value: unknown): value is TagFilterItem[] => {
typeof firstItem === 'object' &&
firstItem !== null &&
'tagName' in firstItem &&
'tagValue' in firstItem
'tagValue' in firstItem &&
typeof firstItem.tagName === 'string'
)
}
@@ -183,7 +190,40 @@ const isDocumentTagArray = (value: unknown): value is DocumentTagItem[] => {
firstItem !== null &&
'tagName' in firstItem &&
'value' in firstItem &&
!('tagValue' in firstItem) // Distinguish from tag filters
!('tagValue' in firstItem) && // Distinguish from tag filters
typeof firstItem.tagName === 'string'
)
}
/**
* Type guard for filter condition array (used in table block filter builder)
*/
const isFilterConditionArray = (value: unknown): value is FilterRule[] => {
if (!Array.isArray(value) || value.length === 0) return false
const firstItem = value[0]
return (
typeof firstItem === 'object' &&
firstItem !== null &&
'column' in firstItem &&
'operator' in firstItem &&
'logicalOperator' in firstItem &&
typeof firstItem.column === 'string'
)
}
/**
* Type guard for sort condition array (used in table block sort builder)
*/
const isSortConditionArray = (value: unknown): value is SortRule[] => {
if (!Array.isArray(value) || value.length === 0) return false
const firstItem = value[0]
return (
typeof firstItem === 'object' &&
firstItem !== null &&
'column' in firstItem &&
'direction' in firstItem &&
typeof firstItem.column === 'string' &&
(firstItem.direction === 'asc' || firstItem.direction === 'desc')
)
}
@@ -231,7 +271,9 @@ export const getDisplayValue = (value: unknown): string => {
}
if (isTagFilterArray(parsedValue)) {
const validFilters = parsedValue.filter((f) => f.tagName?.trim())
const validFilters = parsedValue.filter(
(f) => typeof f.tagName === 'string' && f.tagName.trim() !== ''
)
if (validFilters.length === 0) return '-'
if (validFilters.length === 1) return validFilters[0].tagName
if (validFilters.length === 2) return `${validFilters[0].tagName}, ${validFilters[1].tagName}`
@@ -239,13 +281,54 @@ export const getDisplayValue = (value: unknown): string => {
}
if (isDocumentTagArray(parsedValue)) {
const validTags = parsedValue.filter((t) => t.tagName?.trim())
const validTags = parsedValue.filter(
(t) => typeof t.tagName === 'string' && t.tagName.trim() !== ''
)
if (validTags.length === 0) return '-'
if (validTags.length === 1) return validTags[0].tagName
if (validTags.length === 2) return `${validTags[0].tagName}, ${validTags[1].tagName}`
return `${validTags[0].tagName}, ${validTags[1].tagName} +${validTags.length - 2}`
}
if (isFilterConditionArray(parsedValue)) {
const validConditions = parsedValue.filter(
(c) => typeof c.column === 'string' && c.column.trim() !== ''
)
if (validConditions.length === 0) return '-'
const formatCondition = (c: FilterRule) => {
const opLabels: Record<string, string> = {
eq: '=',
ne: '≠',
gt: '>',
gte: '≥',
lt: '<',
lte: '≤',
contains: '~',
in: 'in',
}
const op = opLabels[c.operator] || c.operator
return `${c.column} ${op} ${c.value || '?'}`
}
if (validConditions.length === 1) return formatCondition(validConditions[0])
if (validConditions.length === 2) {
return `${formatCondition(validConditions[0])}, ${formatCondition(validConditions[1])}`
}
return `${formatCondition(validConditions[0])}, ${formatCondition(validConditions[1])} +${validConditions.length - 2}`
}
if (isSortConditionArray(parsedValue)) {
const validConditions = parsedValue.filter(
(c) => typeof c.column === 'string' && c.column.trim() !== ''
)
if (validConditions.length === 0) return '-'
const formatSort = (c: SortRule) => `${c.column} ${c.direction === 'desc' ? '↓' : '↑'}`
if (validConditions.length === 1) return formatSort(validConditions[0])
if (validConditions.length === 2) {
return `${formatSort(validConditions[0])}, ${formatSort(validConditions[1])}`
}
return `${formatSort(validConditions[0])}, ${formatSort(validConditions[1])} +${validConditions.length - 2}`
}
if (isTableRowArray(parsedValue)) {
const nonEmptyRows = parsedValue.filter((row) => {
const cellValues = Object.values(row.cells)
@@ -267,7 +350,9 @@ export const getDisplayValue = (value: unknown): string => {
}
if (isFieldFormatArray(parsedValue)) {
const namedFields = parsedValue.filter((field) => field.name && field.name.trim() !== '')
const namedFields = parsedValue.filter(
(field) => typeof field.name === 'string' && field.name.trim() !== ''
)
if (namedFields.length === 0) return '-'
if (namedFields.length === 1) return namedFields[0].name
if (namedFields.length === 2) return `${namedFields[0].name}, ${namedFields[1].name}`
@@ -513,6 +598,15 @@ const SubBlockRow = memo(function SubBlockRow({
return tool?.name ?? null
}, [subBlock?.type, rawValue, mcpToolsData])
const { data: tables = [] } = useTablesList(workspaceId || '')
const tableDisplayName = useMemo(() => {
if (subBlock?.id !== 'tableId' || typeof rawValue !== 'string') {
return null
}
const table = tables.find((t) => t.id === rawValue)
return table?.name ?? null
}, [subBlock?.id, rawValue, tables])
const webhookUrlDisplayValue = useMemo(() => {
if (subBlock?.id !== 'webhookUrlDisplay' || !blockId) {
return null
@@ -619,6 +713,27 @@ const SubBlockRow = memo(function SubBlockRow({
return `${toolNames[0]}, ${toolNames[1]} +${toolNames.length - 2}`
}, [subBlock?.type, rawValue, customTools, workspaceId])
const filterDisplayValue = useMemo(() => {
const isFilterField =
subBlock?.id === 'filter' || subBlock?.id === 'filterCriteria' || subBlock?.id === 'sort'
if (!isFilterField || !rawValue) return null
const parsedValue = tryParseJson(rawValue)
if (isPlainObject(parsedValue) || Array.isArray(parsedValue)) {
try {
const jsonStr = JSON.stringify(parsedValue, null, 0)
if (jsonStr.length <= 35) return jsonStr
return `${jsonStr.slice(0, 32)}...`
} catch {
return null
}
}
return null
}, [subBlock?.id, rawValue])
/**
* Hydrates skill references to display names.
* Resolves skill IDs to their current names from the skills query.
@@ -663,18 +778,21 @@ const SubBlockRow = memo(function SubBlockRow({
const isPasswordField = subBlock?.password === true
const maskedValue = isPasswordField && value && value !== '-' ? '•••' : null
const isMonospaceField = Boolean(filterDisplayValue)
const isSelectorType = subBlock?.type && SELECTOR_TYPES_HYDRATION_REQUIRED.includes(subBlock.type)
const hydratedName =
credentialName ||
dropdownLabel ||
variablesDisplayValue ||
filterDisplayValue ||
toolsDisplayValue ||
skillsDisplayValue ||
knowledgeBaseDisplayName ||
workflowSelectionName ||
mcpServerDisplayName ||
mcpToolDisplayName ||
tableDisplayName ||
webhookUrlDisplayValue ||
selectorDisplayName
const displayValue = maskedValue || hydratedName || (isSelectorType && value ? '-' : value)
@@ -689,7 +807,10 @@ const SubBlockRow = memo(function SubBlockRow({
</span>
{displayValue !== undefined && (
<span
className='flex-1 truncate text-right text-[14px] text-[var(--text-primary)]'
className={cn(
'flex-1 truncate text-right text-[14px] text-[var(--text-primary)]',
isMonospaceField && 'font-mono'
)}
title={displayValue}
>
{displayValue}

View File

@@ -4,23 +4,37 @@ import { useQueryClient } from '@tanstack/react-query'
import { readSSEStream } from '@/lib/core/utils/sse'
import type { GenerationType } from '@/blocks/types'
import { subscriptionKeys } from '@/hooks/queries/subscription'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
const logger = createLogger('useWand')
interface ChatMessage {
role: 'user' | 'assistant' | 'system'
content: string
}
interface BuildWandContextInfoOptions {
currentValue?: string
generationType?: string
}
/**
* Builds rich context information based on current content and generation type
* Builds rich context information based on current content and generation type.
* Note: Table schema context is now fetched server-side in /api/wand for simplicity.
*/
function buildContextInfo(currentValue?: string, generationType?: string): string {
if (!currentValue || currentValue.trim() === '') {
return 'no current content'
}
function buildWandContextInfo({
currentValue,
generationType,
}: BuildWandContextInfoOptions): string {
const hasContent = Boolean(currentValue && currentValue.trim() !== '')
const contentLength = currentValue?.length ?? 0
const lineCount = currentValue ? currentValue.split('\n').length : 0
const contentLength = currentValue.length
const lineCount = currentValue.split('\n').length
let contextInfo = hasContent
? `Current content (${contentLength} characters, ${lineCount} lines):\n${currentValue}`
: 'no current content'
let contextInfo = `Current content (${contentLength} characters, ${lineCount} lines):\n${currentValue}`
if (generationType) {
if (generationType && currentValue) {
switch (generationType) {
case 'javascript-function-body':
case 'typescript-function-body': {
@@ -33,6 +47,7 @@ function buildContextInfo(currentValue?: string, generationType?: string): strin
case 'json-schema':
case 'json-object':
case 'table-schema':
try {
const parsed = JSON.parse(currentValue)
const keys = Object.keys(parsed)
@@ -47,11 +62,6 @@ function buildContextInfo(currentValue?: string, generationType?: string): strin
return contextInfo
}
interface ChatMessage {
role: 'user' | 'assistant' | 'system'
content: string
}
export interface WandConfig {
enabled: boolean
prompt: string
@@ -63,6 +73,9 @@ export interface WandConfig {
interface UseWandProps {
wandConfig?: WandConfig
currentValue?: string
contextParams?: {
tableId?: string | null
}
onGeneratedContent: (content: string) => void
onStreamChunk?: (chunk: string) => void
onStreamStart?: () => void
@@ -72,12 +85,14 @@ interface UseWandProps {
export function useWand({
wandConfig,
currentValue,
contextParams,
onGeneratedContent,
onStreamChunk,
onStreamStart,
onGenerationComplete,
}: UseWandProps) {
const queryClient = useQueryClient()
const workflowId = useWorkflowRegistry((state) => state.hydration.workflowId)
const [isLoading, setIsLoading] = useState(false)
const [isPromptVisible, setIsPromptVisible] = useState(false)
const [promptInputValue, setPromptInputValue] = useState('')
@@ -148,7 +163,10 @@ export function useWand({
}
try {
const contextInfo = buildContextInfo(currentValue, wandConfig?.generationType)
const contextInfo = buildWandContextInfo({
currentValue,
generationType: wandConfig?.generationType,
})
let systemPrompt = wandConfig?.prompt || ''
if (systemPrompt.includes('{context}')) {
@@ -171,6 +189,8 @@ export function useWand({
stream: true,
history: wandConfig?.maintainHistory ? conversationHistory : [],
generationType: wandConfig?.generationType,
workflowId,
wandContext: contextParams?.tableId ? { tableId: contextParams.tableId } : undefined,
}),
signal: abortControllerRef.current.signal,
cache: 'no-store',
@@ -235,6 +255,8 @@ export function useWand({
onStreamStart,
onGenerationComplete,
queryClient,
contextParams?.tableId,
workflowId,
]
)

View File

@@ -2,7 +2,7 @@
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { Database, HelpCircle, Layout, Plus, Search, Settings } from 'lucide-react'
import { Database, HelpCircle, Layout, Plus, Search, Settings, Table } from 'lucide-react'
import Link from 'next/link'
import { useParams, usePathname, useRouter } from 'next/navigation'
import { Button, Download, FolderPlus, Library, Loader, Tooltip } from '@/components/emcn'
@@ -268,6 +268,12 @@ export const Sidebar = memo(function Sidebar() {
href: `/workspace/${workspaceId}/knowledge`,
hidden: permissionConfig.hideKnowledgeBaseTab,
},
{
id: 'tables',
label: 'Tables',
icon: Table,
href: `/workspace/${workspaceId}/tables`,
},
{
id: 'help',
label: 'Help',

View File

@@ -408,6 +408,9 @@ describe.concurrent('Blocks Module', () => {
'workflow-input-mapper',
'text',
'router-input',
'table-selector',
'filter-builder',
'sort-builder',
'skill-input',
]

View File

@@ -0,0 +1,679 @@
import { TableIcon } from '@/components/icons'
import { TABLE_LIMITS } from '@/lib/table/constants'
import { filterRulesToFilter, sortRulesToSort } from '@/lib/table/query-builder/converters'
import type { BlockConfig } from '@/blocks/types'
import type { TableQueryResponse } from '@/tools/table/types'
/**
 * Parses a JSON string with helpful error messages.
 *
 * Handles common issues like unquoted block references in JSON values.
 *
 * @param value - The value to parse (string or already-parsed object)
 * @param fieldName - Name of the field for error messages
 * @returns Parsed JSON value (non-string inputs are returned unchanged)
 * @throws Error with helpful hints if JSON is invalid
 */
function parseJSON(value: string | unknown, fieldName: string): unknown {
  if (typeof value !== 'string') return value

  try {
    return JSON.parse(value)
  } catch (error) {
    const errorMsg = error instanceof Error ? error.message : String(error)

    // Detect the two common "unquoted value" mistakes so the hint is actionable:
    // 1. An unquoted block reference, e.g. {"field": <blockName.output>}.
    //    References start with '<', which the word-only regex below can never
    //    match, so without this check the reference-specific hint was unreachable.
    const unquotedReferenceMatch = value.match(/:\s*<[^>]*>?/)
    // 2. A bare unquoted word value, e.g. {"field": pending}.
    const unquotedValueMatch = value.match(
      /:\s*([a-zA-Z][a-zA-Z0-9_\s]*[a-zA-Z0-9]|[a-zA-Z])\s*[,}]/
    )

    let hint =
      'Make sure all property names are in double quotes (e.g., {"name": "value"} not {name: "value"}).'
    if (unquotedReferenceMatch || unquotedValueMatch) {
      hint =
        'It looks like a string value is not quoted. When using block references in JSON, wrap them in double quotes: {"field": "<blockName.output>"} not {"field": <blockName.output>}.'
    }

    throw new Error(`Invalid JSON in ${fieldName}: ${errorMsg}. ${hint}`)
  }
}
/** Raw params from block UI before JSON parsing and type conversion */
interface TableBlockParams {
  /** Selected operation id (e.g. 'query_rows', 'insert_row') */
  operation: string
  /** Target table id from the table selector */
  tableId?: string
  /** Row id for single-row get/update/delete operations */
  rowId?: string
  /** Row data — JSON string from the code editor, or an already-parsed value */
  data?: string | unknown
  /** Rows array for batch insert — JSON string or already-parsed value */
  rows?: string | unknown
  /** Filter criteria — JSON string (editor mode) or already-parsed value */
  filter?: string | unknown
  /** Sort order — JSON string (editor mode) or already-parsed value */
  sort?: string | unknown
  /** Max rows returned/affected, as the raw string from the input */
  limit?: string
  /** Pagination offset for query_rows, as the raw string from the input */
  offset?: string
  /** Input mode for query_rows filter/sort: 'builder' or 'json' */
  builderMode?: string
  /** Visual filter rules for query_rows when builderMode is 'builder' */
  filterBuilder?: unknown
  /** Visual sort rules for query_rows when builderMode is 'builder' */
  sortBuilder?: unknown
  /** Filter mode for bulk update/delete operations: 'builder' or 'json' */
  bulkFilterMode?: string
  /** Visual filter rules for bulk operations when bulkFilterMode is 'builder' */
  bulkFilterBuilder?: unknown
}

/** Normalized params after parsing, ready for tool request body */
interface ParsedParams {
  tableId?: string
  rowId?: string
  /** Parsed row data */
  data?: unknown
  /** Parsed rows array for batch insert */
  rows?: unknown
  /** Parsed (editor) or converted (builder) filter criteria */
  filter?: unknown
  /** Parsed (editor) or converted (builder) sort order */
  sort?: unknown
  /** Numeric row limit parsed from the raw string */
  limit?: number
  /** Numeric pagination offset parsed from the raw string */
  offset?: number
}
/** Parses a numeric string param; returns the fallback when the raw value is absent. */
const parseNumericParam = (raw: string | undefined, fallback?: number): number | undefined =>
  raw ? Number.parseInt(raw) : fallback

/**
 * Resolves filter criteria from either the visual builder or the JSON editor.
 * Builder output takes precedence when builder mode is active and rules are
 * present; otherwise any raw filter value is parsed as JSON.
 */
const resolveFilter = (
  useBuilder: boolean,
  builderRules: unknown,
  rawFilter: string | unknown
): unknown => {
  if (useBuilder && builderRules) {
    return (
      filterRulesToFilter(builderRules as Parameters<typeof filterRulesToFilter>[0]) || undefined
    )
  }
  if (rawFilter) return parseJSON(rawFilter, 'Filter')
  return undefined
}

/**
 * Resolves sort order from either the visual builder or the JSON editor,
 * mirroring the precedence rules used for filters.
 */
const resolveSort = (
  useBuilder: boolean,
  builderRules: unknown,
  rawSort: string | unknown
): unknown => {
  if (useBuilder && builderRules) {
    return sortRulesToSort(builderRules as Parameters<typeof sortRulesToSort>[0]) || undefined
  }
  if (rawSort) return parseJSON(rawSort, 'Sort')
  return undefined
}

/** Transforms raw block params into tool request params for each operation */
const paramTransformers: Record<string, (params: TableBlockParams) => ParsedParams> = {
  insert_row: ({ tableId, data }) => ({
    tableId,
    data: parseJSON(data, 'Row Data'),
  }),
  upsert_row: ({ tableId, data }) => ({
    tableId,
    data: parseJSON(data, 'Row Data'),
  }),
  batch_insert_rows: ({ tableId, rows }) => ({
    tableId,
    rows: parseJSON(rows, 'Rows Data'),
  }),
  update_row: ({ tableId, rowId, data }) => ({
    tableId,
    rowId,
    data: parseJSON(data, 'Row Data'),
  }),
  update_rows_by_filter: (params) => ({
    tableId: params.tableId,
    filter: resolveFilter(
      params.bulkFilterMode === 'builder',
      params.bulkFilterBuilder,
      params.filter
    ),
    data: parseJSON(params.data, 'Row Data'),
    limit: parseNumericParam(params.limit),
  }),
  delete_row: ({ tableId, rowId }) => ({
    tableId,
    rowId,
  }),
  delete_rows_by_filter: (params) => ({
    tableId: params.tableId,
    filter: resolveFilter(
      params.bulkFilterMode === 'builder',
      params.bulkFilterBuilder,
      params.filter
    ),
    limit: parseNumericParam(params.limit),
  }),
  get_row: ({ tableId, rowId }) => ({
    tableId,
    rowId,
  }),
  get_schema: ({ tableId }) => ({
    tableId,
  }),
  query_rows: (params) => ({
    tableId: params.tableId,
    filter: resolveFilter(params.builderMode === 'builder', params.filterBuilder, params.filter),
    sort: resolveSort(params.builderMode === 'builder', params.sortBuilder, params.sort),
    limit: parseNumericParam(params.limit, 100),
    offset: parseNumericParam(params.offset, 0),
  }),
}
export const TableBlock: BlockConfig<TableQueryResponse> = {
type: 'table',
name: 'Table',
description: 'User-defined data tables',
longDescription:
'Create and manage custom data tables. Store, query, and manipulate structured data within workflows.',
docsLink: 'https://docs.simstudio.ai/tools/table',
category: 'blocks',
bgColor: '#10B981',
icon: TableIcon,
subBlocks: [
{
id: 'operation',
title: 'Operation',
type: 'dropdown',
options: [
{ label: 'Query Rows', id: 'query_rows' },
{ label: 'Insert Row', id: 'insert_row' },
{ label: 'Upsert Row', id: 'upsert_row' },
{ label: 'Batch Insert Rows', id: 'batch_insert_rows' },
{ label: 'Update Rows by Filter', id: 'update_rows_by_filter' },
{ label: 'Delete Rows by Filter', id: 'delete_rows_by_filter' },
{ label: 'Update Row by ID', id: 'update_row' },
{ label: 'Delete Row by ID', id: 'delete_row' },
{ label: 'Get Row by ID', id: 'get_row' },
{ label: 'Get Schema', id: 'get_schema' },
],
value: () => 'query_rows',
},
// Table selector (for all operations)
{
id: 'tableId',
title: 'Table',
type: 'table-selector',
placeholder: 'Select a table',
required: true,
},
// Row ID for get/update/delete
{
id: 'rowId',
title: 'Row ID',
type: 'short-input',
placeholder: 'row_xxxxx',
condition: { field: 'operation', value: ['get_row', 'update_row', 'delete_row'] },
required: true,
},
// Insert/Update/Upsert Row data (single row)
{
id: 'data',
title: 'Row Data (JSON)',
type: 'code',
placeholder: '{"column_name": "value"}',
condition: {
field: 'operation',
value: ['insert_row', 'upsert_row', 'update_row', 'update_rows_by_filter'],
},
required: true,
wandConfig: {
enabled: true,
maintainHistory: true,
prompt: `Generate row data as a JSON object matching the table's column schema.
### CONTEXT
{context}
### INSTRUCTION
Return ONLY a valid JSON object with field values based on the table's columns. No explanations or markdown.
IMPORTANT: Reference the table schema visible in the table selector to know which columns exist and their types.
### EXAMPLES
Table with columns: email (string), name (string), age (number)
"user with email john@example.com and age 25"
→ {"email": "john@example.com", "name": "John", "age": 25}
Table with columns: customer_id (string), total (number), status (string)
"order with customer ID 123, total 99.99, status pending"
→ {"customer_id": "123", "total": 99.99, "status": "pending"}
Return ONLY the data JSON:`,
generationType: 'table-schema',
},
},
// Batch Insert - multiple rows
{
id: 'rows',
title: 'Rows Data (Array of JSON)',
type: 'code',
placeholder: '[{"col1": "val1"}, {"col1": "val2"}]',
condition: { field: 'operation', value: 'batch_insert_rows' },
required: true,
wandConfig: {
enabled: true,
maintainHistory: true,
prompt: `Generate an array of row data objects matching the table's column schema.
### CONTEXT
{context}
### INSTRUCTION
Return ONLY a valid JSON array of objects. Each object represents one row. No explanations or markdown.
Maximum ${TABLE_LIMITS.MAX_BATCH_INSERT_SIZE} rows per batch.
IMPORTANT: Reference the table schema to know which columns exist and their types.
### EXAMPLES
Table with columns: email (string), name (string), age (number)
"3 users: john@example.com age 25, jane@example.com age 30, bob@example.com age 28"
→ [
{"email": "john@example.com", "name": "John", "age": 25},
{"email": "jane@example.com", "name": "Jane", "age": 30},
{"email": "bob@example.com", "name": "Bob", "age": 28}
]
Return ONLY the rows array:`,
generationType: 'table-schema',
},
},
// Filter mode selector for bulk operations
{
id: 'bulkFilterMode',
title: 'Filter Mode',
type: 'dropdown',
options: [
{ label: 'Builder', id: 'builder' },
{ label: 'Editor', id: 'json' },
],
value: () => 'builder',
condition: {
field: 'operation',
value: ['update_rows_by_filter', 'delete_rows_by_filter'],
},
},
// Filter builder for bulk operations (visual)
{
id: 'bulkFilterBuilder',
title: 'Filter Conditions',
type: 'filter-builder',
required: {
field: 'operation',
value: ['update_rows_by_filter', 'delete_rows_by_filter'],
},
condition: {
field: 'operation',
value: ['update_rows_by_filter', 'delete_rows_by_filter'],
and: { field: 'bulkFilterMode', value: 'builder' },
},
},
// Filter for update/delete operations (JSON editor - bulk ops)
{
id: 'filter',
title: 'Filter',
type: 'code',
placeholder: '{"column_name": {"$eq": "value"}}',
condition: {
field: 'operation',
value: ['update_rows_by_filter', 'delete_rows_by_filter'],
and: { field: 'bulkFilterMode', value: 'json' },
},
required: true,
wandConfig: {
enabled: true,
maintainHistory: true,
prompt: `Generate filter criteria for selecting rows in a table.
### CONTEXT
{context}
### INSTRUCTION
Return ONLY a valid JSON filter object. No explanations or markdown.
IMPORTANT: Reference the table schema to know which columns exist and their types.
### OPERATORS
- **$eq**: Equals - {"column": {"$eq": "value"}} or {"column": "value"}
- **$ne**: Not equals - {"column": {"$ne": "value"}}
- **$gt**: Greater than - {"column": {"$gt": 18}}
- **$gte**: Greater than or equal - {"column": {"$gte": 100}}
- **$lt**: Less than - {"column": {"$lt": 90}}
- **$lte**: Less than or equal - {"column": {"$lte": 5}}
- **$in**: In array - {"column": {"$in": ["value1", "value2"]}}
- **$nin**: Not in array - {"column": {"$nin": ["value1", "value2"]}}
- **$contains**: String contains - {"column": {"$contains": "text"}}
### EXAMPLES
"rows where status is active"
→ {"status": "active"}
"rows where age is over 18 and status is pending"
→ {"age": {"$gte": 18}, "status": "pending"}
"rows where email contains gmail.com"
→ {"email": {"$contains": "gmail.com"}}
Return ONLY the filter JSON:`,
generationType: 'table-schema',
},
},
// Builder mode selector for query_rows (controls both filter and sort)
{
id: 'builderMode',
title: 'Input Mode',
type: 'dropdown',
options: [
{ label: 'Builder', id: 'builder' },
{ label: 'Editor', id: 'json' },
],
value: () => 'builder',
condition: { field: 'operation', value: 'query_rows' },
},
// Filter builder (visual)
{
id: 'filterBuilder',
title: 'Filter Conditions',
type: 'filter-builder',
condition: {
field: 'operation',
value: 'query_rows',
and: { field: 'builderMode', value: 'builder' },
},
},
// Sort builder (visual)
{
id: 'sortBuilder',
title: 'Sort Order',
type: 'sort-builder',
condition: {
field: 'operation',
value: 'query_rows',
and: { field: 'builderMode', value: 'builder' },
},
},
// Filter for query_rows (JSON editor mode or tool call context)
{
id: 'filter',
title: 'Filter',
type: 'code',
placeholder: '{"column_name": {"$eq": "value"}}',
condition: {
field: 'operation',
value: 'query_rows',
and: { field: 'builderMode', value: 'builder', not: true },
},
wandConfig: {
enabled: true,
maintainHistory: true,
prompt: `Generate filter criteria for selecting rows in a table.
### CONTEXT
{context}
### INSTRUCTION
Return ONLY a valid JSON filter object. No explanations or markdown.
IMPORTANT: Reference the table schema to know which columns exist and their types.
### OPERATORS
- **$eq**: Equals - {"column": {"$eq": "value"}} or {"column": "value"}
- **$ne**: Not equals - {"column": {"$ne": "value"}}
- **$gt**: Greater than - {"column": {"$gt": 18}}
- **$gte**: Greater than or equal - {"column": {"$gte": 100}}
- **$lt**: Less than - {"column": {"$lt": 90}}
- **$lte**: Less than or equal - {"column": {"$lte": 5}}
- **$in**: In array - {"column": {"$in": ["value1", "value2"]}}
- **$nin**: Not in array - {"column": {"$nin": ["value1", "value2"]}}
- **$contains**: String contains - {"column": {"$contains": "text"}}
### EXAMPLES
"rows where status is active"
→ {"status": "active"}
"rows where age is over 18 and status is pending"
→ {"age": {"$gte": 18}, "status": "pending"}
"rows where email contains gmail.com"
→ {"email": {"$contains": "gmail.com"}}
Return ONLY the filter JSON:`,
generationType: 'table-schema',
},
},
// Sort (JSON editor or tool call context)
{
id: 'sort',
title: 'Sort',
type: 'code',
placeholder: '{"column_name": "desc"}',
condition: {
field: 'operation',
value: 'query_rows',
and: { field: 'builderMode', value: 'builder', not: true },
},
wandConfig: {
enabled: true,
maintainHistory: true,
prompt: `Generate sort order for table query results.
### CONTEXT
{context}
### INSTRUCTION
Return ONLY a valid JSON object specifying sort order. No explanations or markdown.
IMPORTANT: Reference the table schema to know which columns exist. You can sort by any column or the built-in columns (createdAt, updatedAt).
### FORMAT
{"column_name": "asc" or "desc"}
You can specify multiple columns for multi-level sorting.
### EXAMPLES
Table with columns: name (string), age (number), email (string), createdAt (date)
"sort by newest first"
→ {"createdAt": "desc"}
"sort by name alphabetically"
→ {"name": "asc"}
"sort by age descending"
→ {"age": "desc"}
"sort by age descending, then name ascending"
→ {"age": "desc", "name": "asc"}
"sort by oldest created first"
→ {"createdAt": "asc"}
Return ONLY the sort JSON:`,
generationType: 'table-schema',
},
},
{
id: 'limit',
title: 'Limit',
type: 'short-input',
placeholder: '100',
condition: {
field: 'operation',
value: ['query_rows', 'update_rows_by_filter', 'delete_rows_by_filter'],
},
},
{
id: 'offset',
title: 'Offset',
type: 'short-input',
placeholder: '0',
condition: { field: 'operation', value: 'query_rows' },
value: () => '0',
},
],
tools: {
access: [
'table_insert_row',
'table_batch_insert_rows',
'table_upsert_row',
'table_update_row',
'table_update_rows_by_filter',
'table_delete_row',
'table_delete_rows_by_filter',
'table_query_rows',
'table_get_row',
'table_get_schema',
],
config: {
tool: (params) => {
const toolMap: Record<string, string> = {
insert_row: 'table_insert_row',
batch_insert_rows: 'table_batch_insert_rows',
upsert_row: 'table_upsert_row',
update_row: 'table_update_row',
update_rows_by_filter: 'table_update_rows_by_filter',
delete_row: 'table_delete_row',
delete_rows_by_filter: 'table_delete_rows_by_filter',
query_rows: 'table_query_rows',
get_row: 'table_get_row',
get_schema: 'table_get_schema',
}
return toolMap[params.operation] || 'table_query_rows'
},
params: (params) => {
const { operation, ...rest } = params
const transformer = paramTransformers[operation]
if (transformer) {
return transformer(rest as TableBlockParams)
}
return rest
},
},
},
inputs: {
operation: { type: 'string', description: 'Table operation to perform' },
tableId: { type: 'string', description: 'Table identifier' },
data: { type: 'json', description: 'Row data for insert/update' },
rows: { type: 'array', description: 'Array of row data for batch insert' },
rowId: { type: 'string', description: 'Row identifier for ID-based operations' },
bulkFilterMode: {
type: 'string',
description: 'Filter input mode for bulk operations (builder or json)',
},
bulkFilterBuilder: {
type: 'json',
description: 'Visual filter builder conditions for bulk operations',
},
filter: { type: 'json', description: 'Filter criteria for query/update/delete operations' },
limit: { type: 'number', description: 'Query or bulk operation limit' },
builderMode: {
type: 'string',
description: 'Input mode for filter and sort (builder or json)',
},
filterBuilder: { type: 'json', description: 'Visual filter builder conditions' },
sortBuilder: { type: 'json', description: 'Visual sort builder conditions' },
sort: { type: 'json', description: 'Sort order (JSON)' },
offset: { type: 'number', description: 'Query result offset' },
},
outputs: {
success: { type: 'boolean', description: 'Operation success status' },
row: {
type: 'json',
description: 'Single row data',
condition: {
field: 'operation',
value: ['get_row', 'insert_row', 'upsert_row', 'update_row'],
},
},
operation: {
type: 'string',
description: 'Operation performed (insert or update)',
condition: { field: 'operation', value: 'upsert_row' },
},
rows: {
type: 'array',
description: 'Array of rows',
condition: { field: 'operation', value: ['query_rows', 'batch_insert_rows'] },
},
rowCount: {
type: 'number',
description: 'Number of rows returned',
condition: { field: 'operation', value: 'query_rows' },
},
totalCount: {
type: 'number',
description: 'Total rows matching filter',
condition: { field: 'operation', value: 'query_rows' },
},
insertedCount: {
type: 'number',
description: 'Number of rows inserted',
condition: { field: 'operation', value: 'batch_insert_rows' },
},
updatedCount: {
type: 'number',
description: 'Number of rows updated',
condition: { field: 'operation', value: 'update_rows_by_filter' },
},
updatedRowIds: {
type: 'array',
description: 'IDs of updated rows',
condition: { field: 'operation', value: 'update_rows_by_filter' },
},
deletedCount: {
type: 'number',
description: 'Number of rows deleted',
condition: { field: 'operation', value: ['delete_row', 'delete_rows_by_filter'] },
},
deletedRowIds: {
type: 'array',
description: 'IDs of deleted rows',
condition: { field: 'operation', value: 'delete_rows_by_filter' },
},
name: {
type: 'string',
description: 'Table name',
condition: { field: 'operation', value: 'get_schema' },
},
columns: {
type: 'array',
description: 'Column definitions',
condition: { field: 'operation', value: 'get_schema' },
},
message: { type: 'string', description: 'Operation status message' },
},
}

View File

@@ -136,6 +136,7 @@ import { StarterBlock } from '@/blocks/blocks/starter'
import { StripeBlock } from '@/blocks/blocks/stripe'
import { SttBlock, SttV2Block } from '@/blocks/blocks/stt'
import { SupabaseBlock } from '@/blocks/blocks/supabase'
import { TableBlock } from '@/blocks/blocks/table'
import { TavilyBlock } from '@/blocks/blocks/tavily'
import { TelegramBlock } from '@/blocks/blocks/telegram'
import { TextractBlock, TextractV2Block } from '@/blocks/blocks/textract'
@@ -323,6 +324,7 @@ export const registry: Record<string, BlockConfig> = {
stt: SttBlock,
stt_v2: SttV2Block,
supabase: SupabaseBlock,
table: TableBlock,
tavily: TavilyBlock,
telegram: TelegramBlock,
textract: TextractBlock,

View File

@@ -27,6 +27,7 @@ export type GenerationType =
| 'typescript-function-body'
| 'json-schema'
| 'json-object'
| 'table-schema'
| 'system-prompt'
| 'custom-tool-schema'
| 'sql-query'
@@ -77,6 +78,8 @@ export type SubBlockType =
| 'mcp-dynamic-args' // MCP dynamic arguments based on tool schema
| 'input-format' // Input structure format
| 'response-format' // Response structure format
| 'filter-builder' // Filter conditions builder
| 'sort-builder' // Sort conditions builder
/**
* @deprecated Legacy trigger save subblock type.
*/
@@ -89,6 +92,7 @@ export type SubBlockType =
| 'workflow-input-mapper' // Dynamic workflow input mapper based on selected workflow
| 'text' // Read-only text display
| 'router-input' // Router route definitions with descriptions
| 'table-selector' // Table selector with link to view table
/**
* Selector types that require display name hydration
@@ -108,6 +112,7 @@ export const SELECTOR_TYPES_HYDRATION_REQUIRED: SubBlockType[] = [
'variables-input',
'mcp-server-selector',
'mcp-tool-selector',
'table-selector',
] as const
export type ExtractToolOutput<T> = T extends ToolResponse ? T['output'] : never

View File

@@ -4964,6 +4964,26 @@ export function BedrockIcon(props: SVGProps<SVGSVGElement>) {
)
}
/** Grid/table icon (24x24 viewBox, stroke-based) rendered for the Table block. */
export function TableIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
      xmlns='http://www.w3.org/2000/svg'
      viewBox='0 0 24 24'
      fill='none'
      stroke='currentColor'
      strokeWidth={2}
      strokeLinecap='round'
      strokeLinejoin='round'
      {...props}
    >
      {/* Outer rounded frame plus two horizontal and two vertical dividers */}
      <rect width='18' height='18' x='3' y='3' rx='2' />
      <path d='M3 9h18' />
      <path d='M3 15h18' />
      <path d='M9 3v18' />
      <path d='M15 3v18' />
    </svg>
  )
}
export function ReductoIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg

View File

@@ -73,6 +73,7 @@ const DialogContent = React.forwardRef<
}}
{...props}
>
<DialogPrimitive.Title>Dialog</DialogPrimitive.Title>
{children}
{!hideCloseButton && (
<DialogPrimitive.Close

View File

@@ -0,0 +1,375 @@
/**
* React Query hooks for managing user-defined tables.
*/
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import type { Filter, Sort, TableDefinition, TableRow } from '@/lib/table'
/**
 * Query-key factory for all table-related React Query caches.
 * Keys nest hierarchically (all → list/detail → rows) so invalidating a
 * parent key also invalidates its children.
 */
export const tableKeys = {
  all: ['tables'] as const,
  // Workspace-scoped table lists
  lists: () => [...tableKeys.all, 'list'] as const,
  list: (workspaceId?: string) => [...tableKeys.lists(), workspaceId ?? ''] as const,
  // Single-table detail caches
  details: () => [...tableKeys.all, 'detail'] as const,
  detail: (tableId: string) => [...tableKeys.details(), tableId] as const,
  // Row pages for a table, keyed by serialized pagination/filter/sort params
  rowsRoot: (tableId: string) => [...tableKeys.detail(tableId), 'rows'] as const,
  rows: (tableId: string, paramsKey: string) =>
    [...tableKeys.rowsRoot(tableId), paramsKey] as const,
}
/** Parameters for fetching a page of rows from a table. */
interface TableRowsParams {
  workspaceId: string
  tableId: string
  limit: number
  offset: number
  filter?: Filter | null
  sort?: Sort | null
}
/** Rows-page payload after unwrapping the API envelope. */
interface TableRowsResponse {
  rows: TableRow[]
  totalCount: number
}
/** Identifies which table a row mutation operates on. */
interface RowMutationContext {
  workspaceId: string
  tableId: string
}
/** Payload for updating a single row by id. */
interface UpdateTableRowParams {
  rowId: string
  data: Record<string, unknown>
}
/** Result of a bulk row deletion (only fully-successful deletes resolve). */
interface TableRowsDeleteResult {
  deletedRowIds: string[]
}
/**
 * Serialize pagination/filter/sort params into a stable cache-key string.
 * Optional filter/sort are normalized to null so logically-equal params
 * always produce the same key.
 */
function createRowsParamsKey(
  params: Omit<TableRowsParams, 'workspaceId' | 'tableId'>
): string {
  const normalized = {
    limit: params.limit,
    offset: params.offset,
    filter: params.filter ?? null,
    sort: params.sort ?? null,
  }
  return JSON.stringify(normalized)
}
/**
 * Fetch a single table definition from the API.
 * Accepts both enveloped ({ data: { table } }) and flat ({ table }) payloads.
 * Throws with the server-provided error message when the request fails.
 */
async function fetchTable(workspaceId: string, tableId: string): Promise<TableDefinition> {
  const url = `/api/table/${tableId}?workspaceId=${encodeURIComponent(workspaceId)}`
  const res = await fetch(url)
  if (!res.ok) {
    // Tolerate non-JSON error bodies
    const error = await res.json().catch(() => ({}))
    throw new Error(error.error || 'Failed to fetch table')
  }
  const json: { data?: { table: TableDefinition }; table?: TableDefinition } = await res.json()
  const payload = json.data || json
  return (payload as { table: TableDefinition }).table
}
/**
 * Fetch a page of rows for a table with optional filter and sort.
 * Accepts both enveloped ({ data: { rows, totalCount } }) and flat payloads,
 * defaulting to an empty page when fields are absent.
 */
async function fetchTableRows({
  workspaceId,
  tableId,
  limit,
  offset,
  filter,
  sort,
}: TableRowsParams): Promise<TableRowsResponse> {
  const query = new URLSearchParams({
    workspaceId,
    limit: String(limit),
    offset: String(offset),
  })
  // Filter/sort are optional; only serialize them when present
  if (filter) query.set('filter', JSON.stringify(filter))
  if (sort) query.set('sort', JSON.stringify(sort))
  const res = await fetch(`/api/table/${tableId}/rows?${query}`)
  if (!res.ok) {
    const error = await res.json().catch(() => ({}))
    throw new Error(error.error || 'Failed to fetch rows')
  }
  const json: {
    data?: { rows: TableRow[]; totalCount: number }
    rows?: TableRow[]
    totalCount?: number
  } = await res.json()
  const payload = json.data || json
  return {
    rows: (payload.rows || []) as TableRow[],
    totalCount: payload.totalCount || 0,
  }
}
function invalidateTableData(
queryClient: ReturnType<typeof useQueryClient>,
workspaceId: string,
tableId: string
) {
queryClient.invalidateQueries({ queryKey: tableKeys.list(workspaceId) })
queryClient.invalidateQueries({ queryKey: tableKeys.detail(tableId) })
queryClient.invalidateQueries({ queryKey: tableKeys.rowsRoot(tableId) })
}
/**
 * Fetch all tables for a workspace.
 * Disabled until a workspaceId is available; results are considered fresh
 * for 30 seconds.
 */
export function useTablesList(workspaceId?: string) {
  return useQuery({
    queryKey: tableKeys.list(workspaceId),
    queryFn: async () => {
      if (!workspaceId) throw new Error('Workspace ID required')
      const res = await fetch(`/api/table?workspaceId=${encodeURIComponent(workspaceId)}`)
      if (!res.ok) {
        // Guard against non-JSON error bodies (e.g. HTML 500 pages) so the
        // HTTP failure isn't masked by a JSON parse error — consistent with
        // the other fetchers in this module.
        const error = await res.json().catch(() => ({}))
        throw new Error(error.error || 'Failed to fetch tables')
      }
      const response = await res.json()
      return (response.data?.tables || []) as TableDefinition[]
    },
    enabled: Boolean(workspaceId),
    staleTime: 30 * 1000,
  })
}
/**
 * Fetch a single table by id.
 * The query is disabled until both ids are available; the runtime guard in
 * queryFn replaces the previous unsafe `as string` casts and matches the
 * guard style used by useTablesList.
 */
export function useTable(workspaceId: string | undefined, tableId: string | undefined) {
  return useQuery({
    queryKey: tableKeys.detail(tableId ?? ''),
    queryFn: async () => {
      if (!workspaceId || !tableId) throw new Error('Workspace ID and table ID required')
      return fetchTable(workspaceId, tableId)
    },
    enabled: Boolean(workspaceId && tableId),
    staleTime: 30 * 1000,
  })
}
/**
 * Fetch rows for a table with pagination/filter/sort.
 * Keeps the previous page visible while a new page loads
 * (keepPreviousData), and caches each page under a serialized params key.
 */
export function useTableRows({
  workspaceId,
  tableId,
  limit,
  offset,
  filter,
  sort,
  enabled = true,
}: TableRowsParams & { enabled?: boolean }) {
  const paramsKey = createRowsParamsKey({ limit, offset, filter, sort })
  const requestParams = { workspaceId, tableId, limit, offset, filter, sort }
  return useQuery({
    queryKey: tableKeys.rows(tableId, paramsKey),
    queryFn: () => fetchTableRows(requestParams),
    // Caller can disable explicitly; ids must also both be present
    enabled: enabled && Boolean(workspaceId && tableId),
    placeholderData: keepPreviousData,
  })
}
/**
 * Create a new table in a workspace.
 * Invalidates the workspace's table list on success so it refetches.
 */
export function useCreateTable(workspaceId: string) {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async (params: {
      name: string
      description?: string
      schema: { columns: Array<{ name: string; type: string; required?: boolean }> }
    }) => {
      const res = await fetch('/api/table', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ ...params, workspaceId }),
      })
      if (!res.ok) {
        // Tolerate non-JSON error bodies so the HTTP failure isn't masked
        // by a JSON parse error — consistent with the row mutation hooks.
        const error = await res.json().catch(() => ({}))
        throw new Error(error.error || 'Failed to create table')
      }
      return res.json()
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: tableKeys.list(workspaceId) })
    },
  })
}
/**
 * Delete a table from a workspace.
 * Invalidates the workspace's table list on success so it refetches.
 */
export function useDeleteTable(workspaceId: string) {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async (tableId: string) => {
      const res = await fetch(
        `/api/table/${tableId}?workspaceId=${encodeURIComponent(workspaceId)}`,
        {
          method: 'DELETE',
        }
      )
      if (!res.ok) {
        // Tolerate non-JSON error bodies so the HTTP failure isn't masked
        // by a JSON parse error — consistent with the row mutation hooks.
        const error = await res.json().catch(() => ({}))
        throw new Error(error.error || 'Failed to delete table')
      }
      return res.json()
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: tableKeys.list(workspaceId) })
    },
  })
}
/**
 * Create a row in a table.
 * On success, invalidates the table list, detail, and row caches.
 */
export function useCreateTableRow({ workspaceId, tableId }: RowMutationContext) {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async (data: Record<string, unknown>) => {
      const res = await fetch(`/api/table/${tableId}/rows`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ workspaceId, data }),
      })
      if (res.ok) return res.json()
      const error = await res.json().catch(() => ({}))
      throw new Error(error.error || 'Failed to add row')
    },
    onSuccess: () => invalidateTableData(queryClient, workspaceId, tableId),
  })
}
/**
 * Update a single row in a table.
 * On success, invalidates the table list, detail, and row caches.
 */
export function useUpdateTableRow({ workspaceId, tableId }: RowMutationContext) {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async ({ rowId, data }: UpdateTableRowParams) => {
      const res = await fetch(`/api/table/${tableId}/rows/${rowId}`, {
        method: 'PATCH',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ workspaceId, data }),
      })
      if (res.ok) return res.json()
      const error = await res.json().catch(() => ({}))
      throw new Error(error.error || 'Failed to update row')
    },
    onSuccess: () => invalidateTableData(queryClient, workspaceId, tableId),
  })
}
/**
 * Delete a single row from a table.
 * On success, invalidates the table list, detail, and row caches.
 */
export function useDeleteTableRow({ workspaceId, tableId }: RowMutationContext) {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async (rowId: string) => {
      // workspaceId travels in the request body for authorization
      const res = await fetch(`/api/table/${tableId}/rows/${rowId}`, {
        method: 'DELETE',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ workspaceId }),
      })
      if (res.ok) return res.json()
      const error = await res.json().catch(() => ({}))
      throw new Error(error.error || 'Failed to delete row')
    },
    onSuccess: () => invalidateTableData(queryClient, workspaceId, tableId),
  })
}
/**
 * Delete multiple rows from a table.
 * Returns both deleted ids and failure details for partial-failure UI.
 */
export function useDeleteTableRows({ workspaceId, tableId }: RowMutationContext) {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async (rowIds: string[]): Promise<TableRowsDeleteResult> => {
      // De-duplicate so the fallback requested count reflects distinct rows
      const uniqueRowIds = [...new Set(rowIds)]
      const res = await fetch(`/api/table/${tableId}/rows`, {
        method: 'DELETE',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ workspaceId, rowIds: uniqueRowIds }),
      })
      const json: {
        error?: string
        data?: { deletedRowIds?: string[]; missingRowIds?: string[]; requestedCount?: number }
      } = await res.json().catch(() => ({}))
      if (!res.ok) {
        throw new Error(json.error || 'Failed to delete rows')
      }
      const deletedRowIds = json.data?.deletedRowIds || []
      const missingRowIds = json.data?.missingRowIds || []
      if (missingRowIds.length > 0) {
        // Surface partial failures as an error with enough detail for the UI
        const failureCount = missingRowIds.length
        const totalCount = json.data?.requestedCount ?? uniqueRowIds.length
        const successCount = deletedRowIds.length
        const firstMissing = missingRowIds[0]
        const successSuffix = successCount > 0 ? ` (${successCount} deleted successfully)` : ''
        throw new Error(
          `Failed to delete ${failureCount} of ${totalCount} row(s)${successSuffix}. Row not found: ${firstMissing}`
        )
      }
      return { deletedRowIds }
    },
    // onSettled (not onSuccess) so caches refresh even after partial failure
    onSettled: () => invalidateTableData(queryClient, workspaceId, tableId),
  })
}

View File

@@ -0,0 +1,303 @@
/**
* @vitest-environment node
*
* SQL Builder Unit Tests
*
* Tests for the table SQL query builder utilities including filter and sort clause generation.
*/
import { drizzleOrmMock } from '@sim/testing'
import { describe, expect, it, vi } from 'vitest'
vi.mock('drizzle-orm', () => drizzleOrmMock)
import { buildFilterClause, buildSortClause } from '../sql'
import type { Filter } from '../types'
describe('SQL Builder', () => {
  describe('buildFilterClause', () => {
    const tableName = 'user_table_rows'
    it('should return undefined for empty filter', () => {
      const result = buildFilterClause({}, tableName)
      expect(result).toBeUndefined()
    })
    // NOTE(review): most tests below only assert that a clause object is
    // produced; exact SQL text is not checked here because drizzle-orm's sql
    // helpers are mocked (see vi.mock above). Error paths are asserted exactly.
    it('should handle simple equality filter', () => {
      const filter: Filter = { name: 'John' }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    it('should handle $eq operator', () => {
      const filter: Filter = { status: { $eq: 'active' } }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    it('should handle $ne operator', () => {
      const filter: Filter = { status: { $ne: 'deleted' } }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    it('should handle $gt operator', () => {
      const filter: Filter = { age: { $gt: 18 } }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    it('should handle $gte operator', () => {
      const filter: Filter = { age: { $gte: 18 } }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    it('should handle $lt operator', () => {
      const filter: Filter = { age: { $lt: 65 } }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    it('should handle $lte operator', () => {
      const filter: Filter = { age: { $lte: 65 } }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    it('should handle $in operator with single value', () => {
      const filter: Filter = { status: { $in: ['active'] } }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    it('should handle $in operator with multiple values', () => {
      const filter: Filter = { status: { $in: ['active', 'pending'] } }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    it('should handle $nin operator', () => {
      const filter: Filter = { status: { $nin: ['deleted', 'archived'] } }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    it('should handle $contains operator', () => {
      const filter: Filter = { name: { $contains: 'john' } }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    // Logical combinators: $or, $and, implicit AND, and nesting
    it('should handle $or logical operator', () => {
      const filter: Filter = {
        $or: [{ status: 'active' }, { status: 'pending' }],
      }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    it('should handle $and logical operator', () => {
      const filter: Filter = {
        $and: [{ status: 'active' }, { age: { $gt: 18 } }],
      }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    it('should handle multiple conditions combined with AND', () => {
      const filter: Filter = {
        status: 'active',
        age: { $gt: 18 },
      }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    it('should handle nested $or and $and', () => {
      const filter: Filter = {
        $or: [{ $and: [{ status: 'active' }, { verified: true }] }, { role: 'admin' }],
      }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    // Invalid input must throw rather than produce unsafe SQL
    it('should throw error for invalid field name', () => {
      const filter: Filter = { 'invalid-field': 'value' }
      expect(() => buildFilterClause(filter, tableName)).toThrow('Invalid field name')
    })
    it('should throw error for invalid operator', () => {
      const filter = { name: { $invalid: 'value' } } as unknown as Filter
      expect(() => buildFilterClause(filter, tableName)).toThrow('Invalid operator')
    })
    it('should skip undefined values', () => {
      const filter: Filter = { name: undefined, status: 'active' }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    // Value-type coverage: booleans, nulls, and numbers
    it('should handle boolean values', () => {
      const filter: Filter = { active: true }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    it('should handle null values', () => {
      const filter: Filter = { deleted_at: null }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
    it('should handle numeric values', () => {
      const filter: Filter = { count: 42 }
      const result = buildFilterClause(filter, tableName)
      expect(result).toBeDefined()
    })
  })
  describe('buildSortClause', () => {
    const tableName = 'user_table_rows'
    it('should return undefined for empty sort', () => {
      const result = buildSortClause({}, tableName)
      expect(result).toBeUndefined()
    })
    it('should handle single field ascending sort', () => {
      const sort = { name: 'asc' as const }
      const result = buildSortClause(sort, tableName)
      expect(result).toBeDefined()
    })
    it('should handle single field descending sort', () => {
      const sort = { name: 'desc' as const }
      const result = buildSortClause(sort, tableName)
      expect(result).toBeDefined()
    })
    it('should handle multiple fields sort', () => {
      const sort = { name: 'asc' as const, created_at: 'desc' as const }
      const result = buildSortClause(sort, tableName)
      expect(result).toBeDefined()
    })
    // Built-in timestamp columns are sortable without schema info
    it('should handle createdAt field directly', () => {
      const sort = { createdAt: 'desc' as const }
      const result = buildSortClause(sort, tableName)
      expect(result).toBeDefined()
    })
    it('should handle updatedAt field directly', () => {
      const sort = { updatedAt: 'asc' as const }
      const result = buildSortClause(sort, tableName)
      expect(result).toBeDefined()
    })
    it('should throw error for invalid field name', () => {
      const sort = { 'invalid-field': 'asc' as const }
      expect(() => buildSortClause(sort, tableName)).toThrow('Invalid field name')
    })
    it('should throw error for invalid direction', () => {
      const sort = { name: 'invalid' as 'asc' | 'desc' }
      expect(() => buildSortClause(sort, tableName)).toThrow('Invalid sort direction')
    })
    // Optional column metadata selects the sort strategy (numeric/date/text)
    it('should handle numeric column type for proper numeric sorting', () => {
      const sort = { salary: 'desc' as const }
      const columns = [{ name: 'salary', type: 'number' as const }]
      const result = buildSortClause(sort, tableName, columns)
      expect(result).toBeDefined()
    })
    it('should handle date column type for chronological sorting', () => {
      const sort = { birthDate: 'asc' as const }
      const columns = [{ name: 'birthDate', type: 'date' as const }]
      const result = buildSortClause(sort, tableName, columns)
      expect(result).toBeDefined()
    })
    it('should use text sorting for string columns', () => {
      const sort = { name: 'asc' as const }
      const columns = [{ name: 'name', type: 'string' as const }]
      const result = buildSortClause(sort, tableName, columns)
      expect(result).toBeDefined()
    })
    it('should fall back to text sorting when column type is unknown', () => {
      const sort = { unknownField: 'asc' as const }
      // No columns provided
      const result = buildSortClause(sort, tableName)
      expect(result).toBeDefined()
    })
  })
  // Field names must look like identifiers; anything else is rejected,
  // which is the first line of defense against SQL injection.
  describe('Field Name Validation', () => {
    const tableName = 'user_table_rows'
    it('should accept valid field names', () => {
      const validNames = ['name', 'user_id', '_private', 'Count123', 'a']
      for (const name of validNames) {
        const filter: Filter = { [name]: 'value' }
        expect(() => buildFilterClause(filter, tableName)).not.toThrow()
      }
    })
    it('should reject field names starting with number', () => {
      const filter: Filter = { '123name': 'value' }
      expect(() => buildFilterClause(filter, tableName)).toThrow('Invalid field name')
    })
    it('should reject field names with special characters', () => {
      const invalidNames = ['field-name', 'field.name', 'field name', 'field@name']
      for (const name of invalidNames) {
        const filter: Filter = { [name]: 'value' }
        expect(() => buildFilterClause(filter, tableName)).toThrow('Invalid field name')
      }
    })
    it('should reject SQL injection attempts', () => {
      const sqlInjectionAttempts = ["'; DROP TABLE users; --", 'name OR 1=1', 'name; DELETE FROM']
      for (const attempt of sqlInjectionAttempts) {
        const filter: Filter = { [attempt]: 'value' }
        expect(() => buildFilterClause(filter, tableName)).toThrow('Invalid field name')
      }
    })
  })
})

View File

@@ -0,0 +1,366 @@
/**
* @vitest-environment node
*/
import { describe, expect, it } from 'vitest'
import { TABLE_LIMITS } from '../constants'
import {
type ColumnDefinition,
getUniqueColumns,
type TableSchema,
validateColumnDefinition,
validateRowAgainstSchema,
validateRowSize,
validateTableName,
validateTableSchema,
validateUniqueConstraints,
} from '../validation'
describe('Validation', () => {
describe('validateTableName', () => {
it('should accept valid table names', () => {
const validNames = ['users', 'user_data', '_private', 'Users123', 'a']
for (const name of validNames) {
const result = validateTableName(name)
expect(result.valid).toBe(true)
expect(result.errors).toHaveLength(0)
}
})
it('should reject empty name', () => {
const result = validateTableName('')
expect(result.valid).toBe(false)
expect(result.errors).toContain('Table name is required')
})
it('should reject null/undefined name', () => {
const result1 = validateTableName(null as unknown as string)
expect(result1.valid).toBe(false)
const result2 = validateTableName(undefined as unknown as string)
expect(result2.valid).toBe(false)
})
it('should reject names starting with number', () => {
const result = validateTableName('123table')
expect(result.valid).toBe(false)
expect(result.errors[0]).toContain('must start with letter or underscore')
})
it('should reject names with special characters', () => {
const invalidNames = ['table-name', 'table.name', 'table name', 'table@name']
for (const name of invalidNames) {
const result = validateTableName(name)
expect(result.valid).toBe(false)
}
})
it('should reject names exceeding max length', () => {
const longName = 'a'.repeat(TABLE_LIMITS.MAX_TABLE_NAME_LENGTH + 1)
const result = validateTableName(longName)
expect(result.valid).toBe(false)
expect(result.errors[0]).toContain('exceeds maximum length')
})
})
describe('validateColumnDefinition', () => {
it('should accept valid column definition', () => {
const column: ColumnDefinition = {
name: 'email',
type: 'string',
required: true,
unique: true,
}
const result = validateColumnDefinition(column)
expect(result.valid).toBe(true)
})
it('should accept all valid column types', () => {
const types = ['string', 'number', 'boolean', 'date', 'json'] as const
for (const type of types) {
const result = validateColumnDefinition({ name: 'test', type })
expect(result.valid).toBe(true)
}
})
it('should reject empty column name', () => {
const result = validateColumnDefinition({ name: '', type: 'string' })
expect(result.valid).toBe(false)
expect(result.errors).toContain('Column name is required')
})
it('should reject invalid column type', () => {
const result = validateColumnDefinition({
name: 'test',
type: 'invalid' as any,
})
expect(result.valid).toBe(false)
expect(result.errors[0]).toContain('invalid type')
})
it('should reject column name exceeding max length', () => {
const longName = 'a'.repeat(TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH + 1)
const result = validateColumnDefinition({ name: longName, type: 'string' })
expect(result.valid).toBe(false)
expect(result.errors[0]).toContain('exceeds maximum length')
})
})
describe('validateTableSchema', () => {
it('should accept valid schema', () => {
const schema: TableSchema = {
columns: [
{ name: 'id', type: 'string', required: true, unique: true },
{ name: 'name', type: 'string', required: true },
{ name: 'age', type: 'number' },
],
}
const result = validateTableSchema(schema)
expect(result.valid).toBe(true)
})
it('should reject empty columns array', () => {
const schema: TableSchema = { columns: [] }
const result = validateTableSchema(schema)
expect(result.valid).toBe(false)
expect(result.errors).toContain('Schema must have at least one column')
})
it('should reject duplicate column names', () => {
const schema: TableSchema = {
columns: [
{ name: 'id', type: 'string' },
{ name: 'ID', type: 'number' },
],
}
const result = validateTableSchema(schema)
expect(result.valid).toBe(false)
expect(result.errors).toContain('Duplicate column names found')
})
it('should reject null schema', () => {
const result = validateTableSchema(null as unknown as TableSchema)
expect(result.valid).toBe(false)
expect(result.errors).toContain('Schema is required')
})
it('should reject schema without columns array', () => {
const result = validateTableSchema({} as TableSchema)
expect(result.valid).toBe(false)
expect(result.errors).toContain('Schema must have columns array')
})
it('should reject schema exceeding max columns', () => {
const columns = Array.from({ length: TABLE_LIMITS.MAX_COLUMNS_PER_TABLE + 1 }, (_, i) => ({
name: `col_${i}`,
type: 'string' as const,
}))
const result = validateTableSchema({ columns })
expect(result.valid).toBe(false)
expect(result.errors[0]).toContain('exceeds maximum columns')
})
})
describe('validateRowSize', () => {
it('should accept row within size limit', () => {
const data = { name: 'test', value: 123 }
const result = validateRowSize(data)
expect(result.valid).toBe(true)
})
it('should reject row exceeding size limit', () => {
const largeString = 'a'.repeat(TABLE_LIMITS.MAX_ROW_SIZE_BYTES + 1)
const data = { content: largeString }
const result = validateRowSize(data)
expect(result.valid).toBe(false)
expect(result.errors[0]).toContain('exceeds limit')
})
})
describe('validateRowAgainstSchema', () => {
  // Covers every supported column type; only "name" is required.
  const testSchema: TableSchema = {
    columns: [
      { name: 'name', type: 'string', required: true },
      { name: 'age', type: 'number' },
      { name: 'active', type: 'boolean' },
      { name: 'created', type: 'date' },
      { name: 'metadata', type: 'json' },
    ],
  }
  it('should accept valid row data', () => {
    const row = {
      name: 'John',
      age: 30,
      active: true,
      created: '2024-01-01',
      metadata: { key: 'value' },
    }
    const outcome = validateRowAgainstSchema(row, testSchema)
    expect(outcome.valid).toBe(true)
  })
  it('should reject missing required field', () => {
    const outcome = validateRowAgainstSchema({ age: 30 }, testSchema)
    expect(outcome.valid).toBe(false)
    expect(outcome.errors).toContain('Missing required field: name')
  })
  it('should reject wrong type for string field', () => {
    const outcome = validateRowAgainstSchema({ name: 123 }, testSchema)
    expect(outcome.valid).toBe(false)
    expect(outcome.errors[0]).toContain('must be string')
  })
  it('should reject wrong type for number field', () => {
    const outcome = validateRowAgainstSchema({ name: 'John', age: 'thirty' }, testSchema)
    expect(outcome.valid).toBe(false)
    expect(outcome.errors[0]).toContain('must be number')
  })
  it('should reject NaN for number field', () => {
    const outcome = validateRowAgainstSchema({ name: 'John', age: Number.NaN }, testSchema)
    expect(outcome.valid).toBe(false)
    expect(outcome.errors[0]).toContain('must be number')
  })
  it('should reject wrong type for boolean field', () => {
    const outcome = validateRowAgainstSchema({ name: 'John', active: 'yes' }, testSchema)
    expect(outcome.valid).toBe(false)
    expect(outcome.errors[0]).toContain('must be boolean')
  })
  it('should reject invalid date string', () => {
    const outcome = validateRowAgainstSchema({ name: 'John', created: 'not-a-date' }, testSchema)
    expect(outcome.valid).toBe(false)
    expect(outcome.errors[0]).toContain('must be valid date')
  })
  it('should accept valid ISO date string', () => {
    const outcome = validateRowAgainstSchema(
      { name: 'John', created: '2024-01-15T10:30:00Z' },
      testSchema
    )
    expect(outcome.valid).toBe(true)
  })
  it('should accept Date object', () => {
    const outcome = validateRowAgainstSchema({ name: 'John', created: new Date() }, testSchema)
    expect(outcome.valid).toBe(true)
  })
  it('should allow null for optional fields', () => {
    const outcome = validateRowAgainstSchema({ name: 'John', age: null }, testSchema)
    expect(outcome.valid).toBe(true)
  })
  it('should allow undefined for optional fields', () => {
    const outcome = validateRowAgainstSchema({ name: 'John' }, testSchema)
    expect(outcome.valid).toBe(true)
  })
  it('should reject string exceeding max length', () => {
    const tooLong = 'a'.repeat(TABLE_LIMITS.MAX_STRING_VALUE_LENGTH + 1)
    const outcome = validateRowAgainstSchema({ name: tooLong }, testSchema)
    expect(outcome.valid).toBe(false)
    expect(outcome.errors[0]).toContain('exceeds max string length')
  })
})
describe('getUniqueColumns', () => {
  it('should return only columns with unique=true', () => {
    // Mix of explicit true, explicit false, and omitted unique flags.
    const tableSchema: TableSchema = {
      columns: [
        { name: 'id', type: 'string', unique: true },
        { name: 'email', type: 'string', unique: true },
        { name: 'name', type: 'string' },
        { name: 'count', type: 'number', unique: false },
      ],
    }
    const uniqueCols = getUniqueColumns(tableSchema)
    expect(uniqueCols).toHaveLength(2)
    expect(uniqueCols.map((col) => col.name)).toEqual(['id', 'email'])
  })
  it('should return empty array when no unique columns', () => {
    const tableSchema: TableSchema = {
      columns: [
        { name: 'name', type: 'string' },
        { name: 'value', type: 'number' },
      ],
    }
    expect(getUniqueColumns(tableSchema)).toHaveLength(0)
  })
})
describe('validateUniqueConstraints', () => {
  // "id" and "email" are unique; "name" carries no constraint.
  const uniqueSchema: TableSchema = {
    columns: [
      { name: 'id', type: 'string', unique: true },
      { name: 'email', type: 'string', unique: true },
      { name: 'name', type: 'string' },
    ],
  }
  const storedRows = [
    { id: 'row1', data: { id: 'abc123', email: 'john@example.com', name: 'John' } },
    { id: 'row2', data: { id: 'def456', email: 'jane@example.com', name: 'Jane' } },
  ]
  it('should accept data with unique values', () => {
    const candidate = { id: 'xyz789', email: 'new@example.com', name: 'New User' }
    const outcome = validateUniqueConstraints(candidate, uniqueSchema, storedRows)
    expect(outcome.valid).toBe(true)
  })
  it('should reject duplicate unique value', () => {
    const candidate = { id: 'abc123', email: 'new@example.com', name: 'New User' }
    const outcome = validateUniqueConstraints(candidate, uniqueSchema, storedRows)
    expect(outcome.valid).toBe(false)
    expect(outcome.errors[0]).toContain('must be unique')
    expect(outcome.errors[0]).toContain('abc123')
  })
  it('should be case-insensitive for string comparisons', () => {
    const candidate = { id: 'ABC123', email: 'new@example.com', name: 'New User' }
    expect(validateUniqueConstraints(candidate, uniqueSchema, storedRows).valid).toBe(false)
  })
  it('should exclude specified row from checks (for updates)', () => {
    const candidate = { id: 'abc123', email: 'john@example.com', name: 'John Updated' }
    const outcome = validateUniqueConstraints(candidate, uniqueSchema, storedRows, 'row1')
    expect(outcome.valid).toBe(true)
  })
  it('should allow null values for unique columns', () => {
    const candidate = { id: null, email: 'new@example.com', name: 'New User' }
    expect(validateUniqueConstraints(candidate, uniqueSchema, storedRows).valid).toBe(true)
  })
  it('should allow undefined values for unique columns', () => {
    const candidate = { email: 'new@example.com', name: 'New User' }
    expect(validateUniqueConstraints(candidate, uniqueSchema, storedRows).valid).toBe(true)
  })
  it('should report multiple violations', () => {
    const candidate = { id: 'abc123', email: 'john@example.com', name: 'New User' }
    const outcome = validateUniqueConstraints(candidate, uniqueSchema, storedRows)
    expect(outcome.valid).toBe(false)
    expect(outcome.errors).toHaveLength(2)
  })
})
})

View File

@@ -0,0 +1,83 @@
/**
* Billing helpers for table feature limits.
*
* Uses workspace billing account to determine plan-based limits.
*/
import { createLogger } from '@sim/logger'
import { getUserSubscriptionState } from '@/lib/billing/core/subscription'
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
import { type PlanName, TABLE_PLAN_LIMITS, type TablePlanLimits } from './constants'
const logger = createLogger('TableBilling')
/**
* Gets the table limits for a workspace based on its billing plan.
*
* Uses the workspace's billed account user to determine the subscription plan,
* then returns the corresponding table limits.
*
* @param workspaceId - The workspace ID to get limits for
* @returns Table limits based on the workspace's billing plan
*/
export async function getWorkspaceTableLimits(workspaceId: string): Promise<TablePlanLimits> {
try {
const billedAccountUserId = await getWorkspaceBilledAccountUserId(workspaceId)
if (!billedAccountUserId) {
logger.warn('No billed account found for workspace, using free tier limits', { workspaceId })
return TABLE_PLAN_LIMITS.free
}
const subscriptionState = await getUserSubscriptionState(billedAccountUserId)
const planName = subscriptionState.planName as PlanName
const limits = TABLE_PLAN_LIMITS[planName] ?? TABLE_PLAN_LIMITS.free
logger.info('Retrieved workspace table limits', {
workspaceId,
billedAccountUserId,
planName,
limits,
})
return limits
} catch (error) {
logger.error('Error getting workspace table limits, falling back to free tier', {
workspaceId,
error,
})
return TABLE_PLAN_LIMITS.free
}
}
/**
 * Determines whether the workspace may create another table under its plan.
 *
 * @param workspaceId - The workspace ID to check
 * @param currentTableCount - The current number of tables in the workspace
 * @returns Object with canCreate boolean and limit info
 */
export async function canCreateTable(
  workspaceId: string,
  currentTableCount: number
): Promise<{ canCreate: boolean; maxTables: number; currentCount: number }> {
  const { maxTables } = await getWorkspaceTableLimits(workspaceId)
  return {
    canCreate: currentTableCount < maxTables,
    maxTables,
    currentCount: currentTableCount,
  }
}
/**
 * Looks up the per-table row cap for a workspace's billing plan.
 *
 * @param workspaceId - The workspace ID
 * @returns Maximum rows per table (-1 for unlimited)
 */
export async function getMaxRowsPerTable(workspaceId: string): Promise<number> {
  const { maxRowsPerTable } = await getWorkspaceTableLimits(workspaceId)
  return maxRowsPerTable
}

View File

@@ -0,0 +1,59 @@
/**
 * Limits and constants for user-defined tables.
 *
 * TABLE_LIMITS holds platform-wide caps; plan-specific caps live in
 * TABLE_PLAN_LIMITS below.
 */
export const TABLE_LIMITS = {
/** Hard cap on table count per workspace (plan caps may be lower). */
MAX_TABLES_PER_WORKSPACE: 100,
/** Default per-table row cap when no plan-specific maxRows is supplied. */
MAX_ROWS_PER_TABLE: 10000,
MAX_ROW_SIZE_BYTES: 100 * 1024, // 100KB
MAX_COLUMNS_PER_TABLE: 50,
MAX_TABLE_NAME_LENGTH: 50,
MAX_COLUMN_NAME_LENGTH: 50,
/** Maximum length of a single string cell value. */
MAX_STRING_VALUE_LENGTH: 10000,
MAX_DESCRIPTION_LENGTH: 500,
/** Rows returned by a query when no explicit limit is given. */
DEFAULT_QUERY_LIMIT: 100,
/** Upper bound on a caller-supplied query limit. */
MAX_QUERY_LIMIT: 1000,
/** Batch size for bulk update operations */
UPDATE_BATCH_SIZE: 100,
/** Batch size for bulk delete operations */
DELETE_BATCH_SIZE: 1000,
/** Maximum rows per batch insert */
MAX_BATCH_INSERT_SIZE: 1000,
/** Maximum rows per bulk update/delete operation */
MAX_BULK_OPERATION_SIZE: 1000,
} as const
/**
 * Plan-based table limits.
 * Keys are billing plan names; see PlanName below.
 */
export const TABLE_PLAN_LIMITS = {
free: {
maxTables: 3,
maxRowsPerTable: 1000,
},
pro: {
maxTables: 25,
maxRowsPerTable: 5000,
},
team: {
maxTables: 100,
maxRowsPerTable: 10000,
},
enterprise: {
maxTables: 10000,
maxRowsPerTable: 1000000,
},
} as const
/** Union of supported billing plan names ('free' | 'pro' | 'team' | 'enterprise'). */
export type PlanName = keyof typeof TABLE_PLAN_LIMITS
/** Shape of a single plan's table limits. */
export interface TablePlanLimits {
maxTables: number
maxRowsPerTable: number
}
/** Column value types supported by user-defined table schemas. */
export const COLUMN_TYPES = ['string', 'number', 'boolean', 'date', 'json'] as const
/** Valid identifier: letter/underscore first, then alphanumerics or underscores (case-insensitive). */
export const NAME_PATTERN = /^[a-z_][a-z0-9_]*$/i
/** Physical SQL table name that stores all user-table rows. */
export const USER_TABLE_ROWS_SQL_NAME = 'user_table_rows'

View File

@@ -0,0 +1 @@
export * from './use-table-columns'

View File

@@ -0,0 +1,36 @@
import { useMemo } from 'react'
import { useTable } from '@/hooks/queries/tables'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import type { ColumnOption } from '../types'
interface UseTableColumnsOptions {
  tableId: string | null | undefined
  includeBuiltIn?: boolean
}

/**
 * Exposes a table's schema columns as dropdown options.
 *
 * Reads the active workspace from the workflow registry, fetches the table
 * definition, and maps each schema column to a { value, label } option.
 * When includeBuiltIn is set, the automatic createdAt/updatedAt columns are
 * appended after the schema-defined ones.
 */
export function useTableColumns({ tableId, includeBuiltIn = false }: UseTableColumnsOptions) {
  const workspaceId = useWorkflowRegistry((state) => state.hydration.workspaceId)
  const { data: tableData } = useTable(workspaceId ?? undefined, tableId ?? undefined)

  const schemaColumns = useMemo<ColumnOption[]>(() => {
    const columns = tableData?.schema?.columns || []
    return columns.map((column) => ({ value: column.name, label: column.name }))
  }, [tableData])

  return useMemo(() => {
    if (!includeBuiltIn) return schemaColumns
    return [
      ...schemaColumns,
      { value: 'createdAt', label: 'createdAt' },
      { value: 'updatedAt', label: 'updatedAt' },
    ]
  }, [includeBuiltIn, schemaColumns])
}

View File

@@ -0,0 +1,15 @@
/**
* Table utilities module.
*
* Hooks are not re-exported here to avoid pulling React into server code.
* Import hooks directly from '@/lib/table/hooks' in client components.
*/
export * from './billing'
export * from './constants'
export * from './llm'
export * from './query-builder'
export * from './service'
export * from './sql'
export * from './types'
export * from './validation'

View File

@@ -0,0 +1,201 @@
/**
* LLM tool enrichment utilities for table operations.
*
* Provides functions to enrich tool descriptions and parameter schemas
* with table-specific information so LLMs can construct proper queries.
*/
import type { TableSummary } from '../types'
/**
 * Operations that use filters and need filter-specific enrichment.
 * Tool IDs in this set receive filter syntax examples and "always filter"
 * instructions in their descriptions/parameters.
 */
export const FILTER_OPERATIONS = new Set([
'table_query_rows',
'table_update_rows_by_filter',
'table_delete_rows_by_filter',
])
/**
 * Operations that need column info for data construction.
 * Tool IDs in this set receive column listings and example data payloads.
 */
export const DATA_OPERATIONS = new Set([
'table_insert_row',
'table_batch_insert_rows',
'table_upsert_row',
'table_update_row',
])
/**
 * Enriches a table tool description with table information based on the operation type.
 *
 * Filter-style tools (FILTER_OPERATIONS) get filter/sort examples and strict
 * "always filter" instructions; data-style tools (DATA_OPERATIONS) get a
 * column list plus an example payload; any other tool just gets the column
 * list appended.
 *
 * Note: the multi-line template literals below are emitted verbatim into the
 * LLM prompt — their exact line breaks and spacing are intentional.
 *
 * @param originalDescription - Base tool description to append to
 * @param table - Table summary whose columns drive the enrichment
 * @param toolId - Tool identifier used to choose the enrichment style
 * @returns The enriched description (or the original if the table has no columns)
 */
export function enrichTableToolDescription(
originalDescription: string,
table: TableSummary,
toolId: string
): string {
// Without columns there is nothing useful to add.
if (!table.columns || table.columns.length === 0) {
return originalDescription
}
// One " - name (type)" bullet per column.
const columnList = table.columns.map((col) => ` - ${col.name} (${col.type})`).join('\n')
if (FILTER_OPERATIONS.has(toolId)) {
// Build a concrete filter example from the first string/number columns, if any.
const stringCols = table.columns.filter((c) => c.type === 'string')
const numberCols = table.columns.filter((c) => c.type === 'number')
let filterExample = ''
if (stringCols.length > 0 && numberCols.length > 0) {
filterExample = `
Example filter: {"${stringCols[0].name}": {"$eq": "value"}, "${numberCols[0].name}": {"$lt": 50}}`
} else if (stringCols.length > 0) {
filterExample = `
Example filter: {"${stringCols[0].name}": {"$eq": "value"}}`
}
// A sort example only makes sense for the query tool with a numeric column to rank by.
let sortExample = ''
if (toolId === 'table_query_rows' && numberCols.length > 0) {
sortExample = `
Example sort: {"${numberCols[0].name}": "desc"} for highest first, {"${numberCols[0].name}": "asc"} for lowest first`
}
// The query tool gets extra ranking/limit guidance; other filter tools get the short form.
const queryInstructions =
toolId === 'table_query_rows'
? `
INSTRUCTIONS:
1. ALWAYS include a filter based on the user's question - queries without filters will fail
2. Construct the filter yourself from the user's question - do NOT ask for confirmation
3. Use exact match ($eq) by default unless the user specifies otherwise
4. For ranking queries (highest, lowest, Nth, top N):
- ALWAYS use sort with the relevant column (e.g., {"salary": "desc"} for highest salary)
- Use limit to get only the needed rows (e.g., limit=1 for highest, limit=2 for second highest)
- For "second highest X", use sort: {"X": "desc"} with limit: 2, then take the second result
5. Only use limit=1000 when you need ALL matching rows`
: `
INSTRUCTIONS:
1. ALWAYS include a filter based on the user's question - queries without filters will fail
2. Construct the filter yourself from the user's question - do NOT ask for confirmation
3. Use exact match ($eq) by default unless the user specifies otherwise`
return `${originalDescription}
${queryInstructions}
Table "${table.name}" columns:
${columnList}
${filterExample}${sortExample}`
}
if (DATA_OPERATIONS.has(toolId)) {
// Example payload built from the first few columns with type-appropriate values.
const exampleCols = table.columns.slice(0, 3)
const dataExample = exampleCols.reduce(
(obj, col) => {
obj[col.name] = col.type === 'number' ? 123 : col.type === 'boolean' ? true : 'example'
return obj
},
{} as Record<string, unknown>
)
if (toolId === 'table_update_row') {
return `${originalDescription}
Table "${table.name}" available columns:
${columnList}
For updates, only include the fields you want to change. Example: {"${exampleCols[0]?.name || 'field'}": "new_value"}`
}
return `${originalDescription}
Table "${table.name}" available columns:
${columnList}
Pass the "data" parameter with an object like: ${JSON.stringify(dataExample)}`
}
// Fallback: other table tools still benefit from knowing the columns.
return `${originalDescription}
Table "${table.name}" columns:
${columnList}`
}
/**
 * Enriches LLM tool parameters with table-specific information.
 *
 * Rewrites the descriptions of the filter/sort/limit/data/rows parameters so
 * the model knows the table's column names, and forces `filter` into the
 * required list for filter-based operations. Returns the schema's properties
 * and required list unchanged when the table has no columns.
 */
export function enrichTableToolParameters(
  llmSchema: { properties?: Record<string, any>; required?: string[] },
  table: TableSummary,
  toolId: string
): { properties: Record<string, any>; required: string[] } {
  if (!table.columns || table.columns.length === 0) {
    return {
      properties: llmSchema.properties || {},
      required: llmSchema.required || [],
    }
  }

  const columnNames = table.columns.map((c) => c.name).join(', ')
  // Shallow copies so the caller's schema object is never mutated.
  const properties = { ...llmSchema.properties }
  const required = llmSchema.required ? [...llmSchema.required] : []
  const isFilterOp = FILTER_OPERATIONS.has(toolId)
  const isQueryTool = toolId === 'table_query_rows'

  if (isFilterOp && properties.filter) {
    properties.filter = {
      ...properties.filter,
      description: `REQUIRED - query will fail without a filter. Construct filter from user's question using columns: ${columnNames}. Syntax: {"column": {"$eq": "value"}}`,
    }
  }
  // Filter operations must always list "filter" as required, even if absent above.
  if (isFilterOp && !required.includes('filter')) {
    required.push('filter')
  }

  if (isQueryTool && properties.sort) {
    properties.sort = {
      ...properties.sort,
      description: `Sort order as {field: "asc"|"desc"}. REQUIRED for ranking queries (highest, lowest, Nth). Example: {"salary": "desc"} for highest salary first.`,
    }
  }
  if (isQueryTool && properties.limit) {
    properties.limit = {
      ...properties.limit,
      description: `Maximum rows to return (min: 1, max: 1000, default: 100). For ranking queries: use limit=1 for highest/lowest, limit=2 for second highest, etc.`,
    }
  }

  if (DATA_OPERATIONS.has(toolId) && properties.data) {
    if (toolId === 'table_update_row') {
      properties.data = {
        ...properties.data,
        description: `Object containing fields to update. Only include fields you want to change. Available columns: ${columnNames}`,
      }
    } else {
      // Example payload from the first two columns with type-appropriate values.
      const exampleData: Record<string, unknown> = {}
      for (const col of table.columns.slice(0, 2)) {
        exampleData[col.name] = col.type === 'number' ? 123 : col.type === 'boolean' ? true : 'value'
      }
      properties.data = {
        ...properties.data,
        description: `REQUIRED object containing row values. Use columns: ${columnNames}. Example value: ${JSON.stringify(exampleData)}`,
      }
    }
  }

  if (toolId === 'table_batch_insert_rows' && properties.rows) {
    properties.rows = {
      ...properties.rows,
      description: `REQUIRED. Array of row objects. Each object uses columns: ${columnNames}`,
    }
  }

  return { properties, required }
}

View File

@@ -0,0 +1 @@
export * from './enrichment'

View File

@@ -0,0 +1,60 @@
/**
* Wand enricher for table schema context.
*/
import { db } from '@sim/db'
import { userTableDefinitions } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import type { TableSchema } from '../types'
const logger = createLogger('TableWandEnricher')
/**
 * Wand enricher that provides table schema context.
 *
 * Looks up the table (scoped to the given workspace) and renders its columns
 * — with type/required/unique flags — as a prompt fragment for the wand API.
 * Returns null whenever there is nothing to inject: missing IDs, unknown
 * table, empty schema, or a lookup failure.
 */
export async function enrichTableSchema(
  workspaceId: string | null,
  context: Record<string, unknown>
): Promise<string | null> {
  const tableId = context.tableId as string | undefined
  if (!workspaceId || !tableId) {
    return null
  }

  try {
    const rows = await db
      .select({
        name: userTableDefinitions.name,
        schema: userTableDefinitions.schema,
      })
      .from(userTableDefinitions)
      .where(
        and(eq(userTableDefinitions.id, tableId), eq(userTableDefinitions.workspaceId, workspaceId))
      )
      .limit(1)

    const table = rows[0]
    if (!table) return null

    const schema = table.schema as TableSchema | null
    const columns = schema?.columns
    if (!columns?.length) return null

    // One "- name (type, required, unique)" line per column, omitting unset flags.
    const columnLines = columns
      .map((col) => {
        const flags = [col.type, col.required && 'required', col.unique && 'unique'].filter(Boolean)
        return `- ${col.name} (${flags.join(', ')})`
      })
      .join('\n')

    const label = table.name ? `${table.name} (${tableId})` : tableId
    return `Table schema for ${label}:\n${columnLines}\nBuilt-in columns: createdAt, updatedAt`
  } catch (error) {
    logger.debug('Failed to fetch table schema', { tableId, error })
    return null
  }
}

View File

@@ -0,0 +1,26 @@
/**
 * Constants for table query builder UI (filtering and sorting).
 *
 * Each option list pairs a machine value with a human-readable label for
 * dropdown rendering. Re-exports the rule types used by the builder hooks.
 */
export type { FilterRule, SortRule } from '../types'
/** Comparison operators selectable in a filter rule (values map to $-prefixed API operators). */
export const COMPARISON_OPERATORS = [
{ value: 'eq', label: 'equals' },
{ value: 'ne', label: 'not equals' },
{ value: 'gt', label: 'greater than' },
{ value: 'gte', label: 'greater or equal' },
{ value: 'lt', label: 'less than' },
{ value: 'lte', label: 'less or equal' },
{ value: 'contains', label: 'contains' },
{ value: 'in', label: 'in array' },
] as const
/** Connectives for chaining filter rules together. */
export const LOGICAL_OPERATORS = [
{ value: 'and', label: 'and' },
{ value: 'or', label: 'or' },
] as const
/** Sort directions for order-by rules. */
export const SORT_DIRECTIONS = [
{ value: 'asc', label: 'ascending' },
{ value: 'desc', label: 'descending' },
] as const

View File

@@ -0,0 +1,167 @@
/**
* Converters for transforming between UI builder state and API filter/sort objects.
*/
import { nanoid } from 'nanoid'
import type { Filter, FilterRule, JsonValue, Sort, SortDirection, SortRule } from '../types'
/** Converts UI filter rules to a Filter object for API queries. */
export function filterRulesToFilter(rules: FilterRule[]): Filter | null {
if (rules.length === 0) return null
const orGroups: Filter[] = []
let currentGroup: Filter = {}
for (const rule of rules) {
const isOr = rule.logicalOperator === 'or'
const ruleValue = toRuleValue(rule.operator, rule.value)
if (isOr && Object.keys(currentGroup).length > 0) {
orGroups.push({ ...currentGroup })
currentGroup = {}
}
currentGroup[rule.column] = ruleValue as Filter[string]
}
if (Object.keys(currentGroup).length > 0) {
orGroups.push(currentGroup)
}
return orGroups.length > 1 ? { $or: orGroups } : orGroups[0] || null
}
/**
 * Converts a Filter object back to UI filter rules.
 *
 * $or filters expand into groups whose first rule carries the "or"
 * connective; plain filters parse as a single "and" group.
 */
export function filterToRules(filter: Filter | null): FilterRule[] {
  if (!filter) return []

  const orBranches = filter.$or
  if (orBranches && Array.isArray(orBranches)) {
    const groups = orBranches
      .map((branch) => parseFilterGroup(branch as Filter))
      .filter((group) => group.length > 0)
    return applyLogicalOperators(groups)
  }

  return parseFilterGroup(filter)
}
/** Converts a single UI sort rule to a Sort object for API queries (null when unset). */
export function sortRuleToSort(rule: SortRule | null): Sort | null {
  if (!rule?.column) return null
  return { [rule.column]: rule.direction }
}
/** Converts multiple UI sort rules to a Sort object, skipping rules without a column. */
export function sortRulesToSort(rules: SortRule[]): Sort | null {
  if (rules.length === 0) return null
  const result: Sort = {}
  rules.forEach((rule) => {
    if (rule.column) result[rule.column] = rule.direction
  })
  // All rules may have been skipped; null signals "no sorting".
  return Object.keys(result).length > 0 ? result : null
}
/** Converts a Sort object back to UI sort rules (one rule per sorted column). */
export function sortToRules(sort: Sort | null): SortRule[] {
  if (!sort) return []
  const rules: SortRule[] = []
  for (const [column, direction] of Object.entries(sort)) {
    rules.push({ id: nanoid(), column, direction: normalizeSortDirection(direction) })
  }
  return rules
}
function toRuleValue(operator: string, value: string): JsonValue {
const parsedValue = parseValue(value, operator)
return operator === 'eq' ? parsedValue : { [`$${operator}`]: parsedValue }
}
/**
 * Flattens rule groups into one list, marking the first rule of each group
 * after the first with the "or" connective and everything else with "and".
 */
function applyLogicalOperators(groups: FilterRule[][]): FilterRule[] {
  const flattened: FilterRule[] = []
  groups.forEach((group, groupIndex) => {
    group.forEach((rule, ruleIndex) => {
      const connective = groupIndex > 0 && ruleIndex === 0 ? 'or' : 'and'
      flattened.push({ ...rule, logicalOperator: connective })
    })
  })
  return flattened
}
/** Parses raw builder text; "in" splits on commas, everything else parses as one scalar. */
function parseValue(value: string, operator: string): JsonValue {
  if (operator !== 'in') return parseScalar(value)
  return value.split(',').map((piece) => parseScalar(piece.trim()))
}
/** Interprets builder text as a JSON scalar: booleans, null, numbers, else the raw string. */
function parseScalar(value: string): JsonValue {
  switch (value) {
    case 'true':
      return true
    case 'false':
      return false
    case 'null':
      return null
  }
  const numeric = Number(value)
  // Non-empty numeric strings become numbers; '' would coerce to 0, so keep it a string.
  if (value !== '' && !Number.isNaN(numeric)) return numeric
  return value
}
/**
 * Expands one filter group (no $or/$and nesting) into flat "and" rules.
 * Operator objects yield one rule per $-prefixed key; bare values become eq rules.
 */
function parseFilterGroup(group: Filter): FilterRule[] {
  if (!group || typeof group !== 'object' || Array.isArray(group)) return []

  const rules: FilterRule[] = []
  for (const [column, value] of Object.entries(group)) {
    // Nested logical keys are handled by the caller, not here.
    if (column === '$or' || column === '$and') continue

    const isOperatorObject = typeof value === 'object' && value !== null && !Array.isArray(value)
    if (!isOperatorObject) {
      rules.push({
        id: nanoid(),
        logicalOperator: 'and',
        column,
        operator: 'eq',
        value: formatValueForBuilder(value as JsonValue),
      })
      continue
    }

    for (const [op, opValue] of Object.entries(value)) {
      if (!op.startsWith('$')) continue
      rules.push({
        id: nanoid(),
        logicalOperator: 'and',
        column,
        operator: op.substring(1),
        value: formatValueForBuilder(opValue as JsonValue),
      })
    }
  }
  return rules
}
/** Renders a JSON value as builder input text; arrays join with ", ". */
function formatValueForBuilder(value: JsonValue): string {
  if (value === null) return 'null'
  if (Array.isArray(value)) return value.map(formatValueForBuilder).join(', ')
  return String(value)
}
/** Coerces an arbitrary string to a SortDirection, defaulting to 'asc'. */
function normalizeSortDirection(direction: string): SortDirection {
  if (direction === 'desc') return 'desc'
  return 'asc'
}

View File

@@ -0,0 +1,7 @@
/**
* Query builder UI utilities for filtering and sorting tables.
*/
export * from './constants'
export * from './converters'
export * from './use-query-builder'

View File

@@ -0,0 +1,161 @@
/**
* Hooks for query builder UI state management (filters and sorting).
*/
import { useCallback, useMemo } from 'react'
import { nanoid } from 'nanoid'
import type { ColumnOption } from '../types'
import {
COMPARISON_OPERATORS,
type FilterRule,
LOGICAL_OPERATORS,
SORT_DIRECTIONS,
type SortRule,
} from './constants'
export type { ColumnOption }
/**
 * Manages filter rule state with add/remove/update operations.
 * All mutators are no-ops while isReadOnly is set.
 */
export function useFilterBuilder({
  columns,
  rules,
  setRules,
  isReadOnly = false,
}: UseFilterBuilderProps): UseFilterBuilderReturn {
  const comparisonOptions = useMemo(
    () => COMPARISON_OPERATORS.map(({ value, label }) => ({ value, label })),
    []
  )
  const logicalOptions = useMemo(
    () => LOGICAL_OPERATORS.map(({ value, label }) => ({ value, label })),
    []
  )
  const sortDirectionOptions = useMemo(
    () => SORT_DIRECTIONS.map(({ value, label }) => ({ value, label })),
    []
  )

  // New rules default to the first available column with an empty equality check.
  const createDefaultRule = useCallback(
    (): FilterRule => ({
      id: nanoid(),
      logicalOperator: 'and',
      column: columns[0]?.value || '',
      operator: 'eq',
      value: '',
    }),
    [columns]
  )

  const addRule = useCallback(() => {
    if (isReadOnly) return
    setRules([...rules, createDefaultRule()])
  }, [isReadOnly, rules, setRules, createDefaultRule])

  const removeRule = useCallback(
    (id: string) => {
      if (isReadOnly) return
      setRules(rules.filter((rule) => rule.id !== id))
    },
    [isReadOnly, rules, setRules]
  )

  const updateRule = useCallback(
    (id: string, field: keyof FilterRule, value: string) => {
      if (isReadOnly) return
      setRules(rules.map((rule) => (rule.id === id ? { ...rule, [field]: value } : rule)))
    },
    [isReadOnly, rules, setRules]
  )

  return {
    comparisonOptions,
    logicalOptions,
    sortDirectionOptions,
    addRule,
    removeRule,
    updateRule,
    createDefaultRule,
  }
}
/**
 * Manages the single sort rule with add/remove/update operations.
 * A new sort starts on the first available column, ascending.
 */
export function useSortBuilder({
  columns,
  sortRule,
  setSortRule,
}: UseSortBuilderProps): UseSortBuilderReturn {
  const sortDirectionOptions = useMemo(
    () => SORT_DIRECTIONS.map(({ value, label }) => ({ value, label })),
    []
  )

  const addSort = useCallback(() => {
    setSortRule({ id: nanoid(), column: columns[0]?.value || '', direction: 'asc' })
  }, [columns, setSortRule])

  const removeSort = useCallback(() => {
    setSortRule(null)
  }, [setSortRule])

  const updateSortColumn = useCallback(
    (column: string) => {
      if (!sortRule) return
      setSortRule({ ...sortRule, column })
    },
    [sortRule, setSortRule]
  )

  const updateSortDirection = useCallback(
    (direction: 'asc' | 'desc') => {
      if (!sortRule) return
      setSortRule({ ...sortRule, direction })
    },
    [sortRule, setSortRule]
  )

  return {
    sortDirectionOptions,
    addSort,
    removeSort,
    updateSortColumn,
    updateSortDirection,
  }
}
/** Props for useFilterBuilder. */
export interface UseFilterBuilderProps {
/** Column options available for new/updated rules. */
columns: ColumnOption[]
/** Current filter rules (controlled state owned by the caller). */
rules: FilterRule[]
/** Setter invoked with the full replacement rule list. */
setRules: (rules: FilterRule[]) => void
/** When true, addRule/removeRule/updateRule become no-ops. */
isReadOnly?: boolean
}
/** Return value of useFilterBuilder. */
export interface UseFilterBuilderReturn {
comparisonOptions: ColumnOption[]
logicalOptions: ColumnOption[]
sortDirectionOptions: ColumnOption[]
addRule: () => void
removeRule: (id: string) => void
updateRule: (id: string, field: keyof FilterRule, value: string) => void
/** Builds a fresh rule defaulting to the first column with an empty eq check. */
createDefaultRule: () => FilterRule
}
/** Props for useSortBuilder. */
export interface UseSortBuilderProps {
columns: ColumnOption[]
/** Current sort rule, or null when unsorted (controlled state). */
sortRule: SortRule | null
setSortRule: (sort: SortRule | null) => void
}
/** Return value of useSortBuilder. */
export interface UseSortBuilderReturn {
sortDirectionOptions: ColumnOption[]
addSort: () => void
removeSort: () => void
updateSortColumn: (column: string) => void
updateSortDirection: (direction: 'asc' | 'desc') => void
}

View File

@@ -0,0 +1,691 @@
/**
* Table service layer for internal programmatic access.
*
* Use this for: workflow executor, background jobs, testing business logic.
* Use API routes for: HTTP requests, frontend clients.
*
* Note: API routes have their own implementations for HTTP-specific concerns.
*/
import { db } from '@sim/db'
import { userTableDefinitions, userTableRows } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, count, eq, sql } from 'drizzle-orm'
import { TABLE_LIMITS, USER_TABLE_ROWS_SQL_NAME } from './constants'
import { buildFilterClause, buildSortClause } from './sql'
import type {
BatchInsertData,
BulkDeleteData,
BulkOperationResult,
BulkUpdateData,
CreateTableData,
InsertRowData,
QueryOptions,
QueryResult,
RowData,
TableDefinition,
TableRow,
TableSchema,
UpdateRowData,
} from './types'
import {
checkBatchUniqueConstraintsDb,
checkUniqueConstraintsDb,
getUniqueColumns,
validateRowAgainstSchema,
validateRowSize,
validateTableName,
validateTableSchema,
} from './validation'
const logger = createLogger('TableService')
/**
 * Gets a table by ID with full details.
 *
 * @param tableId - Table ID to fetch
 * @returns Table definition or null if not found
 */
export async function getTableById(tableId: string): Promise<TableDefinition | null> {
  const [record] = await db
    .select()
    .from(userTableDefinitions)
    .where(eq(userTableDefinitions.id, tableId))
    .limit(1)
  if (!record) return null
  return {
    id: record.id,
    name: record.name,
    description: record.description,
    schema: record.schema as TableSchema,
    rowCount: record.rowCount,
    maxRows: record.maxRows,
    workspaceId: record.workspaceId,
    createdBy: record.createdBy,
    createdAt: record.createdAt,
    updatedAt: record.updatedAt,
  }
}
/**
 * Lists all tables in a workspace, ordered by creation time (oldest first).
 *
 * @param workspaceId - Workspace ID to list tables for
 * @returns Array of table definitions
 */
export async function listTables(workspaceId: string): Promise<TableDefinition[]> {
  const records = await db
    .select()
    .from(userTableDefinitions)
    .where(eq(userTableDefinitions.workspaceId, workspaceId))
    .orderBy(userTableDefinitions.createdAt)
  return records.map((record) => ({
    id: record.id,
    name: record.name,
    description: record.description,
    schema: record.schema as TableSchema,
    rowCount: record.rowCount,
    maxRows: record.maxRows,
    workspaceId: record.workspaceId,
    createdBy: record.createdBy,
    createdAt: record.createdAt,
    updatedAt: record.updatedAt,
  }))
}
/**
 * Creates a new table.
 *
 * Validates the name and schema, enforces the per-workspace table cap and
 * name uniqueness, then inserts the definition with a zero row count.
 *
 * NOTE(review): the count check and duplicate-name check are separate reads
 * performed before the insert, so concurrent creates could race past either
 * check — confirm whether a DB-level unique constraint backs the name check.
 *
 * @param data - Table creation data
 * @param requestId - Request ID for logging
 * @returns Created table definition
 * @throws Error if validation fails or limits exceeded
 */
export async function createTable(
data: CreateTableData,
requestId: string
): Promise<TableDefinition> {
// Validate table name
const nameValidation = validateTableName(data.name)
if (!nameValidation.valid) {
throw new Error(`Invalid table name: ${nameValidation.errors.join(', ')}`)
}
// Validate schema
const schemaValidation = validateTableSchema(data.schema)
if (!schemaValidation.valid) {
throw new Error(`Invalid schema: ${schemaValidation.errors.join(', ')}`)
}
// Check workspace table limit (plain read — see race note above)
const existingCount = await db
.select({ count: count() })
.from(userTableDefinitions)
.where(eq(userTableDefinitions.workspaceId, data.workspaceId))
if (existingCount[0].count >= TABLE_LIMITS.MAX_TABLES_PER_WORKSPACE) {
throw new Error(
`Workspace has reached maximum table limit (${TABLE_LIMITS.MAX_TABLES_PER_WORKSPACE})`
)
}
// Check for duplicate name within the workspace
const duplicateName = await db
.select({ id: userTableDefinitions.id })
.from(userTableDefinitions)
.where(
and(
eq(userTableDefinitions.workspaceId, data.workspaceId),
eq(userTableDefinitions.name, data.name)
)
)
.limit(1)
if (duplicateName.length > 0) {
throw new Error(`Table with name "${data.name}" already exists in this workspace`)
}
// IDs are "tbl_" + a dashless UUID
const tableId = `tbl_${crypto.randomUUID().replace(/-/g, '')}`
const now = new Date()
// Use provided maxRows (from billing plan) or fall back to default
const maxRows = data.maxRows ?? TABLE_LIMITS.MAX_ROWS_PER_TABLE
const newTable = {
id: tableId,
name: data.name,
description: data.description ?? null,
schema: data.schema,
workspaceId: data.workspaceId,
createdBy: data.userId,
maxRows,
createdAt: now,
updatedAt: now,
}
await db.insert(userTableDefinitions).values(newTable)
logger.info(`[${requestId}] Created table ${tableId} in workspace ${data.workspaceId}`)
// Echo the inserted values back; a brand-new table always has zero rows.
return {
id: newTable.id,
name: newTable.name,
description: newTable.description,
schema: newTable.schema as TableSchema,
rowCount: 0,
maxRows: newTable.maxRows,
workspaceId: newTable.workspaceId,
createdBy: newTable.createdBy,
createdAt: newTable.createdAt,
updatedAt: newTable.updatedAt,
}
}
/**
 * Deletes a table (hard delete).
 *
 * Removes the table's rows and its definition atomically in one transaction.
 *
 * @param tableId - Table ID to delete
 * @param requestId - Request ID for logging
 */
export async function deleteTable(tableId: string, requestId: string): Promise<void> {
  await db.transaction(async (tx) => {
    await tx.delete(userTableRows).where(eq(userTableRows.tableId, tableId))
    await tx.delete(userTableDefinitions).where(eq(userTableDefinitions.id, tableId))
  })
  logger.info(`[${requestId}] Deleted table ${tableId}`)
}
/**
 * Inserts a single row into a table.
 *
 * Runs capacity, size, schema, and unique-constraint checks before the
 * insert; throws with a descriptive message on the first failure.
 *
 * NOTE(review): the capacity check trusts the caller-supplied table.rowCount
 * snapshot, so concurrent inserts could exceed maxRows — confirm the
 * database triggers mentioned below are sufficient protection.
 *
 * @param data - Row insertion data
 * @param table - Table definition (to avoid re-fetching)
 * @param requestId - Request ID for logging
 * @returns Inserted row
 * @throws Error if validation fails or capacity exceeded
 */
export async function insertRow(
data: InsertRowData,
table: TableDefinition,
requestId: string
): Promise<TableRow> {
// Check capacity using stored rowCount (maintained by database triggers)
if (table.rowCount >= table.maxRows) {
throw new Error(`Table has reached maximum row limit (${table.maxRows})`)
}
// Validate row size
const sizeValidation = validateRowSize(data.data)
if (!sizeValidation.valid) {
throw new Error(sizeValidation.errors.join(', '))
}
// Validate against schema
const schemaValidation = validateRowAgainstSchema(data.data, table.schema)
if (!schemaValidation.valid) {
throw new Error(`Schema validation failed: ${schemaValidation.errors.join(', ')}`)
}
// Check unique constraints using optimized database query
// (skipped entirely when the schema declares no unique columns)
const uniqueColumns = getUniqueColumns(table.schema)
if (uniqueColumns.length > 0) {
const uniqueValidation = await checkUniqueConstraintsDb(data.tableId, data.data, table.schema)
if (!uniqueValidation.valid) {
throw new Error(uniqueValidation.errors.join(', '))
}
}
// IDs are "row_" + a dashless UUID
const rowId = `row_${crypto.randomUUID().replace(/-/g, '')}`
const now = new Date()
const newRow = {
id: rowId,
tableId: data.tableId,
workspaceId: data.workspaceId,
data: data.data,
createdAt: now,
updatedAt: now,
}
await db.insert(userTableRows).values(newRow)
logger.info(`[${requestId}] Inserted row ${rowId} into table ${data.tableId}`)
return {
id: newRow.id,
data: newRow.data as RowData,
createdAt: newRow.createdAt,
updatedAt: newRow.updatedAt,
}
}
/**
 * Inserts multiple rows into a table.
 *
 * @param data - Batch insertion data
 * @param table - Table definition
 * @param requestId - Request ID for logging
 * @returns Array of inserted rows (empty when the input batch is empty)
 * @throws Error if validation fails or capacity exceeded
 */
export async function batchInsertRows(
  data: BatchInsertData,
  table: TableDefinition,
  requestId: string
): Promise<TableRow[]> {
  // Guard: an empty batch is a no-op. Without this, db.insert(...).values([])
  // would throw at the driver level (drizzle rejects an empty value list).
  if (data.rows.length === 0) {
    return []
  }
  // Check capacity using stored rowCount (maintained by database triggers)
  const remainingCapacity = table.maxRows - table.rowCount
  if (remainingCapacity < data.rows.length) {
    throw new Error(
      `Insufficient capacity. Can only insert ${remainingCapacity} more rows (table has ${table.rowCount}/${table.maxRows} rows)`
    )
  }
  // Validate every row (size + schema) before touching the database.
  for (let i = 0; i < data.rows.length; i++) {
    const row = data.rows[i]
    const sizeValidation = validateRowSize(row)
    if (!sizeValidation.valid) {
      throw new Error(`Row ${i + 1}: ${sizeValidation.errors.join(', ')}`)
    }
    const schemaValidation = validateRowAgainstSchema(row, table.schema)
    if (!schemaValidation.valid) {
      throw new Error(`Row ${i + 1}: ${schemaValidation.errors.join(', ')}`)
    }
  }
  // Check unique constraints across all rows using optimized database query
  const uniqueColumns = getUniqueColumns(table.schema)
  if (uniqueColumns.length > 0) {
    const uniqueResult = await checkBatchUniqueConstraintsDb(data.tableId, data.rows, table.schema)
    if (!uniqueResult.valid) {
      // Format errors for batch insert (1-based row numbers for users)
      const errorMessages = uniqueResult.errors
        .map((e) => `Row ${e.row + 1}: ${e.errors.join(', ')}`)
        .join('; ')
      throw new Error(errorMessages)
    }
  }
  const now = new Date()
  const rowsToInsert = data.rows.map((rowData) => ({
    id: `row_${crypto.randomUUID().replace(/-/g, '')}`,
    tableId: data.tableId,
    workspaceId: data.workspaceId,
    data: rowData,
    createdAt: now,
    updatedAt: now,
  }))
  await db.insert(userTableRows).values(rowsToInsert)
  logger.info(`[${requestId}] Batch inserted ${data.rows.length} rows into table ${data.tableId}`)
  return rowsToInsert.map((r) => ({
    id: r.id,
    data: r.data as RowData,
    createdAt: r.createdAt,
    updatedAt: r.updatedAt,
  }))
}
/**
* Queries rows from a table with filtering, sorting, and pagination.
*
* @param tableId - Table ID to query
* @param workspaceId - Workspace ID for access control
* @param options - Query options (filter, sort, limit, offset)
* @param requestId - Request ID for logging
* @returns Query result with rows and pagination info
*/
export async function queryRows(
tableId: string,
workspaceId: string,
options: QueryOptions,
requestId: string
): Promise<QueryResult> {
const { filter, sort, limit = TABLE_LIMITS.DEFAULT_QUERY_LIMIT, offset = 0 } = options
const tableName = USER_TABLE_ROWS_SQL_NAME
// Build WHERE clause
const baseConditions = and(
eq(userTableRows.tableId, tableId),
eq(userTableRows.workspaceId, workspaceId)
)
let whereClause = baseConditions
if (filter && Object.keys(filter).length > 0) {
const filterClause = buildFilterClause(filter, tableName)
if (filterClause) {
whereClause = and(baseConditions, filterClause)
}
}
// Get total count
const countResult = await db
.select({ count: count() })
.from(userTableRows)
.where(whereClause ?? baseConditions)
const totalCount = Number(countResult[0].count)
// Build ORDER BY clause
let orderByClause
if (sort && Object.keys(sort).length > 0) {
orderByClause = buildSortClause(sort, tableName)
}
// Execute query
let query = db
.select()
.from(userTableRows)
.where(whereClause ?? baseConditions)
if (orderByClause) {
query = query.orderBy(orderByClause) as typeof query
}
const rows = await query.limit(limit).offset(offset)
logger.info(
`[${requestId}] Queried ${rows.length} rows from table ${tableId} (total: ${totalCount})`
)
return {
rows: rows.map((r) => ({
id: r.id,
data: r.data as RowData,
createdAt: r.createdAt,
updatedAt: r.updatedAt,
})),
rowCount: rows.length,
totalCount,
limit,
offset,
}
}
/**
 * Gets a single row by ID.
 *
 * @param tableId - Table ID
 * @param rowId - Row ID to fetch
 * @param workspaceId - Workspace ID for access control
 * @returns Row or null if not found
 */
export async function getRowById(
  tableId: string,
  rowId: string,
  workspaceId: string
): Promise<TableRow | null> {
  const [match] = await db
    .select()
    .from(userTableRows)
    .where(
      and(
        eq(userTableRows.id, rowId),
        eq(userTableRows.tableId, tableId),
        eq(userTableRows.workspaceId, workspaceId)
      )
    )
    .limit(1)
  if (!match) return null
  return {
    id: match.id,
    data: match.data as RowData,
    createdAt: match.createdAt,
    updatedAt: match.updatedAt,
  }
}
/**
* Updates a single row.
*
* @param data - Update data
* @param table - Table definition
* @param requestId - Request ID for logging
* @returns Updated row
* @throws Error if row not found or validation fails
*/
export async function updateRow(
data: UpdateRowData,
table: TableDefinition,
requestId: string
): Promise<TableRow> {
// Get existing row
const existingRow = await getRowById(data.tableId, data.rowId, data.workspaceId)
if (!existingRow) {
throw new Error('Row not found')
}
// Validate size
const sizeValidation = validateRowSize(data.data)
if (!sizeValidation.valid) {
throw new Error(sizeValidation.errors.join(', '))
}
// Validate against schema
const schemaValidation = validateRowAgainstSchema(data.data, table.schema)
if (!schemaValidation.valid) {
throw new Error(`Schema validation failed: ${schemaValidation.errors.join(', ')}`)
}
// Check unique constraints using optimized database query
const uniqueColumns = getUniqueColumns(table.schema)
if (uniqueColumns.length > 0) {
const uniqueValidation = await checkUniqueConstraintsDb(
data.tableId,
data.data,
table.schema,
data.rowId // Exclude current row
)
if (!uniqueValidation.valid) {
throw new Error(uniqueValidation.errors.join(', '))
}
}
const now = new Date()
await db
.update(userTableRows)
.set({ data: data.data, updatedAt: now })
.where(eq(userTableRows.id, data.rowId))
logger.info(`[${requestId}] Updated row ${data.rowId} in table ${data.tableId}`)
return {
id: data.rowId,
data: data.data,
createdAt: existingRow.createdAt,
updatedAt: now,
}
}
/**
 * Deletes a single row (hard delete).
 *
 * @param tableId - Table ID
 * @param rowId - Row ID to delete
 * @param workspaceId - Workspace ID for access control
 * @param requestId - Request ID for logging
 * @throws Error if row not found
 */
export async function deleteRow(
  tableId: string,
  rowId: string,
  workspaceId: string,
  requestId: string
): Promise<void> {
  // Confirm the row exists within this table/workspace before deleting.
  const existing = await getRowById(tableId, rowId, workspaceId)
  if (existing === null) {
    throw new Error('Row not found')
  }
  await db.delete(userTableRows).where(eq(userTableRows.id, rowId))
  logger.info(`[${requestId}] Deleted row ${rowId} from table ${tableId}`)
}
/**
 * Updates multiple rows matching a filter.
 *
 * The patch in `data.data` is shallow-merged over each matching row's existing
 * data, and every merged result is validated (size + schema) before any write.
 *
 * NOTE(review): unlike updateRow, this path does not re-check unique
 * constraints on the merged data — confirm that is intentional.
 *
 * @param data - Bulk update data (filter, patch, optional limit)
 * @param table - Table definition (provides schema for validation)
 * @param requestId - Request ID for logging
 * @returns Bulk operation result with affected row IDs
 * @throws Error if the filter is empty or a merged row fails validation
 */
export async function updateRowsByFilter(
  data: BulkUpdateData,
  table: TableDefinition,
  requestId: string
): Promise<BulkOperationResult> {
  const tableName = USER_TABLE_ROWS_SQL_NAME

  const filterClause = buildFilterClause(data.filter, tableName)
  if (!filterClause) {
    throw new Error('Filter is required for bulk update')
  }

  const scope = and(
    eq(userTableRows.tableId, data.tableId),
    eq(userTableRows.workspaceId, data.workspaceId)
  )
  let selectStmt = db
    .select({ id: userTableRows.id, data: userTableRows.data })
    .from(userTableRows)
    .where(and(scope, filterClause))
  if (data.limit) {
    selectStmt = selectStmt.limit(data.limit) as typeof selectStmt
  }
  const targets = await selectStmt
  if (targets.length === 0) {
    return { affectedCount: 0, affectedRowIds: [] }
  }

  // Shallow merge: patch keys overwrite existing keys.
  const merged = (existing: RowData): RowData => ({ ...existing, ...data.data })

  // Validate every merged result before performing any write.
  for (const row of targets) {
    const candidate = merged(row.data as RowData)
    const sizeCheck = validateRowSize(candidate)
    if (!sizeCheck.valid) {
      throw new Error(`Row ${row.id}: ${sizeCheck.errors.join(', ')}`)
    }
    const schemaCheck = validateRowAgainstSchema(candidate, table.schema)
    if (!schemaCheck.valid) {
      throw new Error(`Row ${row.id}: ${schemaCheck.errors.join(', ')}`)
    }
  }

  // Apply updates in bounded batches inside a single transaction.
  const now = new Date()
  await db.transaction(async (tx) => {
    for (let start = 0; start < targets.length; start += TABLE_LIMITS.UPDATE_BATCH_SIZE) {
      const batch = targets.slice(start, start + TABLE_LIMITS.UPDATE_BATCH_SIZE)
      await Promise.all(
        batch.map((row) =>
          tx
            .update(userTableRows)
            .set({ data: merged(row.data as RowData), updatedAt: now })
            .where(eq(userTableRows.id, row.id))
        )
      )
    }
  })
  logger.info(`[${requestId}] Updated ${targets.length} rows in table ${data.tableId}`)

  return {
    affectedCount: targets.length,
    affectedRowIds: targets.map((r) => r.id),
  }
}
/**
 * Deletes multiple rows matching a filter.
 *
 * @param data - Bulk delete data (filter, optional limit)
 * @param requestId - Request ID for logging
 * @returns Bulk operation result with affected row IDs
 * @throws Error if the filter is empty
 */
export async function deleteRowsByFilter(
  data: BulkDeleteData,
  requestId: string
): Promise<BulkOperationResult> {
  const tableName = USER_TABLE_ROWS_SQL_NAME

  const filterClause = buildFilterClause(data.filter, tableName)
  if (!filterClause) {
    throw new Error('Filter is required for bulk delete')
  }

  const scope = and(
    eq(userTableRows.tableId, data.tableId),
    eq(userTableRows.workspaceId, data.workspaceId)
  )
  let selectStmt = db
    .select({ id: userTableRows.id })
    .from(userTableRows)
    .where(and(scope, filterClause))
  if (data.limit) {
    selectStmt = selectStmt.limit(data.limit) as typeof selectStmt
  }
  const matches = await selectStmt
  if (matches.length === 0) {
    return { affectedCount: 0, affectedRowIds: [] }
  }

  const rowIds = matches.map((r) => r.id)

  // Delete in bounded batches inside one transaction; the ANY(ARRAY[...]) form
  // keeps each statement parameterized regardless of batch size.
  await db.transaction(async (tx) => {
    for (let start = 0; start < rowIds.length; start += TABLE_LIMITS.DELETE_BATCH_SIZE) {
      const batch = rowIds.slice(start, start + TABLE_LIMITS.DELETE_BATCH_SIZE)
      await tx.delete(userTableRows).where(
        and(
          eq(userTableRows.tableId, data.tableId),
          eq(userTableRows.workspaceId, data.workspaceId),
          sql`${userTableRows.id} = ANY(ARRAY[${sql.join(
            batch.map((id) => sql`${id}`),
            sql`, `
          )}])`
        )
      )
    }
  })
  logger.info(`[${requestId}] Deleted ${matches.length} rows from table ${data.tableId}`)

  return {
    affectedCount: matches.length,
    affectedRowIds: rowIds,
  }
}

369
apps/sim/lib/table/sql.ts Normal file
View File

@@ -0,0 +1,369 @@
/**
* SQL query builder utilities for user-defined tables.
*
* Uses JSONB containment operator (@>) for equality to leverage GIN index.
* Uses text extraction (->>) for comparisons and pattern matching.
*/
import type { SQL } from 'drizzle-orm'
import { sql } from 'drizzle-orm'
import { NAME_PATTERN } from './constants'
import type { ColumnDefinition, ConditionOperators, Filter, JsonValue, Sort } from './types'
/**
 * Whitelist of allowed operators for query filtering.
 * Only these operators can be used in filter conditions; anything else is
 * rejected by validateOperator before it can reach SQL generation.
 * NOTE: keep in sync with the ConditionOperators interface in ./types.ts.
 */
const ALLOWED_OPERATORS = new Set([
  '$eq',
  '$ne',
  '$gt',
  '$gte',
  '$lt',
  '$lte',
  '$in',
  '$nin',
  '$contains',
])
/**
* Builds a WHERE clause from a filter object.
* Recursively processes logical operators ($or, $and) and field conditions.
*
* @param filter - Filter object with field conditions and logical operators
* @param tableName - Table name for the query (e.g., 'user_table_rows')
* @returns SQL WHERE clause or undefined if no filter specified
* @throws Error if field name is invalid or operator is not allowed
*
* @example
* // Simple equality
* buildFilterClause({ name: 'John' }, 'user_table_rows')
*
* // Complex filter with operators
* buildFilterClause({ age: { $gte: 18 }, status: { $in: ['active', 'pending'] } }, 'user_table_rows')
*
* // Logical operators
* buildFilterClause({ $or: [{ status: 'active' }, { verified: true }] }, 'user_table_rows')
*/
export function buildFilterClause(filter: Filter, tableName: string): SQL | undefined {
const conditions: SQL[] = []
for (const [field, condition] of Object.entries(filter)) {
if (condition === undefined) {
continue
}
// This represents a case where the filter is a logical OR of multiple filters
// e.g. { $or: [{ status: 'active' }, { status: 'pending' }] }
if (field === '$or' && Array.isArray(condition)) {
const orClause = buildLogicalClause(condition as Filter[], tableName, 'OR')
if (orClause) {
conditions.push(orClause)
}
continue
}
// This represents a case where the filter is a logical AND of multiple filters
// e.g. { $and: [{ status: 'active' }, { status: 'pending' }] }
if (field === '$and' && Array.isArray(condition)) {
const andClause = buildLogicalClause(condition as Filter[], tableName, 'AND')
if (andClause) {
conditions.push(andClause)
}
continue
}
// Skip arrays for regular fields - arrays are only valid for $or and $and.
// If we encounter an array here, it's likely malformed input (e.g., { name: [filter1, filter2] })
// which doesn't have a clear semantic meaning, so we skip it.
if (Array.isArray(condition)) {
continue
}
// Build SQL conditions for this field. Returns array of SQL fragments for each operator.
const fieldConditions = buildFieldCondition(
tableName,
field,
condition as JsonValue | ConditionOperators
)
conditions.push(...fieldConditions)
}
if (conditions.length === 0) return undefined
if (conditions.length === 1) return conditions[0]
return sql.join(conditions, sql.raw(' AND '))
}
/**
 * Builds an ORDER BY clause from a sort object.
 *
 * @param sort - Sort object with field names and directions
 * @param tableName - Table name for the query (e.g., 'user_table_rows')
 * @param columns - Optional column definitions for type-aware sorting
 * @returns SQL ORDER BY clause or undefined if no sort specified
 * @throws Error if a field name or sort direction is invalid
 *
 * @example
 * buildSortClause({ name: 'asc', age: 'desc' }, 'user_table_rows')
 * // Returns: ORDER BY data->>'name' ASC, data->>'age' DESC
 *
 * @example
 * // With column types for proper numeric sorting
 * buildSortClause({ salary: 'desc' }, 'user_table_rows', [{ name: 'salary', type: 'number' }])
 * // Returns: ORDER BY (data->>'salary')::numeric DESC NULLS LAST
 */
export function buildSortClause(
  sort: Sort,
  tableName: string,
  columns?: ColumnDefinition[]
): SQL | undefined {
  const typeByName = new Map(columns?.map((col) => [col.name, col.type]))
  const pieces: SQL[] = []

  for (const [field, direction] of Object.entries(sort)) {
    validateFieldName(field)
    if (direction !== 'asc' && direction !== 'desc') {
      throw new Error(`Invalid sort direction "${direction}". Must be "asc" or "desc".`)
    }
    pieces.push(buildSortFieldClause(tableName, field, direction, typeByName.get(field)))
  }

  if (pieces.length === 0) return undefined
  return sql.join(pieces, sql.raw(', '))
}
/**
 * Validates a field name to prevent SQL injection.
 * Names must match NAME_PATTERN (letter/underscore start, then alphanumeric
 * or underscore).
 *
 * @param field - The field name to validate
 * @throws Error if the field name is invalid
 */
function validateFieldName(field: string): void {
  const isNonEmptyString = typeof field === 'string' && field.length > 0
  if (!isNonEmptyString) {
    throw new Error('Field name must be a non-empty string')
  }
  if (NAME_PATTERN.test(field)) return
  throw new Error(
    `Invalid field name "${field}". Field names must start with a letter or underscore, followed by alphanumeric characters or underscores.`
  )
}
/**
 * Validates an operator against the ALLOWED_OPERATORS whitelist.
 *
 * @param operator - The operator to validate
 * @throws Error if the operator is not allowed
 */
function validateOperator(operator: string): void {
  if (ALLOWED_OPERATORS.has(operator)) return
  const allowed = Array.from(ALLOWED_OPERATORS).join(', ')
  throw new Error(`Invalid operator "${operator}". Allowed operators: ${allowed}`)
}
/**
 * Translates one field's filter condition into SQL fragments.
 *
 * A bare primitive/null is shorthand for equality and compiles to a JSONB
 * containment check; an operator object ($eq, $ne, $gt, $gte, $lt, $lte,
 * $in, $nin, $contains) yields one fragment per operator. The field name is
 * validated against NAME_PATTERN and each operator against the whitelist
 * before any SQL is generated.
 *
 * @param tableName - Table to reference in the generated SQL
 * @param field - Field (JSONB key) the condition applies to
 * @param condition - Bare value or operator object
 * @returns One SQL fragment per applied operator
 * @throws Error if the field name is invalid or an operator is not allowed
 */
function buildFieldCondition(
  tableName: string,
  field: string,
  condition: JsonValue | ConditionOperators
): SQL[] {
  validateFieldName(field)

  const isOperatorObject =
    typeof condition === 'object' && condition !== null && !Array.isArray(condition)

  // Shorthand: { name: 'John' } behaves like { name: { $eq: 'John' } }.
  if (!isOperatorObject) {
    return [buildContainmentClause(tableName, field, condition as JsonValue)]
  }

  const fragments: SQL[] = []
  for (const [op, value] of Object.entries(condition)) {
    validateOperator(op)
    switch (op) {
      case '$eq':
        fragments.push(buildContainmentClause(tableName, field, value as JsonValue))
        break
      case '$ne':
        fragments.push(sql`NOT (${buildContainmentClause(tableName, field, value as JsonValue)})`)
        break
      case '$gt':
        fragments.push(buildComparisonClause(tableName, field, '>', value as number))
        break
      case '$gte':
        fragments.push(buildComparisonClause(tableName, field, '>=', value as number))
        break
      case '$lt':
        fragments.push(buildComparisonClause(tableName, field, '<', value as number))
        break
      case '$lte':
        fragments.push(buildComparisonClause(tableName, field, '<=', value as number))
        break
      case '$in':
        if (Array.isArray(value) && value.length > 0) {
          if (value.length === 1) {
            // A single candidate is plain containment; no OR wrapper needed.
            fragments.push(buildContainmentClause(tableName, field, value[0]))
          } else {
            const options = value.map((v) => buildContainmentClause(tableName, field, v))
            fragments.push(sql`(${sql.join(options, sql.raw(' OR '))})`)
          }
        }
        break
      case '$nin':
        if (Array.isArray(value) && value.length > 0) {
          const exclusions = value.map(
            (v) => sql`NOT (${buildContainmentClause(tableName, field, v)})`
          )
          fragments.push(sql`(${sql.join(exclusions, sql.raw(' AND '))})`)
        }
        break
      case '$contains':
        fragments.push(buildContainsClause(tableName, field, value as string))
        break
      default:
        // Unreachable given validateOperator; kept as a defensive guard.
        throw new Error(`Unsupported operator: ${op}`)
    }
  }
  return fragments
}
/**
 * Builds SQL clauses from nested filters and joins them with the given operator.
 *
 * @example
 * // OR operator
 * buildLogicalClause([{ status: 'active' }, { status: 'pending' }], 'user_table_rows', 'OR')
 * // Returns: (data @> '{"status":"active"}'::jsonb OR data @> '{"status":"pending"}'::jsonb)
 *
 * @example
 * // AND operator
 * buildLogicalClause([{ age: { $gte: 18 } }, { verified: true }], 'user_table_rows', 'AND')
 * // Returns: ((data->>'age')::numeric >= 18 AND data @> '{"verified":true}'::jsonb)
 */
function buildLogicalClause(
  subFilters: Filter[],
  tableName: string,
  operator: 'OR' | 'AND'
): SQL | undefined {
  const parts: SQL[] = []
  for (const sub of subFilters) {
    const built = buildFilterClause(sub, tableName)
    if (built !== undefined) {
      parts.push(built)
    }
  }
  if (parts.length === 0) return undefined
  if (parts.length === 1) return parts[0]
  return sql`(${sql.join(parts, sql.raw(` ${operator} `))})`
}
/**
 * Builds a JSONB containment clause: `data @> '{"field": value}'::jsonb`.
 * Containment is used for equality so the GIN index on `data` can be leveraged.
 */
function buildContainmentClause(tableName: string, field: string, value: JsonValue): SQL {
  const payload = JSON.stringify({ [field]: value })
  return sql`${sql.raw(`${tableName}.data`)} @> ${payload}::jsonb`
}
/**
 * Builds a numeric comparison: `(data->>'field')::numeric <op> value`.
 * Text extraction cannot use the GIN index; the value binds as a parameter.
 */
function buildComparisonClause(
  tableName: string,
  field: string,
  operator: '>' | '>=' | '<' | '<=',
  value: number
): SQL {
  const safeField = field.replace(/'/g, "''")
  const extract = `${tableName}.data->>'${safeField}'`
  return sql`(${sql.raw(extract)})::numeric ${sql.raw(operator)} ${value}`
}
/**
 * Builds a case-insensitive substring match: `data->>'field' ILIKE '%value%'`.
 * The pattern binds as a parameter; only the field name is inlined (escaped).
 */
function buildContainsClause(tableName: string, field: string, value: string): SQL {
  const safeField = field.replace(/'/g, "''")
  const extract = `${tableName}.data->>'${safeField}'`
  return sql`${sql.raw(extract)} ILIKE ${`%${value}%`}`
}
/**
 * Builds a single ORDER BY term for a field.
 * The createdAt/updatedAt bookkeeping columns sort on the row columns
 * themselves; all other fields sort on JSONB text extraction, cast to
 * numeric/timestamp when the column type is known.
 *
 * @param tableName - The table name
 * @param field - The field name to sort by (already validated by the caller)
 * @param direction - Sort direction ('asc' or 'desc')
 * @param columnType - Optional column type for type-aware sorting
 */
function buildSortFieldClause(
  tableName: string,
  field: string,
  direction: 'asc' | 'desc',
  columnType?: string
): SQL {
  const safeField = field.replace(/'/g, "''")
  const dir = direction.toUpperCase()

  // Timestamp columns live on the row itself, not inside the JSONB payload.
  if (field === 'createdAt' || field === 'updatedAt') {
    return sql.raw(`${tableName}.${safeField} ${dir}`)
  }

  const extract = `${tableName}.data->>'${safeField}'`
  switch (columnType) {
    case 'number':
      // Numeric cast with NULLS LAST so null/missing values sort at the end.
      return sql.raw(`(${extract})::numeric ${dir} NULLS LAST`)
    case 'date':
      // Timestamp cast for chronological rather than lexicographic order.
      return sql.raw(`(${extract})::timestamp ${dir} NULLS LAST`)
    default:
      // string/boolean/json/unknown types sort as text.
      return sql.raw(`${extract} ${dir}`)
  }
}

188
apps/sim/lib/table/types.ts Normal file
View File

@@ -0,0 +1,188 @@
/**
 * Type definitions for user-defined tables.
 */
import type { COLUMN_TYPES } from './constants'
/** A single scalar cell value; `Date` is accepted alongside its string form. */
export type ColumnValue = string | number | boolean | null | Date
/** Any JSON-compatible value: scalar, array, or nested object. */
export type JsonValue = ColumnValue | JsonValue[] | { [key: string]: JsonValue }
/** Row data mapping column names to values. */
export type RowData = Record<string, JsonValue>
/** Sort direction for a single column. */
export type SortDirection = 'asc' | 'desc'
/** Sort specification mapping column names to direction. */
export type Sort = Record<string, SortDirection>
/** Option for dropdown/select components. */
export interface ColumnOption {
  value: string
  label: string
}
/** Definition of one column in a user table schema. */
export interface ColumnDefinition {
  name: string
  // One of the COLUMN_TYPES constants (see ./constants).
  type: (typeof COLUMN_TYPES)[number]
  required?: boolean
  unique?: boolean
}
/** Schema of a user table: the list of its column definitions. */
export interface TableSchema {
  columns: ColumnDefinition[]
}
/** Full table metadata, including ownership and capacity bookkeeping. */
export interface TableDefinition {
  id: string
  name: string
  description?: string | null
  schema: TableSchema
  rowCount: number
  maxRows: number
  workspaceId: string
  createdBy: string
  createdAt: Date | string
  updatedAt: Date | string
}
/** Minimal table info for UI components. */
export type TableInfo = Pick<TableDefinition, 'id' | 'name' | 'schema'>
/** Simplified table summary for LLM enrichment and display contexts. */
export interface TableSummary {
  name: string
  columns: Array<Pick<ColumnDefinition, 'name' | 'type'>>
}
/** A stored row: opaque id, the data payload, and timestamps. */
export interface TableRow {
  id: string
  data: RowData
  createdAt: Date | string
  updatedAt: Date | string
}
/**
 * MongoDB-style query operators for field comparisons.
 *
 * @example
 * { $eq: 'John' }
 * { $gte: 18, $lt: 65 }
 * { $in: ['active', 'pending'] }
 */
export interface ConditionOperators {
  $eq?: ColumnValue
  $ne?: ColumnValue
  $gt?: number
  $gte?: number
  $lt?: number
  $lte?: number
  $in?: ColumnValue[]
  $nin?: ColumnValue[]
  /** Case-insensitive substring match. */
  $contains?: string
}
/**
 * Filter object for querying table rows. Supports direct equality shorthand,
 * operator objects, and logical $or/$and combinators.
 *
 * @example
 * { name: 'John' }
 * { age: { $gte: 18 } }
 * { $or: [{ status: 'active' }, { status: 'pending' }] }
 */
export interface Filter {
  $or?: Filter[]
  $and?: Filter[]
  [key: string]: ColumnValue | ConditionOperators | Filter[] | undefined
}
/** Validation outcome; `errors` lists human-readable messages when invalid. */
export interface ValidationResult {
  valid: boolean
  errors: string[]
}
/**
 * UI builder state for a single filter rule.
 * Includes an `id` field for React keys and string values for form inputs.
 */
export interface FilterRule {
  id: string
  logicalOperator: 'and' | 'or'
  column: string
  operator: string
  value: string
}
/**
 * UI builder state for a single sort rule.
 * Includes an `id` field for React keys.
 */
export interface SortRule {
  id: string
  column: string
  direction: SortDirection
}
/** Query options: filter, sort, and pagination parameters. */
export interface QueryOptions {
  filter?: Filter
  sort?: Sort
  limit?: number
  offset?: number
}
/** Paged query result; `rowCount` is this page's size, `totalCount` the filter total. */
export interface QueryResult {
  rows: TableRow[]
  rowCount: number
  totalCount: number
  limit: number
  offset: number
}
/** Result of a bulk update/delete operation. */
export interface BulkOperationResult {
  affectedCount: number
  affectedRowIds: string[]
}
/** Input for creating a new table. */
export interface CreateTableData {
  name: string
  description?: string
  schema: TableSchema
  workspaceId: string
  userId: string
  /** Optional max rows override based on billing plan. Defaults to TABLE_LIMITS.MAX_ROWS_PER_TABLE. */
  maxRows?: number
}
/** Input for inserting a single row. */
export interface InsertRowData {
  tableId: string
  data: RowData
  workspaceId: string
}
/** Input for inserting multiple rows in one call. */
export interface BatchInsertData {
  tableId: string
  rows: RowData[]
  workspaceId: string
}
/** Input for updating a single row (`data` replaces the row's payload). */
export interface UpdateRowData {
  tableId: string
  rowId: string
  data: RowData
  workspaceId: string
}
/** Input for bulk updates; `data` is shallow-merged into each matching row. */
export interface BulkUpdateData {
  tableId: string
  filter: Filter
  data: RowData
  limit?: number
  workspaceId: string
}
/** Input for bulk deletes of rows matching `filter`. */
export interface BulkDeleteData {
  tableId: string
  filter: Filter
  limit?: number
  workspaceId: string
}

View File

@@ -0,0 +1,533 @@
/**
* Validation utilities for table schemas and row data.
*/
import { db } from '@sim/db'
import { userTableRows } from '@sim/db/schema'
import { and, eq, or, sql } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { COLUMN_TYPES, NAME_PATTERN, TABLE_LIMITS } from './constants'
import type { ColumnDefinition, RowData, TableSchema, ValidationResult } from './types'
export type { ColumnDefinition, TableSchema, ValidationResult }
/** Internal success result for request-level validation helpers. */
type ValidationSuccess = { valid: true }
/** Internal failure result carrying a ready-to-send 400 response. */
type ValidationFailure = { valid: false; response: NextResponse }
/** Options for validating a single row. */
export interface ValidateRowOptions {
  rowData: RowData
  schema: TableSchema
  tableId: string
  // Row ID to exclude from unique checks (set when validating an update).
  excludeRowId?: string
  // Whether to run DB-backed unique-constraint checks; defaults to true.
  checkUnique?: boolean
}
/** Error information for a single row in batch validation. */
export interface BatchRowError {
  // Zero-based index of the failing row within the submitted batch.
  row: number
  errors: string[]
}
/** Options for validating multiple rows in batch. */
export interface ValidateBatchRowsOptions {
  rows: RowData[]
  schema: TableSchema
  tableId: string
  // Whether to run DB-backed unique-constraint checks; defaults to true.
  checkUnique?: boolean
}
/**
 * Validates a single row (size, schema, unique constraints) and returns a
 * ready-to-send 400 response on failure.
 * Unique checks run as targeted database queries rather than loading every
 * row into memory.
 */
export async function validateRowData(
  options: ValidateRowOptions
): Promise<ValidationSuccess | ValidationFailure> {
  const { rowData, schema, tableId, excludeRowId, checkUnique = true } = options

  // Shared shape for all failure responses in this helper.
  const fail = (error: string, details: string[]): ValidationFailure => ({
    valid: false,
    response: NextResponse.json({ error, details }, { status: 400 }),
  })

  const sizeCheck = validateRowSize(rowData)
  if (!sizeCheck.valid) {
    return fail('Invalid row data', sizeCheck.errors)
  }

  const schemaCheck = validateRowAgainstSchema(rowData, schema)
  if (!schemaCheck.valid) {
    return fail('Row data does not match schema', schemaCheck.errors)
  }

  if (checkUnique) {
    const uniqueCheck = await checkUniqueConstraintsDb(tableId, rowData, schema, excludeRowId)
    if (!uniqueCheck.valid) {
      return fail('Unique constraint violation', uniqueCheck.errors)
    }
  }

  return { valid: true }
}
/**
* Validates multiple rows for batch insert (size, schema, unique constraints including within batch).
* Uses optimized database queries for unique constraint checks to avoid loading all rows into memory.
*/
export async function validateBatchRows(
options: ValidateBatchRowsOptions
): Promise<ValidationSuccess | ValidationFailure> {
const { rows, schema, tableId, checkUnique = true } = options
const errors: BatchRowError[] = []
for (let i = 0; i < rows.length; i++) {
const rowData = rows[i]
const sizeValidation = validateRowSize(rowData)
if (!sizeValidation.valid) {
errors.push({ row: i, errors: sizeValidation.errors })
continue
}
const schemaValidation = validateRowAgainstSchema(rowData, schema)
if (!schemaValidation.valid) {
errors.push({ row: i, errors: schemaValidation.errors })
}
}
if (errors.length > 0) {
return {
valid: false,
response: NextResponse.json(
{ error: 'Validation failed for some rows', details: errors },
{ status: 400 }
),
}
}
if (checkUnique) {
const uniqueColumns = getUniqueColumns(schema)
if (uniqueColumns.length > 0) {
// Use optimized batch unique constraint check
const uniqueResult = await checkBatchUniqueConstraintsDb(tableId, rows, schema)
if (!uniqueResult.valid) {
return {
valid: false,
response: NextResponse.json(
{ error: 'Unique constraint violations in batch', details: uniqueResult.errors },
{ status: 400 }
),
}
}
}
}
return { valid: true }
}
/** Validates table name presence, length, and NAME_PATTERN format. */
export function validateTableName(name: string): ValidationResult {
  if (!name || typeof name !== 'string') {
    return { valid: false, errors: ['Table name is required'] }
  }

  const errors: string[] = []
  if (name.length > TABLE_LIMITS.MAX_TABLE_NAME_LENGTH) {
    errors.push(
      `Table name exceeds maximum length (${TABLE_LIMITS.MAX_TABLE_NAME_LENGTH} characters)`
    )
  }
  if (!NAME_PATTERN.test(name)) {
    errors.push(
      'Table name must start with letter or underscore, followed by alphanumeric or underscore'
    )
  }
  return { valid: errors.length === 0, errors }
}
/** Validates table schema structure, column definitions, count limits, and name uniqueness. */
export function validateTableSchema(schema: TableSchema): ValidationResult {
  if (!schema || typeof schema !== 'object') {
    return { valid: false, errors: ['Schema is required'] }
  }
  if (!Array.isArray(schema.columns)) {
    return { valid: false, errors: ['Schema must have columns array'] }
  }

  const errors: string[] = []
  const { columns } = schema

  if (columns.length === 0) {
    errors.push('Schema must have at least one column')
  }
  if (columns.length > TABLE_LIMITS.MAX_COLUMNS_PER_TABLE) {
    errors.push(`Schema exceeds maximum columns (${TABLE_LIMITS.MAX_COLUMNS_PER_TABLE})`)
  }

  for (const column of columns) {
    errors.push(...validateColumnDefinition(column).errors)
  }

  // Column names must be unique case-insensitively.
  const lowered = columns.map((c) => c.name.toLowerCase())
  if (new Set(lowered).size !== lowered.length) {
    errors.push('Duplicate column names found')
  }

  return { valid: errors.length === 0, errors }
}
/**
 * Validates row data against the schema: every required column must be
 * present (non-null), and every present value must match its column's
 * declared type. Strings are additionally checked against the max length.
 */
export function validateRowAgainstSchema(data: RowData, schema: TableSchema): ValidationResult {
  const errors: string[] = []
  for (const column of schema.columns) {
    const value = data[column.name]
    const isAbsent = value === undefined || value === null
    if (column.required && isAbsent) {
      errors.push(`Missing required field: ${column.name}`)
      continue
    }
    // Optional columns may be omitted; nothing further to check.
    if (isAbsent) continue
    switch (column.type) {
      case 'string': {
        if (typeof value !== 'string') {
          errors.push(`${column.name} must be string, got ${typeof value}`)
        } else if (value.length > TABLE_LIMITS.MAX_STRING_VALUE_LENGTH) {
          errors.push(`${column.name} exceeds max string length`)
        }
        break
      }
      case 'number': {
        const isValidNumber = typeof value === 'number' && !Number.isNaN(value)
        if (!isValidNumber) {
          errors.push(`${column.name} must be number`)
        }
        break
      }
      case 'boolean': {
        if (typeof value !== 'boolean') {
          errors.push(`${column.name} must be boolean`)
        }
        break
      }
      case 'date': {
        // Accept either a Date instance or a string Date.parse understands.
        const isDateObject = value instanceof Date
        const isParseableString = typeof value === 'string' && !Number.isNaN(Date.parse(value))
        if (!isDateObject && !isParseableString) {
          errors.push(`${column.name} must be valid date`)
        }
        break
      }
      case 'json': {
        // Serializability check: catches circular references and BigInt.
        try {
          JSON.stringify(value)
        } catch {
          errors.push(`${column.name} must be valid JSON`)
        }
        break
      }
    }
  }
  return { valid: errors.length === 0, errors }
}
/**
 * Validates row data size is within the configured byte limit.
 * Size is measured as UTF-8 bytes of the JSON serialization — not
 * `.length` of the string, which counts UTF-16 code units and would
 * under-count multi-byte characters against MAX_ROW_SIZE_BYTES.
 */
export function validateRowSize(data: RowData): ValidationResult {
  // TextEncoder is a global in both Node (>=11) and browsers; no import needed.
  const size = new TextEncoder().encode(JSON.stringify(data)).length
  if (size > TABLE_LIMITS.MAX_ROW_SIZE_BYTES) {
    return {
      valid: false,
      errors: [`Row size exceeds limit (${size} bytes > ${TABLE_LIMITS.MAX_ROW_SIZE_BYTES} bytes)`],
    }
  }
  return { valid: true, errors: [] }
}
/** Returns the subset of schema columns whose `unique` flag is explicitly true. */
export function getUniqueColumns(schema: TableSchema): ColumnDefinition[] {
  const uniqueColumns: ColumnDefinition[] = []
  for (const col of schema.columns) {
    if (col.unique === true) {
      uniqueColumns.push(col)
    }
  }
  return uniqueColumns
}
/** Validates unique constraints against existing rows (in-memory version for batch validation within a batch). */
export function validateUniqueConstraints(
data: RowData,
schema: TableSchema,
existingRows: { id: string; data: RowData }[],
excludeRowId?: string
): ValidationResult {
const errors: string[] = []
const uniqueColumns = getUniqueColumns(schema)
for (const column of uniqueColumns) {
const value = data[column.name]
if (value === null || value === undefined) continue
const duplicate = existingRows.find((row) => {
if (excludeRowId && row.id === excludeRowId) return false
const existingValue = row.data[column.name]
if (typeof value === 'string' && typeof existingValue === 'string') {
return value.toLowerCase() === existingValue.toLowerCase()
}
return value === existingValue
})
if (duplicate) {
errors.push(
`Column "${column.name}" must be unique. Value "${value}" already exists in row ${duplicate.id}`
)
}
}
return { valid: errors.length === 0, errors }
}
/**
 * Checks unique constraints using targeted database queries.
 * Only queries for specific conflicting values instead of loading all rows,
 * reducing memory usage from O(n) to O(1) in the number of existing rows.
 *
 * Column names are passed as bound parameters (with an explicit ::text cast
 * to disambiguate the jsonb -> / ->> operators) instead of being spliced
 * into the SQL text with sql.raw, so a malformed or malicious column name
 * cannot inject SQL even if schema validation was bypassed upstream.
 */
export async function checkUniqueConstraintsDb(
  tableId: string,
  data: RowData,
  schema: TableSchema,
  excludeRowId?: string
): Promise<ValidationResult> {
  const errors: string[] = []
  const uniqueColumns = getUniqueColumns(schema)
  if (uniqueColumns.length === 0) {
    return { valid: true, errors: [] }
  }
  // Build one condition per unique column that has a concrete value.
  const conditions = []
  for (const column of uniqueColumns) {
    const value = data[column.name]
    // Null/undefined values never violate uniqueness.
    if (value === null || value === undefined) continue
    if (typeof value === 'string') {
      // Strings compare case-insensitively.
      conditions.push({
        column,
        value,
        sql: sql`lower(${userTableRows.data}->>${column.name}::text) = ${value.toLowerCase()}`,
      })
    } else {
      // All other types use direct JSONB equality.
      conditions.push({
        column,
        value,
        sql: sql`(${userTableRows.data}->${column.name}::text)::jsonb = ${JSON.stringify(value)}::jsonb`,
      })
    }
  }
  if (conditions.length === 0) {
    return { valid: true, errors: [] }
  }
  // Query each unique column separately so error messages can name the
  // specific column and conflicting row.
  for (const condition of conditions) {
    const baseCondition = and(eq(userTableRows.tableId, tableId), condition.sql)
    const whereClause = excludeRowId
      ? and(baseCondition, sql`${userTableRows.id} != ${excludeRowId}`)
      : baseCondition
    const conflictingRow = await db
      .select({ id: userTableRows.id })
      .from(userTableRows)
      .where(whereClause)
      .limit(1)
    if (conflictingRow.length > 0) {
      errors.push(
        `Column "${condition.column.name}" must be unique. Value "${condition.value}" already exists in row ${conflictingRow[0].id}`
      )
    }
  }
  return { valid: errors.length === 0, errors }
}
/**
 * Checks unique constraints for a batch of rows using targeted database
 * queries. Validates both against existing database rows and for duplicates
 * within the batch itself. Returns per-row error lists sorted by row index.
 *
 * Column names are passed as bound parameters (with an explicit ::text cast
 * to disambiguate the jsonb -> / ->> operators) instead of sql.raw splicing,
 * closing the SQL-injection vector a hostile column name would otherwise open.
 */
export async function checkBatchUniqueConstraintsDb(
  tableId: string,
  rows: RowData[],
  schema: TableSchema
): Promise<{ valid: boolean; errors: Array<{ row: number; errors: string[] }> }> {
  const uniqueColumns = getUniqueColumns(schema)
  const rowErrors: Array<{ row: number; errors: string[] }> = []
  if (uniqueColumns.length === 0) {
    return { valid: true, errors: [] }
  }
  // Per-column set of normalized values to check against the DB.
  const valuesByColumn = new Map<string, { values: Set<string>; column: ColumnDefinition }>()
  for (const column of uniqueColumns) {
    valuesByColumn.set(column.name, { values: new Set(), column })
  }
  // columnName -> (normalizedValue -> firstRowIndex); used to detect
  // duplicates within the batch itself before touching the database.
  const batchValueMap = new Map<string, Map<string, number>>()
  for (const column of uniqueColumns) {
    batchValueMap.set(column.name, new Map())
  }
  for (let i = 0; i < rows.length; i++) {
    const rowData = rows[i]
    const currentRowErrors: string[] = []
    for (const column of uniqueColumns) {
      const value = rowData[column.name]
      if (value === null || value === undefined) continue
      // Normalize: strings lowercase (case-insensitive uniqueness),
      // everything else via JSON serialization.
      const normalizedValue =
        typeof value === 'string' ? value.toLowerCase() : JSON.stringify(value)
      const columnValueMap = batchValueMap.get(column.name)!
      if (columnValueMap.has(normalizedValue)) {
        const firstRowIndex = columnValueMap.get(normalizedValue)!
        currentRowErrors.push(
          `Column "${column.name}" must be unique. Value "${value}" duplicates row ${firstRowIndex + 1} in batch`
        )
      } else {
        columnValueMap.set(normalizedValue, i)
        valuesByColumn.get(column.name)!.values.add(normalizedValue)
      }
    }
    if (currentRowErrors.length > 0) {
      rowErrors.push({ row: i, errors: currentRowErrors })
    }
  }
  // Check all collected values of each column against the database at once.
  for (const [columnName, { values, column }] of valuesByColumn) {
    if (values.size === 0) continue
    const valueArray = Array.from(values)
    const valueConditions = valueArray.map((normalizedValue) => {
      // String columns were normalized to lowercase; compare case-insensitively.
      if (column.type === 'string') {
        return sql`lower(${userTableRows.data}->>${columnName}::text) = ${normalizedValue}`
      }
      // Non-string normalized values are JSON serializations; compare as jsonb.
      return sql`(${userTableRows.data}->${columnName}::text)::jsonb = ${normalizedValue}::jsonb`
    })
    // No LIMIT here: capping at valueArray.length could let several legacy
    // rows sharing one value crowd out conflicts for other values. The match
    // set is bounded by rows holding these specific values, so this stays small.
    const conflictingRows = await db
      .select({
        id: userTableRows.id,
        data: userTableRows.data,
      })
      .from(userTableRows)
      .where(and(eq(userTableRows.tableId, tableId), or(...valueConditions)))
    // Map each DB conflict back to the batch rows that carry the same value.
    for (const conflict of conflictingRows) {
      const conflictData = conflict.data as RowData
      const conflictValue = conflictData[columnName]
      const normalizedConflictValue =
        typeof conflictValue === 'string'
          ? conflictValue.toLowerCase()
          : JSON.stringify(conflictValue)
      for (let i = 0; i < rows.length; i++) {
        const rowValue = rows[i][columnName]
        if (rowValue === null || rowValue === undefined) continue
        const normalizedRowValue =
          typeof rowValue === 'string' ? rowValue.toLowerCase() : JSON.stringify(rowValue)
        if (normalizedRowValue === normalizedConflictValue) {
          let rowError = rowErrors.find((e) => e.row === i)
          if (!rowError) {
            rowError = { row: i, errors: [] }
            rowErrors.push(rowError)
          }
          const errorMsg = `Column "${columnName}" must be unique. Value "${rowValue}" already exists in row ${conflict.id}`
          // Dedupe: the same conflict can be reached via multiple DB rows.
          if (!rowError.errors.includes(errorMsg)) {
            rowError.errors.push(errorMsg)
          }
        }
      }
    }
  }
  // Present errors in batch order regardless of discovery order.
  rowErrors.sort((a, b) => a.row - b.row)
  return { valid: rowErrors.length === 0, errors: rowErrors }
}
/**
 * Validates a single column definition: the name must be a non-empty string
 * within the length limit matching the identifier pattern, and the type must
 * be one of the supported column types.
 */
export function validateColumnDefinition(column: ColumnDefinition): ValidationResult {
  if (!column.name || typeof column.name !== 'string') {
    return { valid: false, errors: ['Column name is required'] }
  }
  const errors: string[] = []
  if (column.name.length > TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH) {
    errors.push(
      `Column name "${column.name}" exceeds maximum length (${TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH} characters)`
    )
  }
  if (!NAME_PATTERN.test(column.name)) {
    errors.push(
      `Column name "${column.name}" must start with letter or underscore, followed by alphanumeric or underscore`
    )
  }
  if (!COLUMN_TYPES.includes(column.type)) {
    errors.push(
      `Column "${column.name}" has invalid type "${column.type}". Valid types: ${COLUMN_TYPES.join(', ')}`
    )
  }
  return { valid: errors.length === 0, errors }
}

View File

@@ -500,11 +500,14 @@ export async function transformBlockTool(
const userProvidedParams = block.params || {}
const llmSchema = await createLLMToolSchema(toolConfig, userProvidedParams)
const { schema: llmSchema, enrichedDescription } = await createLLMToolSchema(
toolConfig,
userProvidedParams
)
let uniqueToolId = toolConfig.id
let toolName = toolConfig.name
let toolDescription = toolConfig.description
let toolDescription = enrichedDescription || toolConfig.description
if (toolId === 'workflow_executor' && userProvidedParams.workflowId) {
uniqueToolId = `${toolConfig.id}_${userProvidedParams.workflowId}`
@@ -521,6 +524,8 @@ export async function transformBlockTool(
}
} else if (toolId.startsWith('knowledge_') && userProvidedParams.knowledgeBaseId) {
uniqueToolId = `${toolConfig.id}_${userProvidedParams.knowledgeBaseId}`
} else if (toolId.startsWith('table_') && userProvidedParams.tableId) {
uniqueToolId = `${toolConfig.id}_${userProvidedParams.tableId}`
}
const blockParamsFn = blockDef?.tools?.config?.params as

View File

@@ -57,6 +57,54 @@ const ERROR_EXTRACTORS: ErrorExtractorConfig[] = [
examples: ['Various REST APIs'],
extract: (errorInfo) => errorInfo?.data?.details?.[0]?.message,
},
{
id: 'details-string-array',
description: 'Details array containing strings (validation errors)',
examples: ['Table API', 'Validation APIs'],
extract: (errorInfo) => {
const details = errorInfo?.data?.details
if (!Array.isArray(details) || details.length === 0) return undefined
// Check if it's an array of strings
if (details.every((d) => typeof d === 'string')) {
const errorMessage = errorInfo?.data?.error || 'Validation failed'
return `${errorMessage}: ${details.join('; ')}`
}
return undefined
},
},
{
id: 'batch-validation-errors',
description: 'Batch validation errors with row numbers and error arrays',
examples: ['Table Batch Insert'],
extract: (errorInfo) => {
const details = errorInfo?.data?.details
if (!Array.isArray(details) || details.length === 0) return undefined
// Check if it's an array of objects with row numbers and errors
if (
details.every(
(d) =>
typeof d === 'object' &&
d !== null &&
'row' in d &&
'errors' in d &&
Array.isArray(d.errors)
)
) {
const errorMessage = errorInfo?.data?.error || 'Validation failed'
const rowErrors = details
.map((detail: { row: number; errors: string[] }) => {
return `Row ${detail.row}: ${detail.errors.join(', ')}`
})
.join('; ')
return `${errorMessage}: ${rowErrors}`
}
return undefined
},
},
{
id: 'hunter-errors',
description: 'Hunter API error details',
@@ -176,6 +224,8 @@ export const ErrorExtractorId = {
GRAPHQL_ERRORS: 'graphql-errors',
TWITTER_ERRORS: 'twitter-errors',
DETAILS_ARRAY: 'details-array',
DETAILS_STRING_ARRAY: 'details-string-array',
BATCH_VALIDATION_ERRORS: 'batch-validation-errors',
HUNTER_ERRORS: 'hunter-errors',
ERRORS_ARRAY_STRING: 'errors-array-string',
TELEGRAM_DESCRIPTION: 'telegram-description',

View File

@@ -24,22 +24,41 @@ import {
const logger = createLogger('Tools')
/**
* Normalizes a tool ID by stripping resource ID suffix (UUID).
* Normalizes a tool ID by stripping resource ID suffix (UUID/tableId).
* Workflow tools: 'workflow_executor_<uuid>' -> 'workflow_executor'
* Knowledge tools: 'knowledge_search_<uuid>' -> 'knowledge_search'
* Table tools: 'table_query_rows_<tableId>' -> 'table_query_rows'
*/
function normalizeToolId(toolId: string): string {
// Check for workflow_executor_<uuid> pattern
if (toolId.startsWith('workflow_executor_') && toolId.length > 'workflow_executor_'.length) {
return 'workflow_executor'
}
// Check for knowledge_<operation>_<uuid> pattern
const knowledgeOps = ['knowledge_search', 'knowledge_upload_chunk', 'knowledge_create_document']
for (const op of knowledgeOps) {
if (toolId.startsWith(`${op}_`) && toolId.length > op.length + 1) {
return op
}
}
const tableOps = [
'table_query_rows',
'table_insert_row',
'table_batch_insert_rows',
'table_update_row',
'table_update_rows_by_filter',
'table_delete_rows_by_filter',
'table_upsert_row',
'table_get_row',
'table_delete_row',
'table_get_schema',
]
for (const op of tableOps) {
if (toolId.startsWith(`${op}_`) && toolId.length > op.length + 1) {
return op
}
}
return toolId
}

View File

@@ -89,7 +89,7 @@ describe('Tool Parameters Utils', () => {
channel: '#general',
}
const schema = await createLLMToolSchema(mockToolConfig, userProvidedParams)
const { schema } = await createLLMToolSchema(mockToolConfig, userProvidedParams)
expect(schema.properties).not.toHaveProperty('apiKey') // user-only, excluded
expect(schema.properties).not.toHaveProperty('channel') // user-provided, excluded
@@ -100,7 +100,7 @@ describe('Tool Parameters Utils', () => {
})
it.concurrent('should include all parameters when none are user-provided', async () => {
const schema = await createLLMToolSchema(mockToolConfig, {})
const { schema } = await createLLMToolSchema(mockToolConfig, {})
expect(schema.properties).not.toHaveProperty('apiKey') // user-only, never shown to LLM
expect(schema.properties).toHaveProperty('message') // user-or-llm, shown to LLM
@@ -332,7 +332,10 @@ describe('Tool Parameters Utils', () => {
inputMapping: '{}',
}
const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
const { schema } = await createLLMToolSchema(
mockWorkflowExecutorConfig,
userProvidedParams
)
expect(schema.properties).toHaveProperty('inputMapping')
expect(schema.properties.inputMapping.type).toBe('object')
@@ -347,7 +350,10 @@ describe('Tool Parameters Utils', () => {
inputMapping: '{"query": "", "limit": ""}',
}
const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
const { schema } = await createLLMToolSchema(
mockWorkflowExecutorConfig,
userProvidedParams
)
expect(schema.properties).toHaveProperty('inputMapping')
}
@@ -360,7 +366,10 @@ describe('Tool Parameters Utils', () => {
workflowId: 'workflow-123',
}
const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
const { schema } = await createLLMToolSchema(
mockWorkflowExecutorConfig,
userProvidedParams
)
expect(schema.properties).toHaveProperty('inputMapping')
}
@@ -371,7 +380,7 @@ describe('Tool Parameters Utils', () => {
workflowId: 'workflow-123',
}
const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
const { schema } = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
expect(schema.properties).not.toHaveProperty('workflowId')
expect(schema.properties).toHaveProperty('inputMapping')
@@ -545,7 +554,7 @@ describe('Tool Parameters Utils', () => {
describe('Type Interface Validation', () => {
it.concurrent('should have properly typed ToolSchema', async () => {
const schema: ToolSchema = await createLLMToolSchema(mockToolConfig, {})
const { schema } = await createLLMToolSchema(mockToolConfig, {})
expect(schema.type).toBe('object')
expect(typeof schema.properties).toBe('object')

View File

@@ -122,6 +122,11 @@ export interface ToolSchema {
required: string[]
}
export interface LLMToolSchemaResult {
schema: ToolSchema
enrichedDescription?: string
}
export interface ValidationResult {
valid: boolean
missingParams: string[]
@@ -434,27 +439,19 @@ export function createUserToolSchema(toolConfig: ToolConfig): ToolSchema {
export async function createLLMToolSchema(
toolConfig: ToolConfig,
userProvidedParams: Record<string, unknown>
): Promise<ToolSchema> {
): Promise<LLMToolSchemaResult> {
const schema: ToolSchema = {
type: 'object',
properties: {},
required: [],
}
// Only include parameters that the LLM should/can provide
for (const [paramId, param] of Object.entries(toolConfig.params)) {
// Check if this param has schema enrichment config
const enrichmentConfig = toolConfig.schemaEnrichment?.[paramId]
// Special handling for workflow_executor's inputMapping parameter
// Always include in LLM schema so LLM can provide dynamic input values
// even if user has configured empty/partial inputMapping in the UI
const isWorkflowInputMapping =
toolConfig.id === 'workflow_executor' && paramId === 'inputMapping'
// Parameters with enrichment config are treated specially:
// - Include them if dependency value is available (even if normally hidden)
// - Skip them if dependency value is not available
if (enrichmentConfig) {
const dependencyValue = userProvidedParams[enrichmentConfig.dependsOn] as string
if (!dependencyValue) {
@@ -476,26 +473,21 @@ export async function createLLMToolSchema(
}
if (!isWorkflowInputMapping) {
// Skip parameters that user has already provided
if (isNonEmpty(userProvidedParams[paramId])) {
continue
}
// Skip parameters that are user-only (never shown to LLM)
if (param.visibility === 'user-only') {
continue
}
// Skip hidden parameters
if (param.visibility === 'hidden') {
continue
}
}
// Add parameter to LLM schema
const propertySchema = buildParameterSchema(toolConfig.id, paramId, param)
// Apply dynamic schema enrichment for workflow_executor's inputMapping
if (isWorkflowInputMapping) {
const workflowId = userProvidedParams.workflowId as string
if (workflowId) {
@@ -505,13 +497,29 @@ export async function createLLMToolSchema(
schema.properties[paramId] = propertySchema
// Add to required if LLM must provide it and it's originally required
if ((param.visibility === 'user-or-llm' || param.visibility === 'llm-only') && param.required) {
schema.required.push(paramId)
}
}
return schema
if (toolConfig.toolEnrichment) {
const dependencyValue = userProvidedParams[toolConfig.toolEnrichment.dependsOn] as string
if (dependencyValue) {
const enriched = await toolConfig.toolEnrichment.enrichTool(
dependencyValue,
schema,
toolConfig.description
)
if (enriched) {
return {
schema: enriched.parameters as ToolSchema,
enrichedDescription: enriched.description,
}
}
}
}
return { schema }
}
/**

View File

@@ -1580,6 +1580,7 @@ import {
spotifyUnfollowPlaylistTool,
spotifyUpdatePlaylistTool,
} from '@/tools/spotify'
import { sqsSendTool } from '@/tools/sqs'
import {
sshCheckCommandExistsTool,
sshCheckFileExistsTool,
@@ -1684,6 +1685,20 @@ import {
supabaseUpsertTool,
supabaseVectorSearchTool,
} from '@/tools/supabase'
import {
tableBatchInsertRowsTool,
tableCreateTool,
tableDeleteRowsByFilterTool,
tableDeleteRowTool,
tableGetRowTool,
tableGetSchemaTool,
tableInsertRowTool,
tableListTool,
tableQueryRowsTool,
tableUpdateRowsByFilterTool,
tableUpdateRowTool,
tableUpsertRowTool,
} from '@/tools/table'
import { tavilyCrawlTool, tavilyExtractTool, tavilyMapTool, tavilySearchTool } from '@/tools/tavily'
import {
telegramDeleteMessageTool,
@@ -1901,7 +1916,6 @@ import {
zoomListRecordingsTool,
zoomUpdateMeetingTool,
} from '@/tools/zoom'
import { sqsSendTool } from './sqs'
// Registry of all available tools
export const tools: Record<string, ToolConfig> = {
@@ -3439,6 +3453,18 @@ export const tools: Record<string, ToolConfig> = {
salesforce_describe_object: salesforceDescribeObjectTool,
salesforce_list_objects: salesforceListObjectsTool,
sqs_send: sqsSendTool,
table_create: tableCreateTool,
table_list: tableListTool,
table_insert_row: tableInsertRowTool,
table_batch_insert_rows: tableBatchInsertRowsTool,
table_upsert_row: tableUpsertRowTool,
table_update_row: tableUpdateRowTool,
table_update_rows_by_filter: tableUpdateRowsByFilterTool,
table_delete_row: tableDeleteRowTool,
table_delete_rows_by_filter: tableDeleteRowsByFilterTool,
table_query_rows: tableQueryRowsTool,
table_get_row: tableGetRowTool,
table_get_schema: tableGetSchemaTool,
mailchimp_get_audiences: mailchimpGetAudiencesTool,
mailchimp_get_audience: mailchimpGetAudienceTool,
mailchimp_create_audience: mailchimpCreateAudienceTool,

View File

@@ -1,7 +1,76 @@
import { createLogger } from '@sim/logger'
import { enrichTableToolDescription, enrichTableToolParameters } from '@/lib/table/llm/enrichment'
import type { TableSummary } from '@/lib/table/types'
const logger = createLogger('SchemaEnrichers')
async function fetchTableSchema(tableId: string): Promise<TableSummary | null> {
try {
const { buildAuthHeaders, buildAPIUrl } = await import('@/executor/utils/http')
const headers = await buildAuthHeaders()
const url = buildAPIUrl(`/api/table/${tableId}/schema`)
const response = await fetch(url.toString(), { headers })
if (!response.ok) {
logger.warn(`Failed to fetch table schema for ${tableId}: ${response.status}`)
return null
}
const result = await response.json()
const data = result.data || result
return {
name: data.name || 'Table',
columns: data.columns || [],
}
} catch (error) {
logger.error('Failed to fetch table schema:', error)
return null
}
}
export async function enrichTableToolSchema(
tableId: string,
toolId: string,
originalSchema: {
type: 'object'
properties: Record<string, unknown>
required: string[]
},
originalDescription: string
): Promise<{
description: string
parameters: {
type: 'object'
properties: Record<string, unknown>
required: string[]
}
} | null> {
const tableSchema = await fetchTableSchema(tableId)
if (!tableSchema) {
return null
}
const enrichedDescription = enrichTableToolDescription(originalDescription, tableSchema, toolId)
const enrichedParams = enrichTableToolParameters(
{ properties: originalSchema.properties, required: originalSchema.required },
tableSchema,
toolId
)
return {
description: enrichedDescription,
parameters: {
type: 'object',
properties: enrichedParams.properties,
required:
enrichedParams.required.length > 0 ? enrichedParams.required : originalSchema.required,
},
}
}
interface TagDefinition {
id: string
tagSlot: string

View File

@@ -0,0 +1,75 @@
import { TABLE_LIMITS } from '@/lib/table/constants'
import { enrichTableToolSchema } from '@/tools/schema-enrichers'
import type { ToolConfig } from '@/tools/types'
import type { TableBatchInsertParams, TableBatchInsertResponse } from './types'
export const tableBatchInsertRowsTool: ToolConfig<
TableBatchInsertParams,
TableBatchInsertResponse
> = {
id: 'table_batch_insert_rows',
name: 'Batch Insert Rows',
description: `Insert multiple rows into a table at once (up to ${TABLE_LIMITS.MAX_BATCH_INSERT_SIZE} rows)`,
version: '1.0.0',
toolEnrichment: {
dependsOn: 'tableId',
enrichTool: (tableId, schema, desc) =>
enrichTableToolSchema(tableId, 'table_batch_insert_rows', schema, desc),
},
params: {
tableId: {
type: 'string',
required: true,
description: 'Table ID',
visibility: 'user-only',
},
rows: {
type: 'array',
required: true,
description: `Array of row data objects (max ${TABLE_LIMITS.MAX_BATCH_INSERT_SIZE} rows)`,
visibility: 'user-or-llm',
},
},
request: {
url: (params: TableBatchInsertParams) => `/api/table/${params.tableId}/rows`,
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params: TableBatchInsertParams) => {
const workspaceId = params._context?.workspaceId
if (!workspaceId) {
throw new Error('Workspace ID is required in execution context')
}
return {
rows: params.rows,
workspaceId,
}
},
},
transformResponse: async (response): Promise<TableBatchInsertResponse> => {
const result = await response.json()
const data = result.data || result
return {
success: true,
output: {
rows: data.rows,
insertedCount: data.insertedCount,
message: data.message || 'Rows inserted successfully',
},
}
},
outputs: {
success: { type: 'boolean', description: 'Whether rows were inserted' },
rows: { type: 'array', description: 'Inserted rows data' },
insertedCount: { type: 'number', description: 'Number of rows inserted' },
message: { type: 'string', description: 'Status message' },
},
}

View File

@@ -0,0 +1,70 @@
import type { ToolConfig } from '@/tools/types'
import type { TableCreateParams, TableCreateResponse } from './types'
export const tableCreateTool: ToolConfig<TableCreateParams, TableCreateResponse> = {
id: 'table_create',
name: 'Create Table',
description: 'Create a new user-defined table with schema',
version: '1.0.0',
params: {
name: {
type: 'string',
required: true,
description: 'Table name (alphanumeric, underscores, 1-50 chars)',
visibility: 'user-or-llm',
},
description: {
type: 'string',
required: false,
description: 'Optional table description',
visibility: 'user-or-llm',
},
schema: {
type: 'object',
required: true,
description: 'Table schema with column definitions',
visibility: 'user-or-llm',
},
},
request: {
url: '/api/table',
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => {
const workspaceId = params._context?.workspaceId
if (!workspaceId) {
throw new Error('Workspace ID is required in execution context')
}
return {
name: params.name,
description: params.description,
schema: params.schema,
workspaceId,
}
},
},
transformResponse: async (response): Promise<TableCreateResponse> => {
const result = await response.json()
const data = result.data || result
return {
success: true,
output: {
table: data.table,
message: data.message || 'Table created successfully',
},
}
},
outputs: {
success: { type: 'boolean', description: 'Whether table was created' },
table: { type: 'json', description: 'Created table metadata' },
message: { type: 'string', description: 'Status message' },
},
}

View File

@@ -0,0 +1,61 @@
import type { ToolConfig } from '@/tools/types'
import type { TableDeleteResponse, TableRowDeleteParams } from './types'
export const tableDeleteRowTool: ToolConfig<TableRowDeleteParams, TableDeleteResponse> = {
id: 'table_delete_row',
name: 'Delete Row',
description: 'Delete a row from a table',
version: '1.0.0',
params: {
tableId: {
type: 'string',
required: true,
description: 'Table ID',
visibility: 'user-only',
},
rowId: {
type: 'string',
required: true,
description: 'Row ID to delete',
visibility: 'user-or-llm',
},
},
request: {
url: (params: TableRowDeleteParams) => `/api/table/${params.tableId}/rows/${params.rowId}`,
method: 'DELETE',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params: TableRowDeleteParams) => {
const workspaceId = params._context?.workspaceId
if (!workspaceId) {
throw new Error('Workspace ID is required in execution context')
}
return {
workspaceId,
}
},
},
transformResponse: async (response): Promise<TableDeleteResponse> => {
const result = await response.json()
const data = result.data || result
return {
success: true,
output: {
deletedCount: data.deletedCount,
message: data.message || 'Row deleted successfully',
},
}
},
outputs: {
success: { type: 'boolean', description: 'Whether row was deleted' },
deletedCount: { type: 'number', description: 'Number of rows deleted' },
message: { type: 'string', description: 'Status message' },
},
}

View File

@@ -0,0 +1,83 @@
import { TABLE_LIMITS } from '@/lib/table/constants'
import { enrichTableToolSchema } from '@/tools/schema-enrichers'
import type { ToolConfig } from '@/tools/types'
import type { TableBulkOperationResponse, TableDeleteByFilterParams } from './types'
export const tableDeleteRowsByFilterTool: ToolConfig<
TableDeleteByFilterParams,
TableBulkOperationResponse
> = {
id: 'table_delete_rows_by_filter',
name: 'Delete Rows by Filter',
description:
'Delete multiple rows that match filter criteria. Use with caution - supports optional limit for safety.',
version: '1.0.0',
toolEnrichment: {
dependsOn: 'tableId',
enrichTool: (tableId, schema, desc) =>
enrichTableToolSchema(tableId, 'table_delete_rows_by_filter', schema, desc),
},
params: {
tableId: {
type: 'string',
required: true,
description: 'Table ID',
visibility: 'user-only',
},
filter: {
type: 'object',
required: true,
description: 'Filter criteria using operators like $eq, $ne, $gt, $lt, $contains, $in, etc.',
visibility: 'user-or-llm',
},
limit: {
type: 'number',
required: false,
description: `Maximum number of rows to delete (default: no limit, max: ${TABLE_LIMITS.MAX_BULK_OPERATION_SIZE})`,
visibility: 'user-or-llm',
},
},
request: {
url: (params: TableDeleteByFilterParams) => `/api/table/${params.tableId}/rows`,
method: 'DELETE',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params: TableDeleteByFilterParams) => {
const workspaceId = params._context?.workspaceId
if (!workspaceId) {
throw new Error('Workspace ID is required in execution context')
}
return {
filter: params.filter,
limit: params.limit,
workspaceId,
}
},
},
transformResponse: async (response): Promise<TableBulkOperationResponse> => {
const result = await response.json()
const data = result.data || result
return {
success: true,
output: {
deletedCount: data.deletedCount || 0,
deletedRowIds: data.deletedRowIds || [],
message: data.message || 'Rows deleted successfully',
},
}
},
outputs: {
success: { type: 'boolean', description: 'Whether rows were deleted' },
deletedCount: { type: 'number', description: 'Number of rows deleted' },
deletedRowIds: { type: 'array', description: 'IDs of deleted rows' },
message: { type: 'string', description: 'Status message' },
},
}

Some files were not shown because too many files have changed in this diff Show More