This commit is contained in:
Lakee Sivaraya
2026-01-13 15:37:24 -08:00
parent e80660f218
commit 4316f45175
8 changed files with 553 additions and 287 deletions

250
PLAN.md
View File

@@ -1,250 +0,0 @@
# Table Block Implementation Plan
> Create a new "table" block type that enables users to define schemas and perform CRUD operations on lightweight, workspace/workflow-scoped tables stored in the existing PostgreSQL database using JSONB with application-level schema enforcement.
## Table of Contents
- [Architecture Overview](#architecture-overview)
- [Data Model](#data-model)
- [Implementation Files](#implementation-files)
- [Key Design Decisions](#key-design-decisions)
- [Limits and Limitations](#limits-and-limitations)
- [Implementation Checklist](#implementation-checklist)
## Architecture Overview
```mermaid
flowchart TB
subgraph UI [Block UI Layer]
TableBlock[Table Block]
SchemaEditor[Schema Editor SubBlock]
end
subgraph Tools [Tool Layer]
CreateTable[table_create]
Insert[table_insert]
Select[table_select]
Update[table_update]
Delete[table_delete]
DropTable[table_drop]
end
subgraph API [API Routes]
TableAPI["/api/tables"]
RowAPI["/api/tables/[tableId]/rows"]
end
subgraph DB [PostgreSQL]
SimTable[sim_table]
SimTableRow[sim_table_row]
end
TableBlock --> Tools
Tools --> API
API --> DB
```
## Data Model
Two new tables in the existing PostgreSQL database:
### `sim_table` - Table Definitions
| Column | Type | Description |
|--------|------|-------------|
| id | text | Primary key |
| workspace_id | text | FK to workspace |
| workflow_id | text | FK to workflow (nullable for workspace-scope) |
| name | text | Table name (unique per scope) |
| schema | jsonb | Column definitions with types/constraints |
| created_by | text | FK to user |
| created_at | timestamp | Creation time |
| updated_at | timestamp | Last update time |
### `sim_table_row` - Row Data
| Column | Type | Description |
|--------|------|-------------|
| id | text | Primary key |
| table_id | text | FK to sim_table |
| data | jsonb | Row data (validated against schema) |
| created_at | timestamp | Creation time |
| updated_at | timestamp | Last update time |
### Schema Format
**Example schema definition:**
```json
{
"columns": [
{ "name": "id", "type": "string", "primaryKey": true },
{ "name": "email", "type": "string", "required": true, "unique": true },
{ "name": "age", "type": "number" },
{ "name": "active", "type": "boolean", "default": true }
]
}
```
**Supported Types:** `string`, `number`, `boolean`, `date`, `json`
## Implementation Files
### 1. Database Schema
- `packages/db/schema.ts` - Add `simTable` and `simTableRow` table definitions
- Generate migration for the new tables
### 2. Tools (`apps/sim/tools/table/`)
| File | Purpose |
|------|---------|
| `types.ts` | Type definitions for params/responses |
| `create.ts` | Create table with schema |
| `insert.ts` | Insert row(s) with schema validation |
| `select.ts` | Query rows with filtering |
| `update.ts` | Update rows with schema validation |
| `delete.ts` | Delete rows |
| `drop.ts` | Drop table |
| `index.ts` | Barrel export |
### 3. Block Definition
- `apps/sim/blocks/blocks/table.ts` - Block config with:
- Operation dropdown (create, insert, select, update, delete, drop)
- Scope selector (workspace/workflow)
- Table selector (for existing tables)
- Schema editor (for create operation)
- Data/query inputs (operation-specific)
### 4. API Routes
- `apps/sim/app/api/tables/route.ts` - Create table, list tables
- `apps/sim/app/api/tables/[tableId]/route.ts` - Get/drop table
- `apps/sim/app/api/tables/[tableId]/rows/route.ts` - CRUD on rows
### 5. Schema Validation Library
- `apps/sim/lib/tables/schema.ts` - Schema validation utilities
- `apps/sim/lib/tables/types.ts` - Shared types
## Key Design Decisions
1. **Schema Enforcement**: Application-layer validation before database writes. JSONB stores data, but every insert/update validates against the table's schema.
2. **Concurrency**: PostgreSQL handles concurrent reads/writes natively. Row-level locking for updates.
3. **Indexing**: GIN index on `data` column for efficient JSONB queries. Additional indexes on `table_id` for fast row lookups.
4. **Scope Resolution**: Tables with `workflow_id = NULL` are workspace-scoped. Tables with `workflow_id` set are workflow-scoped.
5. **Table Selector**: New SubBlock type `table-selector` that fetches available tables based on current workspace/workflow context.
## Limits and Limitations
### Table Limits
| Limit | Free Plan | Pro Plan | Enterprise |
|-------|-----------|----------|------------|
| Tables per workspace | 10 | 50 | Unlimited |
| Tables per workflow | 5 | 20 | Unlimited |
| Columns per table | 50 | 100 | 200 |
### Row Limits
| Limit | Free Plan | Pro Plan | Enterprise |
|-------|-----------|----------|------------|
| Rows per table | 10,000 | 100,000 | 1,000,000 |
| Batch insert size | 100 | 500 | 1,000 |
| Batch update/delete size | 100 | 500 | 1,000 |
### Size Limits
| Limit | Value | Rationale |
|-------|-------|-----------|
| Column name length | 64 chars | Aligned with PostgreSQL identifier sizing (NAMEDATALEN = 64; 63 usable bytes) |
| Table name length | 64 chars | Aligned with PostgreSQL identifier sizing (NAMEDATALEN = 64; 63 usable bytes) |
| String field max length | 65,535 chars | ~64KB per text field |
| JSON field max size | 1 MB | PostgreSQL JSONB practical limit |
| Single row max size | 2 MB | Reasonable row size limit |
| Total table data size | Based on plan | Tied to workspace storage quota |
### Query Limits
| Limit | Value | Notes |
|-------|-------|-------|
| Default page size | 100 rows | Can be overridden up to max |
| Max page size | 1,000 rows | Prevents memory issues |
| Max filter conditions | 20 | AND/OR conditions combined |
| Query timeout | 30 seconds | Prevents long-running queries |
| Max concurrent queries per table | 50 | Rate limiting per table |
### Schema Constraints
| Constraint | Limit |
|------------|-------|
| Primary key columns | 1 (single column only) |
| Unique constraints | 5 per table |
| Required (NOT NULL) columns | Unlimited |
| Default values | Supported for all types |
| Foreign keys | Not supported (v1) |
| Computed columns | Not supported (v1) |
| Indexes | Auto-created for primary key and unique columns |
### Data Type Specifications
| Type | Storage | Min | Max | Notes |
|------|---------|-----|-----|-------|
| `string` | text | 0 chars | 65,535 chars | UTF-8 encoded |
| `number` | double precision | -1.7e308 | 1.7e308 | IEEE 754 double |
| `boolean` | boolean | - | - | true/false |
| `date` | timestamp | 4713 BC | 294276 AD | ISO 8601 format |
| `json` | jsonb | - | 1 MB | Nested objects/arrays |
### Operational Limitations
1. **No Transactions Across Tables**: Each operation is atomic to a single table. Cross-table transactions are not supported.
2. **No JOINs**: Cannot join data between tables. Use workflow logic to combine data from multiple tables.
3. **No Triggers/Hooks**: No automatic actions on insert/update/delete. Use workflow blocks for reactive logic.
4. **No Full-Text Search**: Basic filtering only. For full-text search, use the Knowledge Base feature.
5. **No Schema Migrations**: Schema changes require dropping and recreating the table (with data loss). Future versions may support additive migrations.
6. **Query Complexity**: Only basic operators supported:
- Comparison: `=`, `!=`, `>`, `<`, `>=`, `<=`
- String: `LIKE`, `ILIKE`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`
- Logical: `AND`, `OR`, `NOT`
- Null checks: `IS NULL`, `IS NOT NULL`
- Array: `IN`, `NOT IN`
### Performance Characteristics
| Operation | Expected Latency | Notes |
|-----------|------------------|-------|
| Insert (single row) | < 50ms | With schema validation |
| Insert (batch 100) | < 200ms | Parallel validation |
| Select (indexed) | < 20ms | Primary key or unique column |
| Select (filtered, 1K rows) | < 100ms | With GIN index |
| Update (single row) | < 50ms | By primary key |
| Delete (single row) | < 30ms | By primary key |
### Storage Accounting
- Table storage counts toward workspace storage quota
- Calculated as: `sum(row_data_size) + schema_overhead`
- Schema overhead: ~1KB per table
- Row overhead: ~100 bytes per row (metadata, timestamps)
## Implementation Checklist
- [ ] Add `simTable` and `simTableRow` to `packages/db/schema.ts` and generate migration
- [ ] Create `apps/sim/lib/tables/` with schema validation and types
- [ ] Create `apps/sim/tools/table/` with all 6 tool implementations
- [ ] Register tools in `apps/sim/tools/registry.ts`
- [ ] Create API routes for tables and rows CRUD operations
- [ ] Create `apps/sim/blocks/blocks/table.ts` block definition
- [ ] Register block in `apps/sim/blocks/registry.ts`
- [ ] Add `TableIcon` to `apps/sim/components/icons.tsx`

View File

@@ -7,7 +7,12 @@ import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import type { TableSchema } from '@/lib/table'
import { validateRowAgainstSchema, validateRowSize } from '@/lib/table'
import {
getUniqueColumns,
validateRowAgainstSchema,
validateRowSize,
validateUniqueConstraints,
} from '@/lib/table'
const logger = createLogger('TableRowAPI')
@@ -210,6 +215,33 @@ export async function PATCH(
)
}
// Check unique constraints if any unique columns exist
const uniqueColumns = getUniqueColumns(table.schema as TableSchema)
if (uniqueColumns.length > 0) {
// Fetch existing rows to check for uniqueness
const existingRows = await db
.select({
id: userTableRows.id,
data: userTableRows.data,
})
.from(userTableRows)
.where(eq(userTableRows.tableId, tableId))
const uniqueValidation = validateUniqueConstraints(
validated.data,
table.schema as TableSchema,
existingRows,
rowId // Exclude the current row being updated
)
if (!uniqueValidation.valid) {
return NextResponse.json(
{ error: 'Unique constraint violation', details: uniqueValidation.errors },
{ status: 400 }
)
}
}
// Update row
const now = new Date()

View File

@@ -7,7 +7,13 @@ import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import type { QueryFilter, TableSchema } from '@/lib/table'
import { TABLE_LIMITS, validateRowAgainstSchema, validateRowSize } from '@/lib/table'
import {
getUniqueColumns,
TABLE_LIMITS,
validateRowAgainstSchema,
validateRowSize,
validateUniqueConstraints,
} from '@/lib/table'
import { buildFilterClause, buildSortClause } from '@/lib/table/query-builder'
const logger = createLogger('TableRowsAPI')
@@ -161,6 +167,49 @@ async function handleBatchInsert(requestId: string, tableId: string, body: any,
)
}
// Check unique constraints if any unique columns exist
const uniqueColumns = getUniqueColumns(table.schema as TableSchema)
if (uniqueColumns.length > 0) {
// Fetch existing rows to check for uniqueness
const existingRows = await db
.select({
id: userTableRows.id,
data: userTableRows.data,
})
.from(userTableRows)
.where(eq(userTableRows.tableId, tableId))
// Validate each row for unique constraints
for (let i = 0; i < validated.rows.length; i++) {
const rowData = validated.rows[i]
// Also check against other rows in the batch
const batchRows = validated.rows.slice(0, i).map((data, idx) => ({
id: `batch_${idx}`,
data,
}))
const uniqueValidation = validateUniqueConstraints(rowData, table.schema as TableSchema, [
...existingRows,
...batchRows,
])
if (!uniqueValidation.valid) {
errors.push({ row: i, errors: uniqueValidation.errors })
}
}
if (errors.length > 0) {
return NextResponse.json(
{
error: 'Unique constraint violations in batch',
details: errors,
},
{ status: 400 }
)
}
}
// Insert all rows
const now = new Date()
const rowsToInsert = validated.rows.map((data) => ({
@@ -271,6 +320,32 @@ export async function POST(
)
}
// Check unique constraints if any unique columns exist
const uniqueColumns = getUniqueColumns(table.schema as TableSchema)
if (uniqueColumns.length > 0) {
// Fetch existing rows to check for uniqueness
const existingRows = await db
.select({
id: userTableRows.id,
data: userTableRows.data,
})
.from(userTableRows)
.where(eq(userTableRows.tableId, tableId))
const uniqueValidation = validateUniqueConstraints(
validated.data,
table.schema as TableSchema,
existingRows
)
if (!uniqueValidation.valid) {
return NextResponse.json(
{ error: 'Unique constraint violation', details: uniqueValidation.errors },
{ status: 400 }
)
}
}
// Check row count limit
if (table.rowCount >= table.maxRows) {
return NextResponse.json(
@@ -589,6 +664,41 @@ export async function PUT(
}
}
// Check unique constraints if any unique columns exist
const uniqueColumns = getUniqueColumns(table.schema as TableSchema)
if (uniqueColumns.length > 0) {
// Fetch all rows (not just matching ones) to check for uniqueness
const allRows = await db
.select({
id: userTableRows.id,
data: userTableRows.data,
})
.from(userTableRows)
.where(eq(userTableRows.tableId, tableId))
// Validate each updated row for unique constraints
for (const row of matchingRows) {
const mergedData = { ...row.data, ...validated.data }
const uniqueValidation = validateUniqueConstraints(
mergedData,
table.schema as TableSchema,
allRows,
row.id // Exclude the current row being updated
)
if (!uniqueValidation.valid) {
return NextResponse.json(
{
error: 'Unique constraint violation',
details: uniqueValidation.errors,
affectedRowId: row.id,
},
{ status: 400 }
)
}
}
}
// Update rows by merging existing data with new data in batches
const now = new Date()
const BATCH_SIZE = 100 // Smaller batch for updates since each is a separate query

View File

@@ -19,6 +19,7 @@ const ColumnSchema = z.object({
.regex(/^[a-z_][a-z0-9_]*$/i, 'Invalid column name'),
type: z.enum(['string', 'number', 'boolean', 'date', 'json']),
required: z.boolean().optional().default(false),
unique: z.boolean().optional().default(false),
})
const CreateTableSchema = z.object({

View File

@@ -6,6 +6,8 @@ import { useQuery } from '@tanstack/react-query'
import {
ChevronLeft,
ChevronRight,
Columns,
Copy,
Edit,
Filter,
HelpCircle,
@@ -20,6 +22,9 @@ import {
Button,
Checkbox,
Input,
Modal,
ModalBody,
ModalContent,
Popover,
PopoverContent,
PopoverTrigger,
@@ -61,6 +66,14 @@ interface TableData {
updatedAt: string
}
interface CellViewerData {
columnName: string
value: any
type: 'json' | 'text'
}
const STRING_TRUNCATE_LENGTH = 50
export function TableDataViewer() {
const params = useParams()
const router = useRouter()
@@ -76,6 +89,9 @@ export function TableDataViewer() {
const [showAddModal, setShowAddModal] = useState(false)
const [editingRow, setEditingRow] = useState<TableRowData | null>(null)
const [deletingRows, setDeletingRows] = useState<string[]>([])
const [cellViewer, setCellViewer] = useState<CellViewerData | null>(null)
const [showSchemaModal, setShowSchemaModal] = useState(false)
const [copied, setCopied] = useState(false)
// Fetch table metadata
const { data: tableData, isLoading: isLoadingTable } = useQuery({
@@ -171,6 +187,18 @@ export function TableDataViewer() {
setDeletingRows(Array.from(selectedRows))
}, [selectedRows])
// Copy the currently-viewed cell value to the clipboard.
// JSON values are pretty-printed (2-space indent); everything else is
// stringified. The transient `copied` flag drives the "Copied!" button
// label and resets after 2 seconds.
const handleCopyCellValue = useCallback(async () => {
  if (cellViewer) {
    const text =
      cellViewer.type === 'json'
        ? JSON.stringify(cellViewer.value, null, 2)
        : String(cellViewer.value)
    await navigator.clipboard.writeText(text)
    setCopied(true)
    setTimeout(() => setCopied(false), 2000)
  }
}, [cellViewer])
const formatValue = (value: any, type: string): string => {
if (value === null || value === undefined) return '—'
@@ -192,6 +220,66 @@ export function TableDataViewer() {
}
}
// Open the cell-viewer modal for a clicked cell. preventDefault/stopPropagation
// keep the click from also triggering row-level handlers (e.g. row selection).
const handleCellClick = useCallback(
  (e: React.MouseEvent, columnName: string, value: any, type: 'json' | 'text') => {
    e.preventDefault()
    e.stopPropagation()
    setCellViewer({ columnName, value, type })
  },
  []
)
// Render a table cell with type-aware presentation:
// - null/undefined -> muted placeholder
// - json -> truncated, clickable chip that opens the cell-viewer modal
// - boolean -> colored true/false text
// - number -> monospace accent text
// - long strings (> STRING_TRUNCATE_LENGTH) -> clickable truncated text that
//   opens the cell-viewer modal; short strings render inline.
const renderCellValue = (value: any, column: { name: string; type: string }) => {
  const isNull = value === null || value === undefined
  if (isNull) {
    // NOTE(review): span body is empty — presumably a placeholder glyph
    // (e.g. an em dash, as in formatValue) was intended; confirm.
    return <span className='text-[var(--text-muted)] italic'></span>
  }
  if (column.type === 'json') {
    // Compact single-line preview; full pretty-printed JSON is shown in the
    // modal via handleCellClick.
    const jsonStr = JSON.stringify(value)
    return (
      <button
        type='button'
        className='block max-w-[300px] cursor-pointer select-none truncate rounded-[4px] border border-[var(--border-1)] px-[6px] py-[2px] text-left font-mono text-[11px] text-[var(--brand-secondary)] transition-colors hover:border-[var(--text-muted)] hover:text-[var(--text-primary)]'
        onClick={(e) => handleCellClick(e, column.name, value, 'json')}
        title='Click to view full JSON'
      >
        {jsonStr}
      </button>
    )
  }
  if (column.type === 'boolean') {
    return (
      <span className={value ? 'text-green-500' : 'text-[var(--text-tertiary)]'}>
        {value ? 'true' : 'false'}
      </span>
    )
  }
  if (column.type === 'number') {
    return <span className='font-mono text-[var(--brand-secondary)]'>{String(value)}</span>
  }
  // Handle long strings
  const strValue = String(value)
  if (strValue.length > STRING_TRUNCATE_LENGTH) {
    return (
      <button
        type='button'
        className='block max-w-[300px] cursor-pointer select-none truncate text-left text-[var(--text-primary)] underline decoration-[var(--border-1)] decoration-dotted underline-offset-2 transition-colors hover:decoration-[var(--text-muted)]'
        onClick={(e) => handleCellClick(e, column.name, value, 'text')}
        title='Click to view full text'
      >
        {strValue}
      </button>
    )
  }
  return <span className='text-[var(--text-primary)]'>{strValue}</span>
}
if (isLoadingTable) {
return (
<div className='flex h-full items-center justify-center'>
@@ -229,6 +317,15 @@ export function TableDataViewer() {
</div>
<div className='flex items-center gap-[8px]'>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button variant='ghost' size='sm' onClick={() => setShowSchemaModal(true)}>
<Columns className='h-[14px] w-[14px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content>View Schema</Tooltip.Content>
</Tooltip.Root>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button variant='ghost' size='sm' onClick={handleRefresh}>
@@ -471,22 +568,9 @@ export function TableDataViewer() {
</TableCell>
{columns.map((column) => (
<TableCell key={column.name}>
<span
className={cn(
'block max-w-[300px] truncate text-[13px]',
row.data[column.name] === null || row.data[column.name] === undefined
? 'text-[var(--text-muted)] italic'
: column.type === 'boolean'
? row.data[column.name]
? 'text-green-500'
: 'text-[var(--text-tertiary)]'
: column.type === 'number'
? 'font-mono text-[var(--brand-secondary)]'
: 'text-[var(--text-primary)]'
)}
>
{formatValue(row.data[column.name], column.type)}
</span>
<div className='max-w-[300px] truncate text-[13px]'>
{renderCellValue(row.data[column.name], column)}
</div>
</TableCell>
))}
<TableCell>
@@ -585,6 +669,125 @@ export function TableDataViewer() {
}}
/>
)}
{/* Schema Viewer Modal */}
<Modal open={showSchemaModal} onOpenChange={setShowSchemaModal}>
<ModalContent className='w-[500px] duration-100'>
<div className='flex items-center justify-between gap-[8px] px-[16px] py-[10px]'>
<div className='flex min-w-0 items-center gap-[8px]'>
<Columns className='h-[14px] w-[14px] text-[var(--text-tertiary)]' />
<span className='font-medium text-[14px] text-[var(--text-primary)]'>
Table Schema
</span>
<Badge variant='gray' size='sm'>
{columns.length} columns
</Badge>
</div>
<Button variant='ghost' size='sm' onClick={() => setShowSchemaModal(false)}>
<X className='h-[14px] w-[14px]' />
</Button>
</div>
<ModalBody className='p-0'>
<div className='max-h-[400px] overflow-auto'>
<Table>
<TableHeader>
<TableRow>
<TableHead className='w-[180px]'>Column</TableHead>
<TableHead className='w-[100px]'>Type</TableHead>
<TableHead>Constraints</TableHead>
</TableRow>
</TableHeader>
<TableBody>
{columns.map((column) => (
<TableRow key={column.name}>
<TableCell className='font-mono text-[12px] text-[var(--text-primary)]'>
{column.name}
</TableCell>
<TableCell>
<Badge
variant={
column.type === 'string'
? 'green'
: column.type === 'number'
? 'blue'
: column.type === 'boolean'
? 'purple'
: column.type === 'json'
? 'orange'
: column.type === 'date'
? 'teal'
: 'gray'
}
size='sm'
>
{column.type}
</Badge>
</TableCell>
<TableCell className='text-[12px]'>
<div className='flex gap-[6px]'>
{column.required && (
<Badge variant='red' size='sm'>
required
</Badge>
)}
{column.unique && (
<Badge variant='purple' size='sm'>
unique
</Badge>
)}
{!column.required && !column.unique && (
<span className='text-[var(--text-muted)]'></span>
)}
</div>
</TableCell>
</TableRow>
))}
</TableBody>
</Table>
</div>
</ModalBody>
</ModalContent>
</Modal>
{/* Cell Viewer Modal */}
<Modal open={!!cellViewer} onOpenChange={(open) => !open && setCellViewer(null)}>
<ModalContent className='w-[640px] duration-100'>
<div className='flex items-center justify-between gap-[8px] px-[16px] py-[10px]'>
<div className='flex min-w-0 items-center gap-[8px]'>
<span className='truncate font-medium text-[14px] text-[var(--text-primary)]'>
{cellViewer?.columnName}
</span>
<Badge variant={cellViewer?.type === 'json' ? 'blue' : 'gray'} size='sm'>
{cellViewer?.type === 'json' ? 'JSON' : 'Text'}
</Badge>
</div>
<div className='flex shrink-0 items-center gap-[8px]'>
<Button
variant={copied ? 'tertiary' : 'default'}
size='sm'
onClick={handleCopyCellValue}
>
<Copy className='mr-[4px] h-[12px] w-[12px]' />
{copied ? 'Copied!' : 'Copy'}
</Button>
<Button variant='ghost' size='sm' onClick={() => setCellViewer(null)}>
<X className='h-[14px] w-[14px]' />
</Button>
</div>
</div>
<ModalBody className='p-0'>
{cellViewer?.type === 'json' ? (
<pre className='m-[16px] max-h-[450px] overflow-auto rounded-[6px] border border-[var(--border)] bg-[var(--surface-4)] p-[16px] font-mono text-[12px] text-[var(--text-primary)] leading-[1.6]'>
{cellViewer ? JSON.stringify(cellViewer.value, null, 2) : ''}
</pre>
) : (
<div className='m-[16px] max-h-[450px] overflow-auto whitespace-pre-wrap break-words rounded-[6px] border border-[var(--border)] bg-[var(--surface-4)] p-[16px] text-[13px] text-[var(--text-primary)] leading-[1.7]'>
{cellViewer ? String(cellViewer.value) : ''}
</div>
)}
</ModalBody>
</ModalContent>
</Modal>
</div>
)
}

View File

@@ -2,9 +2,10 @@
import { useState } from 'react'
import { createLogger } from '@sim/logger'
import { Database, MoreVertical, Trash2 } from 'lucide-react'
import { Columns, Database, MoreVertical, Trash2 } from 'lucide-react'
import { useRouter } from 'next/navigation'
import {
Badge,
Button,
Modal,
ModalBody,
@@ -15,6 +16,12 @@ import {
PopoverContent,
PopoverItem,
PopoverTrigger,
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from '@/components/emcn'
import { useDeleteTable } from '@/hooks/queries/use-tables'
import type { TableDefinition } from '@/tools/table/types'
@@ -29,6 +36,7 @@ interface TableCardProps {
export function TableCard({ table, workspaceId }: TableCardProps) {
const router = useRouter()
const [isDeleteDialogOpen, setIsDeleteDialogOpen] = useState(false)
const [isSchemaModalOpen, setIsSchemaModalOpen] = useState(false)
const [isMenuOpen, setIsMenuOpen] = useState(false)
const deleteTable = useDeleteTable(workspaceId)
@@ -93,12 +101,23 @@ export function TableCard({ table, workspaceId }: TableCardProps) {
</Button>
</PopoverTrigger>
<PopoverContent align='end' className='w-[160px]'>
<PopoverItem
onClick={(e) => {
e.stopPropagation()
setIsMenuOpen(false)
setIsSchemaModalOpen(true)
}}
>
<Columns className='mr-[8px] h-[14px] w-[14px]' />
View Schema
</PopoverItem>
<PopoverItem
onClick={(e) => {
e.stopPropagation()
setIsMenuOpen(false)
setIsDeleteDialogOpen(true)
}}
className='text-[var(--text-error)] hover:text-[var(--text-error)]'
>
<Trash2 className='mr-[8px] h-[14px] w-[14px]' />
Delete
@@ -138,6 +157,79 @@ export function TableCard({ table, workspaceId }: TableCardProps) {
</ModalFooter>
</ModalContent>
</Modal>
<Modal open={isSchemaModalOpen} onOpenChange={setIsSchemaModalOpen}>
<ModalContent className='w-[500px] duration-100'>
<ModalHeader>
<div className='flex items-center gap-[8px]'>
<Columns className='h-[14px] w-[14px] text-[var(--text-tertiary)]' />
<span>{table.name}</span>
<Badge variant='gray' size='sm'>
{columnCount} columns
</Badge>
</div>
</ModalHeader>
<ModalBody className='p-0'>
<div className='max-h-[400px] overflow-auto'>
<Table>
<TableHeader>
<TableRow>
<TableHead className='w-[180px]'>Column</TableHead>
<TableHead className='w-[100px]'>Type</TableHead>
<TableHead>Constraints</TableHead>
</TableRow>
</TableHeader>
<TableBody>
{table.schema.columns.map((column) => (
<TableRow key={column.name}>
<TableCell className='font-mono text-[12px] text-[var(--text-primary)]'>
{column.name}
</TableCell>
<TableCell>
<Badge
variant={
column.type === 'string'
? 'green'
: column.type === 'number'
? 'blue'
: column.type === 'boolean'
? 'purple'
: column.type === 'json'
? 'orange'
: column.type === 'date'
? 'teal'
: 'gray'
}
size='sm'
>
{column.type}
</Badge>
</TableCell>
<TableCell className='text-[12px]'>
<div className='flex gap-[6px]'>
{column.required && (
<Badge variant='red' size='sm'>
required
</Badge>
)}
{column.unique && (
<Badge variant='purple' size='sm'>
unique
</Badge>
)}
{!column.required && !column.unique && (
<span className='text-[var(--text-muted)]'></span>
)}
</div>
</TableCell>
</TableRow>
))}
</TableBody>
</Table>
</div>
</ModalBody>
</ModalContent>
</Modal>
</>
)
}

View File

@@ -6,6 +6,10 @@
* JSONB fields using various operators ($eq, $ne, $gt, $gte, $lt, $lte, $in, $nin, $contains)
* and sorting by both JSONB fields and built-in columns (createdAt, updatedAt).
*
* IMPORTANT: For equality operations ($eq and direct value), we use the JSONB
* containment operator (@>) which can leverage the GIN index on the data column.
* For comparison operators ($gt, $lt, etc.) and pattern matching ($contains),
* we must use the text extraction operator (->>) which cannot use the GIN index.
*/
import type { SQL } from 'drizzle-orm'
@@ -27,15 +31,36 @@ export interface QueryFilter {
}
}
/**
 * Build a JSONB containment clause that can use the GIN index.
 * Creates: data @> '{"field": value}'::jsonb
 *
 * @param tableName - SQL name prefixed onto the data column. Interpolated via
 *   sql.raw, so it MUST be a trusted, code-supplied identifier — never user input.
 * @param field - JSONB key to match. It is embedded inside the JSON payload,
 *   which is bound as a query parameter, so arbitrary field names are safe here.
 * @param value - Value matched with containment semantics (exact JSON equality).
 */
function buildContainmentClause(tableName: string, field: string, value: any): SQL {
  // Build the JSONB object for containment check
  // The serialized object is bound as a parameter and cast to jsonb so the
  // planner can satisfy the @> test from the GIN index on the data column.
  const jsonObj = JSON.stringify({ [field]: value })
  return sql`${sql.raw(`${tableName}.data`)} @> ${jsonObj}::jsonb`
}
/**
* Build WHERE clause from filter object
* Supports: $eq, $ne, $gt, $gte, $lt, $lte, $in, $nin, $contains
*
* Uses GIN-index-compatible containment operator (@>) for:
* - $eq (equality)
* - Direct value equality
* - $in (as OR of containment checks)
*
* Uses text extraction (->>) for operators that require it:
* - $ne (not equals - no containment equivalent)
* - $gt, $gte, $lt, $lte (numeric comparisons)
* - $nin (not in)
* - $contains (pattern matching)
*/
export function buildFilterClause(filter: QueryFilter, tableName: string): SQL | undefined {
const conditions: SQL[] = []
for (const [field, condition] of Object.entries(filter)) {
// Escape field name to prevent SQL injection
// Escape field name to prevent SQL injection (for ->> operators)
const escapedField = field.replace(/'/g, "''")
if (typeof condition === 'object' && condition !== null && !Array.isArray(condition)) {
@@ -43,16 +68,15 @@ export function buildFilterClause(filter: QueryFilter, tableName: string): SQL |
for (const [op, value] of Object.entries(condition)) {
switch (op) {
case '$eq':
conditions.push(
sql`${sql.raw(`${tableName}.data->>'${escapedField}'`)} = ${String(value)}`
)
// Use containment operator for GIN index support
conditions.push(buildContainmentClause(tableName, field, value))
break
case '$ne':
conditions.push(
sql`${sql.raw(`${tableName}.data->>'${escapedField}'`)} != ${String(value)}`
)
// NOT containment - still uses GIN index for the containment check
conditions.push(sql`NOT (${buildContainmentClause(tableName, field, value)})`)
break
case '$gt':
// Numeric comparison requires text extraction (no GIN support)
conditions.push(
sql`(${sql.raw(`${tableName}.data->>'${escapedField}'`)})::numeric > ${value}`
)
@@ -73,22 +97,29 @@ export function buildFilterClause(filter: QueryFilter, tableName: string): SQL |
)
break
case '$in':
// Use OR of containment checks for GIN index support
if (Array.isArray(value) && value.length > 0) {
const valuesList = value.map((v) => String(v))
conditions.push(
sql`${sql.raw(`${tableName}.data->>'${escapedField}'`)} = ANY(${valuesList})`
)
if (value.length === 1) {
// Single value - just use containment
conditions.push(buildContainmentClause(tableName, field, value[0]))
} else {
// Multiple values - OR of containment checks
const inConditions = value.map((v) => buildContainmentClause(tableName, field, v))
conditions.push(sql`(${sql.join(inConditions, sql.raw(' OR '))})`)
}
}
break
case '$nin':
// NOT IN requires checking none of the values match
if (Array.isArray(value) && value.length > 0) {
const valuesList = value.map((v) => String(v))
conditions.push(
sql`${sql.raw(`${tableName}.data->>'${escapedField}'`)} != ALL(${valuesList})`
const ninConditions = value.map(
(v) => sql`NOT (${buildContainmentClause(tableName, field, v)})`
)
conditions.push(sql`(${sql.join(ninConditions, sql.raw(' AND '))})`)
}
break
case '$contains':
// Pattern matching requires text extraction (no GIN support)
conditions.push(
sql`${sql.raw(`${tableName}.data->>'${escapedField}'`)} ILIKE ${`%${value}%`}`
)
@@ -96,10 +127,8 @@ export function buildFilterClause(filter: QueryFilter, tableName: string): SQL |
}
}
} else {
// Direct equality
conditions.push(
sql`${sql.raw(`${tableName}.data->>'${escapedField}'`)} = ${String(condition)}`
)
// Direct equality - use containment operator for GIN index support
conditions.push(buildContainmentClause(tableName, field, condition))
}
}

View File

@@ -205,3 +205,52 @@ export function validateRowAgainstSchema(
/**
 * Returns the columns in the schema that carry a unique constraint.
 */
export function getUniqueColumns(schema: TableSchema): ColumnDefinition[] {
  const uniqueColumns: ColumnDefinition[] = []
  for (const column of schema.columns) {
    if (column.unique === true) {
      uniqueColumns.push(column)
    }
  }
  return uniqueColumns
}

/**
 * Validates unique constraints for row data.
 *
 * For each unique column with a non-null candidate value, scans the supplied
 * rows for a matching value. String comparisons are case-insensitive; all
 * other types use strict equality.
 *
 * @param data - Candidate row data to validate.
 * @param schema - Table schema whose unique columns are enforced.
 * @param existingRows - Rows to check against (id + data).
 * @param excludeRowId - Optional row id to skip (the row being updated).
 * @returns Validation result with one error message per violated column.
 */
export function validateUniqueConstraints(
  data: Record<string, any>,
  schema: TableSchema,
  existingRows: Array<{ id: string; data: Record<string, any> }>,
  excludeRowId?: string
): ValidationResult {
  const errors: string[] = []

  for (const column of getUniqueColumns(schema)) {
    const value = data[column.name]

    // Optional unique columns: null/undefined values never collide.
    if (value === null || value === undefined) {
      continue
    }

    // Case-insensitive match for string pairs, strict equality otherwise.
    const isSameValue = (other: any): boolean =>
      typeof value === 'string' && typeof other === 'string'
        ? value.toLowerCase() === other.toLowerCase()
        : value === other

    const duplicate = existingRows.find((row) => {
      const isExcluded = Boolean(excludeRowId) && row.id === excludeRowId
      return !isExcluded && isSameValue(row.data[column.name])
    })

    if (duplicate) {
      errors.push(
        `Column "${column.name}" must be unique. Value "${value}" already exists in row ${duplicate.id}`
      )
    }
  }

  return {
    valid: errors.length === 0,
    errors,
  }
}