diff --git a/PLAN.md b/PLAN.md
deleted file mode 100644
index 2c04fbb5f..000000000
--- a/PLAN.md
+++ /dev/null
@@ -1,250 +0,0 @@
-# Table Block Implementation Plan
-
-> Create a new "table" block type that enables users to define schemas and perform CRUD operations on lightweight, workspace/workflow-scoped tables stored in the existing PostgreSQL database using JSONB with application-level schema enforcement.
-
-## Table of Contents
-
-- [Architecture Overview](#architecture-overview)
-- [Data Model](#data-model)
-- [Implementation Files](#implementation-files)
-- [Key Design Decisions](#key-design-decisions)
-- [Limits and Limitations](#limits-and-limitations)
-- [Implementation Checklist](#implementation-checklist)
-
-## Architecture Overview
-
-```mermaid
-flowchart TB
-    subgraph UI [Block UI Layer]
-        TableBlock[Table Block]
-        SchemaEditor[Schema Editor SubBlock]
-    end
-
-    subgraph Tools [Tool Layer]
-        CreateTable[table_create]
-        Insert[table_insert]
-        Select[table_select]
-        Update[table_update]
-        Delete[table_delete]
-        DropTable[table_drop]
-    end
-
-    subgraph API [API Routes]
-        TableAPI["/api/tables"]
-        RowAPI["/api/tables/tableId/rows"]
-    end
-
-    subgraph DB [PostgreSQL]
-        SimTable[sim_table]
-        SimTableRow[sim_table_row]
-    end
-
-    TableBlock --> Tools
-    Tools --> API
-    API --> DB
-```
-
-## Data Model
-
-Two new tables in the existing PostgreSQL database:
-
-### `sim_table` - Table Definitions
-
-| Column | Type | Description |
-|--------|------|-------------|
-| id | text | Primary key |
-| workspace_id | text | FK to workspace |
-| workflow_id | text | FK to workflow (nullable for workspace scope) |
-| name | text | Table name (unique per scope) |
-| schema | jsonb | Column definitions with types/constraints |
-| created_by | text | FK to user |
-| created_at | timestamp | Creation time |
-| updated_at | timestamp | Last update time |
-
-### `sim_table_row` - Row Data
-
-| Column | Type | Description |
-|--------|------|-------------|
-| id | text | Primary key |
-| table_id | text | FK to sim_table |
-| data | jsonb | Row data (validated against schema) |
-| created_at | timestamp | Creation time |
-| updated_at | timestamp | Last update time |
-
-### Schema Format
-
-**Example schema definition:**
-
-```json
-{
-  "columns": [
-    { "name": "id", "type": "string", "primaryKey": true },
-    { "name": "email", "type": "string", "required": true, "unique": true },
-    { "name": "age", "type": "number" },
-    { "name": "active", "type": "boolean", "default": true }
-  ]
-}
-```
-
-**Supported Types:** `string`, `number`, `boolean`, `date`, `json`
-
-## Implementation Files
-
-### 1. Database Schema
-
-- `packages/db/schema.ts` - Add `simTable` and `simTableRow` table definitions
-- Generate migration for the new tables
-
-### 2. Tools (`apps/sim/tools/table/`)
-
-| File | Purpose |
-|------|---------|
-| `types.ts` | Type definitions for params/responses |
-| `create.ts` | Create table with schema |
-| `insert.ts` | Insert row(s) with schema validation |
-| `select.ts` | Query rows with filtering |
-| `update.ts` | Update rows with schema validation |
-| `delete.ts` | Delete rows |
-| `drop.ts` | Drop table |
-| `index.ts` | Barrel export |
-
-### 3. Block Definition
-
-- `apps/sim/blocks/blocks/table.ts` - Block config with:
-  - Operation dropdown (create, insert, select, update, delete, drop)
-  - Scope selector (workspace/workflow)
-  - Table selector (for existing tables)
-  - Schema editor (for create operation)
-  - Data/query inputs (operation-specific)
-
-### 4. API Routes
-
-- `apps/sim/app/api/tables/route.ts` - Create table, list tables
-- `apps/sim/app/api/tables/[tableId]/route.ts` - Get/drop table
-- `apps/sim/app/api/tables/[tableId]/rows/route.ts` - CRUD on rows
-
-### 5. Schema Validation Library
-
-- `apps/sim/lib/tables/schema.ts` - Schema validation utilities
-- `apps/sim/lib/tables/types.ts` - Shared types
-
-## Key Design Decisions
-
-1. **Schema Enforcement**: Application-layer validation before database writes. JSONB stores the data, but every insert/update validates against the table's schema.
-
-2. **Concurrency**: PostgreSQL handles concurrent reads/writes natively. Row-level locking for updates.
-
-3. **Indexing**: GIN index on the `data` column for efficient JSONB queries. Additional indexes on `table_id` for fast row lookups.
-
-4. **Scope Resolution**: Tables with `workflow_id = NULL` are workspace-scoped. Tables with `workflow_id` set are workflow-scoped.
-
-5. **Table Selector**: New SubBlock type `table-selector` that fetches available tables based on the current workspace/workflow context.
-
-## Limits and Limitations
-
-### Table Limits
-
-| Limit | Free Plan | Pro Plan | Enterprise |
-|-------|-----------|----------|------------|
-| Tables per workspace | 10 | 50 | Unlimited |
-| Tables per workflow | 5 | 20 | Unlimited |
-| Columns per table | 50 | 100 | 200 |
-
-### Row Limits
-
-| Limit | Free Plan | Pro Plan | Enterprise |
-|-------|-----------|----------|------------|
-| Rows per table | 10,000 | 100,000 | 1,000,000 |
-| Batch insert size | 100 | 500 | 1,000 |
-| Batch update/delete size | 100 | 500 | 1,000 |
-
-### Size Limits
-
-| Limit | Value | Rationale |
-|-------|-------|-----------|
-| Column name length | 64 chars | PostgreSQL identifier limit |
-| Table name length | 64 chars | PostgreSQL identifier limit |
-| String field max length | 65,535 chars | ~64KB per text field |
-| JSON field max size | 1 MB | PostgreSQL JSONB practical limit |
-| Single row max size | 2 MB | Reasonable row size limit |
-| Total table data size | Based on plan | Tied to workspace storage quota |
-
-### Query Limits
-
-| Limit | Value | Notes |
-|-------|-------|-------|
-| Default page size | 100 rows | Can be overridden up to max |
-| Max page size | 1,000 rows | Prevents memory issues |
-| Max filter conditions | 20 | AND/OR conditions combined |
-| Query timeout | 30 seconds | Prevents long-running queries |
-| Max concurrent queries per table | 50 | Rate limiting per table |
-
-### Schema Constraints
-
-| Constraint | Limit |
-|------------|-------|
-| Primary key columns | 1 (single column only) |
-| Unique constraints | 5 per table |
-| Required (NOT NULL) columns | Unlimited |
-| Default values | Supported for all types |
-| Foreign keys | Not supported (v1) |
-| Computed columns | Not supported (v1) |
-| Indexes | Auto-created for primary key and unique columns |
-
-### Data Type Specifications
-
-| Type | Storage | Min | Max | Notes |
-|------|---------|-----|-----|-------|
-| `string` | text | 0 chars | 65,535 chars | UTF-8 encoded |
-| `number` | double precision | -1.7e308 | 1.7e308 | IEEE 754 double |
-| `boolean` | boolean | - | - | true/false |
-| `date` | timestamp | 4713 BC | 294276 AD | ISO 8601 format |
-| `json` | jsonb | - | 1 MB | Nested objects/arrays |
-
-### Operational Limitations
-
-1. **No Transactions Across Tables**: Each operation is atomic to a single table. Cross-table transactions are not supported.
-
-2. **No JOINs**: Cannot join data between tables. Use workflow logic to combine data from multiple tables.
-
-3. **No Triggers/Hooks**: No automatic actions on insert/update/delete. Use workflow blocks for reactive logic.
-
-4. **No Full-Text Search**: Basic filtering only. For full-text search, use the Knowledge Base feature.
-
-5. **No Schema Migrations**: Schema changes require dropping and recreating the table (with data loss). Future versions may support additive migrations.
-
-6. **Query Complexity**: Only basic operators supported:
-   - Comparison: `=`, `!=`, `>`, `<`, `>=`, `<=`
-   - String: `LIKE`, `ILIKE`, `STARTS_WITH`, `ENDS_WITH`, `CONTAINS`
-   - Logical: `AND`, `OR`, `NOT`
-   - Null checks: `IS NULL`, `IS NOT NULL`
-   - Array: `IN`, `NOT IN`
-
-### Performance Characteristics
-
-| Operation | Expected Latency | Notes |
-|-----------|------------------|-------|
-| Insert (single row) | < 50ms | With schema validation |
-| Insert (batch 100) | < 200ms | Parallel validation |
-| Select (indexed) | < 20ms | Primary key or unique column |
-| Select (filtered, 1K rows) | < 100ms | With GIN index |
-| Update (single row) | < 50ms | By primary key |
-| Delete (single row) | < 30ms | By primary key |
-
-### Storage Accounting
-
-- Table storage counts toward the workspace storage quota
-- Calculated as: `sum(row_data_size) + schema_overhead`
-- Schema overhead: ~1KB per table
-- Row overhead: ~100 bytes per row (metadata, timestamps)
-
-## Implementation Checklist
-
-- [ ] Add `simTable` and `simTableRow` to `packages/db/schema.ts` and generate migration
-- [ ] Create `apps/sim/lib/tables/` with schema validation and types
-- [ ] Create `apps/sim/tools/table/` with all 6 tool implementations
-- [ ] Register tools in `apps/sim/tools/registry.ts`
-- [ ] Create API routes for tables and rows CRUD operations
-- [ ] Create `apps/sim/blocks/blocks/table.ts` block definition
-- [ ] Register block in `apps/sim/blocks/registry.ts`
-- [ ] Add `TableIcon` to `apps/sim/components/icons.tsx`
diff --git a/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts b/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts
index 18a4c9aab..61a291f2f 100644
--- a/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts
+++ b/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts
@@ -7,7 +7,12 @@ import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
 import type { TableSchema } from '@/lib/table'
-import { validateRowAgainstSchema, validateRowSize } from '@/lib/table'
+import {
+  getUniqueColumns,
+  validateRowAgainstSchema,
+  validateRowSize,
+  validateUniqueConstraints,
+} from '@/lib/table'

 const logger = createLogger('TableRowAPI')

@@ -210,6 +215,33 @@ export async function PATCH(
     )
   }

+  // Check unique constraints if any unique columns exist
+  const uniqueColumns = getUniqueColumns(table.schema as TableSchema)
+  if (uniqueColumns.length > 0) {
+    // Fetch existing rows to check for uniqueness
+    const existingRows = await db
+      .select({
+        id: userTableRows.id,
+        data: userTableRows.data,
+      })
+      .from(userTableRows)
+      .where(eq(userTableRows.tableId, tableId))
+
+    const uniqueValidation = validateUniqueConstraints(
+      validated.data,
+      table.schema as TableSchema,
+      existingRows,
+      rowId // Exclude the current row being updated
+    )
+
+    if (!uniqueValidation.valid) {
+      return NextResponse.json(
+        { error: 'Unique constraint violation', details: uniqueValidation.errors },
+        { status: 400 }
+      )
+    }
+  }
+
   // Update row
   const now = new Date()
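A note on the PATCH path above: uniqueness is enforced in the application layer by loading every row of the table and comparing values, with the row being updated excluded via `rowId`. A minimal sketch of those semantics, with a hypothetical schema and rows; the schema literal is cast because only the column shape (name/type/required/unique) is visible in this diff:

```ts
import type { TableSchema } from '@/lib/table'
import { validateUniqueConstraints } from '@/lib/table'

// Hypothetical schema with a unique "email" column (cast for brevity).
const schema = {
  columns: [
    { name: 'id', type: 'string' },
    { name: 'email', type: 'string', required: true, unique: true },
  ],
} as unknown as TableSchema

const existingRows = [
  { id: 'row_1', data: { id: '1', email: 'a@example.com' } },
  { id: 'row_2', data: { id: '2', email: 'b@example.com' } },
]

// Moving row_1 onto row_2's email fails...
validateUniqueConstraints({ email: 'b@example.com' }, schema, existingRows, 'row_1')
// -> { valid: false, errors: ['Column "email" must be unique. ...'] }

// ...but re-saving row_1 with its own email passes, because the
// excludeRowId argument skips the row being updated.
validateUniqueConstraints({ email: 'a@example.com' }, schema, existingRows, 'row_1')
// -> { valid: true, errors: [] }
```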
diff --git a/apps/sim/app/api/table/[tableId]/rows/route.ts b/apps/sim/app/api/table/[tableId]/rows/route.ts
index 4db02322f..af1a58fc0 100644
--- a/apps/sim/app/api/table/[tableId]/rows/route.ts
+++ b/apps/sim/app/api/table/[tableId]/rows/route.ts
@@ -7,7 +7,13 @@ import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
 import type { QueryFilter, TableSchema } from '@/lib/table'
-import { TABLE_LIMITS, validateRowAgainstSchema, validateRowSize } from '@/lib/table'
+import {
+  getUniqueColumns,
+  TABLE_LIMITS,
+  validateRowAgainstSchema,
+  validateRowSize,
+  validateUniqueConstraints,
+} from '@/lib/table'
 import { buildFilterClause, buildSortClause } from '@/lib/table/query-builder'

 const logger = createLogger('TableRowsAPI')

@@ -161,6 +167,49 @@ async function handleBatchInsert(requestId: string, tableId: string, body: any,
     )
   }

+  // Check unique constraints if any unique columns exist
+  const uniqueColumns = getUniqueColumns(table.schema as TableSchema)
+  if (uniqueColumns.length > 0) {
+    // Fetch existing rows to check for uniqueness
+    const existingRows = await db
+      .select({
+        id: userTableRows.id,
+        data: userTableRows.data,
+      })
+      .from(userTableRows)
+      .where(eq(userTableRows.tableId, tableId))
+
+    // Validate each row for unique constraints
+    for (let i = 0; i < validated.rows.length; i++) {
+      const rowData = validated.rows[i]
+
+      // Also check against other rows in the batch
+      const batchRows = validated.rows.slice(0, i).map((data, idx) => ({
+        id: `batch_${idx}`,
+        data,
+      }))
+
+      const uniqueValidation = validateUniqueConstraints(rowData, table.schema as TableSchema, [
+        ...existingRows,
+        ...batchRows,
+      ])
+
+      if (!uniqueValidation.valid) {
+        errors.push({ row: i, errors: uniqueValidation.errors })
+      }
+    }
+
+    if (errors.length > 0) {
+      return NextResponse.json(
+        {
+          error: 'Unique constraint violations in batch',
+          details: errors,
+        },
+        { status: 400 }
+      )
+    }
+  }
+
   // Insert all rows
   const now = new Date()
   const rowsToInsert = validated.rows.map((data) => ({

@@ -271,6 +320,32 @@ export async function POST(
     )
   }

+  // Check unique constraints if any unique columns exist
+  const uniqueColumns = getUniqueColumns(table.schema as TableSchema)
+  if (uniqueColumns.length > 0) {
+    // Fetch existing rows to check for uniqueness
+    const existingRows = await db
+      .select({
+        id: userTableRows.id,
+        data: userTableRows.data,
+      })
+      .from(userTableRows)
+      .where(eq(userTableRows.tableId, tableId))
+
+    const uniqueValidation = validateUniqueConstraints(
+      validated.data,
+      table.schema as TableSchema,
+      existingRows
+    )
+
+    if (!uniqueValidation.valid) {
+      return NextResponse.json(
+        { error: 'Unique constraint violation', details: uniqueValidation.errors },
+        { status: 400 }
+      )
+    }
+  }
+
   // Check row count limit
   if (table.rowCount >= table.maxRows) {
     return NextResponse.json(

@@ -589,6 +664,41 @@ export async function PUT(
     }
   }

+  // Check unique constraints if any unique columns exist
+  const uniqueColumns = getUniqueColumns(table.schema as TableSchema)
+  if (uniqueColumns.length > 0) {
+    // Fetch all rows (not just matching ones) to check for uniqueness
+    const allRows = await db
+      .select({
+        id: userTableRows.id,
+        data: userTableRows.data,
+      })
+      .from(userTableRows)
+      .where(eq(userTableRows.tableId, tableId))
+
+    // Validate each updated row for unique constraints
+    for (const row of matchingRows) {
+      const mergedData = { ...row.data, ...validated.data }
+      const uniqueValidation = validateUniqueConstraints(
+        mergedData,
+        table.schema as TableSchema,
+        allRows,
+        row.id // Exclude the current row being updated
+      )
+
+      if (!uniqueValidation.valid) {
+        return NextResponse.json(
+          {
+            error: 'Unique constraint violation',
+            details: uniqueValidation.errors,
+            affectedRowId: row.id,
+          },
+          { status: 400 }
+        )
+      }
+    }
+  }
+
   // Update rows by merging existing data with new data in batches
   const now = new Date()
   const BATCH_SIZE = 100 // Smaller batch for updates since each is a separate query
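The batch path extends the same check across the request itself: row *i* is validated against existing rows plus rows 0..i-1 of the batch, so duplicates inside a single request are caught before anything is inserted. A sketch mirroring that loop with hypothetical data (note the case-insensitive string comparison implemented in `validateUniqueConstraints` later in this diff):

```ts
import type { TableSchema } from '@/lib/table'
import { validateUniqueConstraints } from '@/lib/table'

// Hypothetical schema; cast as in the previous sketch.
const schema = {
  columns: [{ name: 'sku', type: 'string', unique: true }],
} as unknown as TableSchema

// Two rows in one request that differ only by case.
const incoming = [{ sku: 'A-1' }, { sku: 'a-1' }]

incoming.forEach((row, i) => {
  // Mirror of the handler's loop: row i is checked against rows 0..i-1.
  const priorBatchRows = incoming
    .slice(0, i)
    .map((data, idx) => ({ id: `batch_${idx}`, data }))

  const result = validateUniqueConstraints(row, schema, priorBatchRows)
  console.log(i, result.valid) // 0 true, 1 false - string comparison is case-insensitive
})
```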
diff --git a/apps/sim/app/api/table/route.ts b/apps/sim/app/api/table/route.ts
index dd7c26c83..a2ea2680c 100644
--- a/apps/sim/app/api/table/route.ts
+++ b/apps/sim/app/api/table/route.ts
@@ -19,6 +19,7 @@ const ColumnSchema = z.object({
     .regex(/^[a-z_][a-z0-9_]*$/i, 'Invalid column name'),
   type: z.enum(['string', 'number', 'boolean', 'date', 'json']),
   required: z.boolean().optional().default(false),
+  unique: z.boolean().optional().default(false),
 })

 const CreateTableSchema = z.object({
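The new flag defaults to `false` when omitted, so existing create-table payloads keep working unchanged. A simplified mirror of `ColumnSchema` (the name-format rules from the hunk above are elided) showing how the flag parses:

```ts
import { z } from 'zod'

// Simplified mirror of ColumnSchema, not the exact production schema.
const Column = z.object({
  name: z.string(),
  type: z.enum(['string', 'number', 'boolean', 'date', 'json']),
  required: z.boolean().optional().default(false),
  unique: z.boolean().optional().default(false),
})

Column.parse({ name: 'email', type: 'string', unique: true })
// -> { name: 'email', type: 'string', required: false, unique: true }

Column.parse({ name: 'age', type: 'number' })
// -> { name: 'age', type: 'number', required: false, unique: false }
```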
diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/table-data-viewer.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/table-data-viewer.tsx
index 26cb9dbf6..49c6b9a50 100644
--- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/table-data-viewer.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/table-data-viewer.tsx
@@ -6,6 +6,8 @@ import { useQuery } from '@tanstack/react-query'
 import {
   ChevronLeft,
   ChevronRight,
+  Columns,
+  Copy,
   Edit,
   Filter,
   HelpCircle,
@@ -20,6 +22,9 @@ import {
   Button,
   Checkbox,
   Input,
+  Modal,
+  ModalBody,
+  ModalContent,
   Popover,
   PopoverContent,
   PopoverTrigger,
@@ -61,6 +66,14 @@ interface TableData {
   updatedAt: string
 }

+interface CellViewerData {
+  columnName: string
+  value: any
+  type: 'json' | 'text'
+}
+
+const STRING_TRUNCATE_LENGTH = 50
+
 export function TableDataViewer() {
   const params = useParams()
   const router = useRouter()
@@ -76,6 +89,9 @@ export function TableDataViewer() {
   const [showAddModal, setShowAddModal] = useState(false)
   const [editingRow, setEditingRow] = useState(null)
   const [deletingRows, setDeletingRows] = useState([])
+  const [cellViewer, setCellViewer] = useState<CellViewerData | null>(null)
+  const [showSchemaModal, setShowSchemaModal] = useState(false)
+  const [copied, setCopied] = useState(false)

   // Fetch table metadata
   const { data: tableData, isLoading: isLoadingTable } = useQuery({
@@ -171,6 +187,18 @@ export function TableDataViewer() {
     setDeletingRows(Array.from(selectedRows))
   }, [selectedRows])

+  const handleCopyCellValue = useCallback(async () => {
+    if (cellViewer) {
+      const text =
+        cellViewer.type === 'json'
+          ? JSON.stringify(cellViewer.value, null, 2)
+          : String(cellViewer.value)
+      await navigator.clipboard.writeText(text)
+      setCopied(true)
+      setTimeout(() => setCopied(false), 2000)
+    }
+  }, [cellViewer])
+
   const formatValue = (value: any, type: string): string => {
     if (value === null || value === undefined) return '—'

@@ -192,6 +220,66 @@ export function TableDataViewer() {
     }
   }

+  const handleCellClick = useCallback(
+    (e: React.MouseEvent, columnName: string, value: any, type: 'json' | 'text') => {
+      e.preventDefault()
+      e.stopPropagation()
+      setCellViewer({ columnName, value, type })
+    },
+    []
+  )
+
+  const renderCellValue = (value: any, column: { name: string; type: string }) => {
+    const isNull = value === null || value === undefined
+
+    if (isNull) {
+      return <span>—</span>
+    }
+
+    if (column.type === 'json') {
+      const jsonStr = JSON.stringify(value)
+      return (
+        <button onClick={(e) => handleCellClick(e, column.name, value, 'json')}>
+          {jsonStr.length > STRING_TRUNCATE_LENGTH
+            ? `${jsonStr.slice(0, STRING_TRUNCATE_LENGTH)}…`
+            : jsonStr}
+        </button>
+      )
+    }
+
+    if (column.type === 'boolean') {
+      return <span>{value ? 'true' : 'false'}</span>
+    }
+
+    if (column.type === 'number') {
+      return <span>{String(value)}</span>
+    }
+
+    // Handle long strings
+    const strValue = String(value)
+    if (strValue.length > STRING_TRUNCATE_LENGTH) {
+      return (
+        <button onClick={(e) => handleCellClick(e, column.name, strValue, 'text')}>
+          {`${strValue.slice(0, STRING_TRUNCATE_LENGTH)}…`}
+        </button>
+      )
+    }
+
+    return <span>{strValue}</span>
+  }
+
   if (isLoadingTable) {
     return (
@@ -229,6 +317,15 @@ export function TableDataViewer() {
+          <Button
+            variant='outline'
+            size='sm'
+            onClick={() => setShowSchemaModal(true)}
+          >
+            <Columns />
+            View Schema
+          </Button>
+

@@ ... @@ export function TableDataViewer() {
+      {/* Schema Modal */}
+      <Modal open={showSchemaModal} onOpenChange={setShowSchemaModal}>
+        <ModalContent>
+          <ModalBody>
+            <table>
+              <thead>
+                <tr>
+                  <th>Column</th>
+                  <th>Type</th>
+                  <th>Constraints</th>
+                </tr>
+              </thead>
+              <tbody>
+                {columns.map((column) => (
+                  <tr key={column.name}>
+                    <td>{column.name}</td>
+                    <td>{column.type}</td>
+                    <td>
+                      {column.required && (
+                        <span>required</span>
+                      )}
+                      {column.unique && (
+                        <span>unique</span>
+                      )}
+                      {!column.required && !column.unique && (
+                        <span>—</span>
+                      )}
+                    </td>
+                  </tr>
+                ))}
+              </tbody>
+            </table>
+          </ModalBody>
+        </ModalContent>
+      </Modal>
+
+      {/* Cell Viewer Modal */}
+      <Modal open={!!cellViewer} onOpenChange={(open) => !open && setCellViewer(null)}>
+        <ModalContent>
+          <ModalBody>
+            <div>
+              <span>{cellViewer?.columnName}</span>
+              <span>{cellViewer?.type === 'json' ? 'JSON' : 'Text'}</span>
+              <Button variant='ghost' onClick={handleCopyCellValue}>
+                <Copy />
+                {copied ? 'Copied' : 'Copy'}
+              </Button>
+            </div>
+            {cellViewer?.type === 'json' ? (
+              <pre>
+                {cellViewer ? JSON.stringify(cellViewer.value, null, 2) : ''}
+              </pre>
+            ) : (
+              <div>
+                {cellViewer ? String(cellViewer.value) : ''}
+              </div>
+            )}
+          </ModalBody>
+        </ModalContent>
+      </Modal>
   )
 }
diff --git a/apps/sim/app/workspace/[workspaceId]/tables/components/table-card.tsx b/apps/sim/app/workspace/[workspaceId]/tables/components/table-card.tsx
index 52148d4c9..96ff676c5 100644
--- a/apps/sim/app/workspace/[workspaceId]/tables/components/table-card.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/tables/components/table-card.tsx
@@ -2,9 +2,10 @@
 import { useState } from 'react'
 import { createLogger } from '@sim/logger'
-import { Database, MoreVertical, Trash2 } from 'lucide-react'
+import { Columns, Database, MoreVertical, Trash2 } from 'lucide-react'
 import { useRouter } from 'next/navigation'
 import {
+  Badge,
   Button,
   Modal,
   ModalBody,
@@ -15,6 +16,12 @@ import {
   Popover,
   PopoverContent,
   PopoverItem,
   PopoverTrigger,
+  Table,
+  TableBody,
+  TableCell,
+  TableHead,
+  TableHeader,
+  TableRow,
 } from '@/components/emcn'
 import { useDeleteTable } from '@/hooks/queries/use-tables'
 import type { TableDefinition } from '@/tools/table/types'
@@ -29,6 +36,7 @@ interface TableCardProps {
 export function TableCard({ table, workspaceId }: TableCardProps) {
   const router = useRouter()
   const [isDeleteDialogOpen, setIsDeleteDialogOpen] = useState(false)
+  const [isSchemaModalOpen, setIsSchemaModalOpen] = useState(false)
   const [isMenuOpen, setIsMenuOpen] = useState(false)
   const deleteTable = useDeleteTable(workspaceId)
@@ -93,12 +101,23 @@ export function TableCard({ table, workspaceId }: TableCardProps) {
+            <PopoverItem
+              onClick={(e) => {
+                e.stopPropagation()
+                setIsMenuOpen(false)
+                setIsSchemaModalOpen(true)
+              }}
+            >
+              <Columns />
+              View Schema
+            </PopoverItem>
             <PopoverItem
               onClick={(e) => {
                 e.stopPropagation()
                 setIsMenuOpen(false)
                 setIsDeleteDialogOpen(true)
               }}
+              className='text-[var(--text-error)] hover:text-[var(--text-error)]'
             >
               <Trash2 />
               Delete
             </PopoverItem>
@@ -138,6 +157,79 @@ export function TableCard({ table, workspaceId }: TableCardProps) {
+      {/* Schema Modal */}
+      <Modal open={isSchemaModalOpen} onOpenChange={setIsSchemaModalOpen}>
+        <ModalContent>
+          <ModalBody>
+            <div>
+              <span>{table.name}</span>
+              <Badge>{columnCount} columns</Badge>
+            </div>
+            <Table>
+              <TableHeader>
+                <TableRow>
+                  <TableHead>Column</TableHead>
+                  <TableHead>Type</TableHead>
+                  <TableHead>Constraints</TableHead>
+                </TableRow>
+              </TableHeader>
+              <TableBody>
+                {table.schema.columns.map((column) => (
+                  <TableRow key={column.name}>
+                    <TableCell>{column.name}</TableCell>
+                    <TableCell>
+                      <Badge>{column.type}</Badge>
+                    </TableCell>
+                    <TableCell>
+                      {column.required && (
+                        <Badge>required</Badge>
+                      )}
+                      {column.unique && (
+                        <Badge>unique</Badge>
+                      )}
+                      {!column.required && !column.unique && (
+                        <span>—</span>
+                      )}
+                    </TableCell>
+                  </TableRow>
+                ))}
+              </TableBody>
+            </Table>
+          </ModalBody>
+        </ModalContent>
+      </Modal>
   )
 }
diff --git a/apps/sim/lib/table/query-builder.ts b/apps/sim/lib/table/query-builder.ts
index 6c6339a8a..ab34b9cc2 100644
--- a/apps/sim/lib/table/query-builder.ts
+++ b/apps/sim/lib/table/query-builder.ts
@@ -6,6 +6,10 @@
  * JSONB fields using various operators ($eq, $ne, $gt, $gte, $lt, $lte, $in, $nin, $contains)
  * and sorting by both JSONB fields and built-in columns (createdAt, updatedAt).
  *
+ * IMPORTANT: For equality operations ($eq and direct value), we use the JSONB
+ * containment operator (@>) which can leverage the GIN index on the data column.
+ * For comparison operators ($gt, $lt, etc.) and pattern matching ($contains),
+ * we must use the text extraction operator (->>) which cannot use the GIN index.
  */

 import type { SQL } from 'drizzle-orm'
@@ -27,15 +31,36 @@ export interface QueryFilter {
   }
 }

+/**
+ * Build a JSONB containment clause that can use the GIN index.
+ * Creates: data @> '{"field": value}'::jsonb
+ */
+function buildContainmentClause(tableName: string, field: string, value: any): SQL {
+  // Build the JSONB object for containment check
+  const jsonObj = JSON.stringify({ [field]: value })
+  return sql`${sql.raw(`${tableName}.data`)} @> ${jsonObj}::jsonb`
+}
+
 /**
  * Build WHERE clause from filter object
  * Supports: $eq, $ne, $gt, $gte, $lt, $lte, $in, $nin, $contains
+ *
+ * Uses GIN-index-compatible containment operator (@>) for:
+ * - $eq (equality)
+ * - Direct value equality
+ * - $in (as OR of containment checks)
+ *
+ * Uses text extraction (->>) for operators that require it:
+ * - $ne (not equals - no containment equivalent)
+ * - $gt, $gte, $lt, $lte (numeric comparisons)
+ * - $nin (not in)
+ * - $contains (pattern matching)
  */
 export function buildFilterClause(filter: QueryFilter, tableName: string): SQL | undefined {
   const conditions: SQL[] = []

   for (const [field, condition] of Object.entries(filter)) {
-    // Escape field name to prevent SQL injection
+    // Escape field name to prevent SQL injection (for ->> operators)
     const escapedField = field.replace(/'/g, "''")

     if (typeof condition === 'object' && condition !== null && !Array.isArray(condition)) {
@@ -43,16 +68,15 @@ export function buildFilterClause(filter: QueryFilter, tableName: string): SQL |
       for (const [op, value] of Object.entries(condition)) {
         switch (op) {
           case '$eq':
-            conditions.push(
-              sql`${sql.raw(`${tableName}.data->>'${escapedField}'`)} = ${String(value)}`
-            )
+            // Use containment operator for GIN index support
+            conditions.push(buildContainmentClause(tableName, field, value))
             break
           case '$ne':
-            conditions.push(
-              sql`${sql.raw(`${tableName}.data->>'${escapedField}'`)} != ${String(value)}`
-            )
+            // NOT containment - still uses GIN index for the containment check
+            conditions.push(sql`NOT (${buildContainmentClause(tableName, field, value)})`)
             break
           case '$gt':
+            // Numeric comparison requires text extraction (no GIN support)
             conditions.push(
               sql`(${sql.raw(`${tableName}.data->>'${escapedField}'`)})::numeric > ${value}`
             )
@@ -73,22 +97,29 @@ export function buildFilterClause(filter: QueryFilter, tableName: string): SQL |
             )
             break
           case '$in':
+            // Use OR of containment checks for GIN index support
             if (Array.isArray(value) && value.length > 0) {
-              const valuesList = value.map((v) => String(v))
-              conditions.push(
-                sql`${sql.raw(`${tableName}.data->>'${escapedField}'`)} = ANY(${valuesList})`
-              )
+              if (value.length === 1) {
+                // Single value - just use containment
+                conditions.push(buildContainmentClause(tableName, field, value[0]))
+              } else {
+                // Multiple values - OR of containment checks
+                const inConditions = value.map((v) => buildContainmentClause(tableName, field, v))
+                conditions.push(sql`(${sql.join(inConditions, sql.raw(' OR '))})`)
+              }
             }
             break
           case '$nin':
+            // NOT IN requires checking none of the values match
            if (Array.isArray(value) && value.length > 0) {
-              const valuesList = value.map((v) => String(v))
-              conditions.push(
-                sql`${sql.raw(`${tableName}.data->>'${escapedField}'`)} != ALL(${valuesList})`
+              const ninConditions = value.map(
+                (v) => sql`NOT (${buildContainmentClause(tableName, field, v)})`
               )
+              conditions.push(sql`(${sql.join(ninConditions, sql.raw(' AND '))})`)
             }
             break
           case '$contains':
+            // Pattern matching requires text extraction (no GIN support)
             conditions.push(
               sql`${sql.raw(`${tableName}.data->>'${escapedField}'`)} ILIKE ${`%${value}%`}`
             )
@@ -96,10 +127,8 @@ export function buildFilterClause(filter: QueryFilter, tableName: string): SQL |
         }
       }
     } else {
-      // Direct equality
-      conditions.push(
-        sql`${sql.raw(`${tableName}.data->>'${escapedField}'`)} = ${String(condition)}`
-      )
+      // Direct equality - use containment operator for GIN index support
+      conditions.push(buildContainmentClause(tableName, field, condition))
     }
   }
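A rough sketch of how a mixed filter maps onto the two code paths described above. The filter, the SQL shapes in the comments, and the physical table name are illustrative rather than taken from this diff, and the conditions are presumably AND-combined (the tail of `buildFilterClause` is outside the hunks shown):

```ts
import type { QueryFilter } from '@/lib/table'
import { buildFilterClause } from '@/lib/table/query-builder'

const filter: QueryFilter = {
  status: 'active', // direct equality
  age: { $gt: 21 }, // numeric comparison
  name: { $contains: 'al' }, // pattern match
  plan: { $in: ['pro', 'team'] }, // multi-value membership
}

// Roughly the SQL produced (parameters inlined for readability):
//   user_table_rows.data @> '{"status":"active"}'::jsonb            -- GIN-indexable
//   AND (user_table_rows.data->>'age')::numeric > 21                -- text extraction, no GIN
//   AND user_table_rows.data->>'name' ILIKE '%al%'                  -- text extraction, no GIN
//   AND (user_table_rows.data @> '{"plan":"pro"}'::jsonb
//        OR user_table_rows.data @> '{"plan":"team"}'::jsonb)       -- OR of containment checks
const where = buildFilterClause(filter, 'user_table_rows')
```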
diff --git a/apps/sim/lib/table/validation.ts b/apps/sim/lib/table/validation.ts
index ac1baa262..bef7f41f4 100644
--- a/apps/sim/lib/table/validation.ts
+++ b/apps/sim/lib/table/validation.ts
@@ -205,3 +205,52 @@ export function validateRowAgainstSchema(
 export function getUniqueColumns(schema: TableSchema): ColumnDefinition[] {
   return schema.columns.filter((col) => col.unique === true)
 }
+
+/**
+ * Validates unique constraints for row data
+ * Checks if values for unique columns would violate uniqueness
+ */
+export function validateUniqueConstraints(
+  data: Record<string, any>,
+  schema: TableSchema,
+  existingRows: Array<{ id: string; data: Record<string, any> }>,
+  excludeRowId?: string
+): ValidationResult {
+  const errors: string[] = []
+  const uniqueColumns = getUniqueColumns(schema)
+
+  for (const column of uniqueColumns) {
+    const value = data[column.name]
+
+    // Skip null/undefined values for optional unique columns
+    if (value === null || value === undefined) {
+      continue
+    }
+
+    // Check if value exists in other rows
+    const duplicate = existingRows.find((row) => {
+      // Skip the row being updated
+      if (excludeRowId && row.id === excludeRowId) {
+        return false
+      }
+
+      // Check if value matches (case-insensitive for strings)
+      const existingValue = row.data[column.name]
+      if (typeof value === 'string' && typeof existingValue === 'string') {
+        return value.toLowerCase() === existingValue.toLowerCase()
+      }
+      return value === existingValue
+    })
+
+    if (duplicate) {
+      errors.push(
+        `Column "${column.name}" must be unique. Value "${value}" already exists in row ${duplicate.id}`
+      )
+    }
+  }
+
+  return {
+    valid: errors.length === 0,
+    errors,
+  }
+}
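One subtlety worth noting in the helper above: null and undefined values are skipped, so any number of rows may leave an optional unique column unset without tripping the constraint. A sketch with a hypothetical schema (cast for brevity, as before):

```ts
import type { TableSchema } from '@/lib/table'
import { validateUniqueConstraints } from '@/lib/table'

// Hypothetical schema: "nickname" is unique but not required.
const schema = {
  columns: [{ name: 'nickname', type: 'string', unique: true }],
} as unknown as TableSchema

// An existing row that also left the column empty.
const existing = [{ id: 'r1', data: { nickname: null } }]

// Passes: the null value is skipped before any comparison happens.
validateUniqueConstraints({ nickname: null }, schema, existing)
// -> { valid: true, errors: [] }
```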