This commit is contained in:
Lakee Sivaraya
2026-01-14 13:08:35 -08:00
parent e287388b03
commit dfa018f2d4
7 changed files with 68 additions and 315 deletions

View File

@@ -8,7 +8,7 @@ import {
COMPARISON_OPERATORS,
conditionsToJsonString,
type FilterCondition,
generateFilterId,
generateId,
jsonStringToConditions,
LOGICAL_OPERATORS,
} from '@/lib/table/filter-builder-utils'
@@ -35,7 +35,7 @@ interface FilterFormatProps {
* Creates a new filter condition with default values
*/
const createDefaultCondition = (columns: ComboboxOption[]): FilterCondition => ({
id: generateFilterId(),
id: generateId(),
logicalOperator: 'and',
column: columns[0]?.value || '',
operator: 'eq',

View File

@@ -4,7 +4,7 @@ import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { Plus, X } from 'lucide-react'
import { Button, Combobox, type ComboboxOption } from '@/components/emcn'
import {
generateSortId,
generateId,
jsonStringToSortConditions,
SORT_DIRECTIONS,
type SortCondition,
@@ -30,7 +30,7 @@ interface SortFormatProps {
* Creates a new sort condition with default values
*/
const createDefaultCondition = (columns: ComboboxOption[]): SortCondition => ({
id: generateSortId(),
id: generateId(),
column: columns[0]?.value || '',
direction: 'asc',
})

View File

@@ -113,6 +113,7 @@ export function useCreateTable(workspaceId: string) {
return res.json()
},
onSuccess: () => {
// Invalidate the tables list query to refresh the UI
queryClient.invalidateQueries({ queryKey: tableKeys.list(workspaceId) })
},
})
@@ -146,6 +147,7 @@ export function useDeleteTable(workspaceId: string) {
return res.json()
},
onSuccess: () => {
// Invalidate the tables list query to refresh the UI
queryClient.invalidateQueries({ queryKey: tableKeys.list(workspaceId) })
},
})

View File

@@ -4,9 +4,6 @@
* @module lib/table/constants
*/
/**
* Table and column limits for user-defined tables.
*/
export const TABLE_LIMITS = {
MAX_TABLES_PER_WORKSPACE: 100,
MAX_ROWS_PER_TABLE: 10000,
@@ -20,19 +17,8 @@ export const TABLE_LIMITS = {
MAX_QUERY_LIMIT: 1000,
} as const
/**
* Valid column types for table schema.
*/
export const COLUMN_TYPES = ['string', 'number', 'boolean', 'date', 'json'] as const
/**
* Type representing a valid column type.
*/
export type ColumnType = (typeof COLUMN_TYPES)[number]
/**
* Regex pattern for valid table and column names.
*
* Must start with letter or underscore, followed by alphanumeric or underscore.
*/
export const NAME_PATTERN = /^[a-z_][a-z0-9_]*$/i

View File

@@ -1,19 +1,11 @@
/**
* Shared utilities for filter builder UI components.
*
* Used by both the table data viewer and the block editor filter-format component.
*
* @module lib/table/filter-builder-utils
*/
/**
* JSON-serializable value types.
*/
type JsonValue = string | number | boolean | null | JsonValue[] | { [key: string]: JsonValue }
/**
* Available comparison operators for filter conditions.
*/
export const COMPARISON_OPERATORS = [
{ value: 'eq', label: 'equals' },
{ value: 'ne', label: 'not equals' },
@@ -25,44 +17,34 @@ export const COMPARISON_OPERATORS = [
{ value: 'in', label: 'in array' },
] as const
/**
* Logical operators for combining filter conditions.
*/
export const LOGICAL_OPERATORS = [
{ value: 'and', label: 'and' },
{ value: 'or', label: 'or' },
] as const
/**
* Represents a single filter condition in builder format.
*/
export interface FilterCondition {
/** Unique identifier for the condition */
id: string
/** Logical operator to combine with previous condition */
logicalOperator: 'and' | 'or'
/** Column name to filter on */
column: string
/** Comparison operator */
operator: string
/** Filter value as string */
value: string
}
/**
* Generates a unique ID for filter conditions.
* Generates a unique ID for filter or sort conditions.
* Used as React keys for list items in the builder UI.
*
* @returns Random alphanumeric string
*/
export function generateFilterId(): string {
export function generateId(): string {
  // Base-36 fraction digits of a random float, minus the leading "0." prefix;
  // yields up to 7 alphanumeric chars, enough for React list keys.
  const encoded = Math.random().toString(36)
  return encoded.slice(2, 9)
}
/**
* Parses a value string into its appropriate type.
* Parses a string value into its appropriate type based on the operator.
*
* @param value - The string value to parse
* @param operator - The operator being used (affects parsing for 'in')
* @param value - String value to parse
* @param operator - Operator being used (affects parsing for 'in')
* @returns Parsed value (string, number, boolean, null, or array)
*/
function parseValue(value: string, operator: string): JsonValue {
@@ -137,7 +119,6 @@ export function filterToConditions(filter: Record<string, JsonValue> | null): Fi
const conditions: FilterCondition[] = []
// Handle $or at the top level
if (filter.$or && Array.isArray(filter.$or)) {
filter.$or.forEach((orGroup, groupIndex) => {
if (typeof orGroup !== 'object' || orGroup === null || Array.isArray(orGroup)) {
@@ -159,7 +140,6 @@ export function filterToConditions(filter: Record<string, JsonValue> | null): Fi
return conditions
}
// Handle simple filter (all AND conditions)
return parseFilterGroup(filter)
}
@@ -176,11 +156,10 @@ function parseFilterGroup(group: Record<string, JsonValue>): FilterCondition[] {
if (column === '$or' || column === '$and') continue
if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
// Operator-based condition
for (const [op, opValue] of Object.entries(value)) {
if (op.startsWith('$')) {
conditions.push({
id: generateFilterId(),
id: generateId(),
logicalOperator: 'and',
column,
operator: op.substring(1),
@@ -189,9 +168,8 @@ function parseFilterGroup(group: Record<string, JsonValue>): FilterCondition[] {
}
}
} else {
// Direct equality
conditions.push({
id: generateFilterId(),
id: generateId(),
logicalOperator: 'and',
column,
operator: 'eq',
@@ -245,35 +223,17 @@ export function jsonStringToConditions(jsonString: string): FilterCondition[] {
}
}
/**
* Sort direction options.
*/
export const SORT_DIRECTIONS = [
{ value: 'asc', label: 'ascending' },
{ value: 'desc', label: 'descending' },
] as const
/**
* Represents a single sort condition in builder format.
*/
export interface SortCondition {
/** Unique identifier for the sort condition */
id: string
/** Column name to sort by */
column: string
/** Sort direction */
direction: 'asc' | 'desc'
}
/**
* Generates a unique ID for sort conditions.
*
* @returns Random alphanumeric string
*/
export function generateSortId(): string {
  // Random base-36 digits; drop the "0." prefix and cap at 7 characters.
  const encoded = Math.random().toString(36)
  return encoded.slice(2, 9)
}
/**
* Converts builder sort conditions to sort object.
*
@@ -303,7 +263,7 @@ export function sortToConditions(sort: Record<string, string> | null): SortCondi
if (!sort) return []
return Object.entries(sort).map(([column, direction]) => ({
id: generateSortId(),
id: generateId(),
column,
direction: direction === 'desc' ? 'desc' : 'asc',
}))

View File

@@ -1,170 +1,92 @@
/**
* Query builder utilities for user-defined tables.
*
* Provides functions to build SQL WHERE and ORDER BY clauses for querying
* user table rows stored as JSONB in PostgreSQL. Supports filtering on
* JSONB fields using various operators ($eq, $ne, $gt, $gte, $lt, $lte, $in, $nin, $contains)
* and sorting by both JSONB fields and built-in columns (createdAt, updatedAt).
*
* IMPORTANT: For equality operations ($eq and direct value), we use the JSONB
* containment operator (@>) which can leverage the GIN index on the data column.
* For comparison operators ($gt, $lt, etc.) and pattern matching ($contains),
* we must use the text extraction operator (->>) which cannot use the GIN index.
* Uses JSONB containment operator (@>) for equality to leverage GIN index.
* Uses text extraction (->>) for comparisons and pattern matching.
*/
import type { SQL } from 'drizzle-orm'
import { sql } from 'drizzle-orm'
/**
* JSON-serializable value types.
*/
type JsonValue = string | number | boolean | null | JsonValue[] | { [key: string]: JsonValue }
/**
 * Field condition operators for filtering.
 *
 * Equality-style operators ($eq, $ne, $in, $nin) are evaluated with the JSONB
 * containment operator (@>), which can use the GIN index on the data column.
 * Range operators and $contains extract the field as text (->>) and cannot
 * use that index.
 */
export interface FieldCondition {
  /** Equality — JSONB containment (@>), GIN-index friendly */
  $eq?: JsonValue
  /** Not equal — negated containment check */
  $ne?: JsonValue
  /** Greater than — numeric cast of text extraction (->>) */
  $gt?: number
  /** Greater than or equal — numeric cast of text extraction (->>) */
  $gte?: number
  /** Less than — numeric cast of text extraction (->>) */
  $lt?: number
  /** Less than or equal — numeric cast of text extraction (->>) */
  $lte?: number
  /** Value in array — OR-chain of containment checks */
  $in?: JsonValue[]
  /** Value not in array — AND-chain of negated containment checks */
  $nin?: JsonValue[]
  /** String contains — case-insensitive ILIKE; cannot use the GIN index */
  $contains?: string
}
/**
 * Query filter object supporting logical operators and field conditions.
 *
 * Keys are either logical operators ('$or' / '$and', each holding an array of
 * nested filters) or column names mapped to a direct value (equality) or a
 * FieldCondition operator object. Sibling entries are combined with AND.
 */
export interface QueryFilter {
  /** Sub-filters joined with OR */
  $or?: QueryFilter[]
  /** Sub-filters joined with AND */
  $and?: QueryFilter[]
  /** Field conditions keyed by column name; undefined entries are skipped */
  [key: string]: JsonValue | FieldCondition | QueryFilter[] | undefined
}
/**
* Builds a JSONB containment clause that can use the GIN index.
*
* The containment operator (@>) checks if the left JSONB value contains the right JSONB value.
* This is efficient because PostgreSQL can use a GIN index on the data column.
*
* Example: For field "age" with value 25, generates:
* `table.data @> '{"age": 25}'::jsonb`
*
* This is equivalent to: WHERE data->>'age' = '25' but can use the GIN index.
*
* @param tableName - The table alias/name (e.g., "user_tables")
* @param field - The field name within the JSONB data column
* @param value - The value to check for containment
* @returns SQL clause for containment check
* Builds a JSONB containment clause using GIN index.
* Generates: `table.data @> '{"field": value}'::jsonb`
*/
function buildContainmentClause(tableName: string, field: string, value: JsonValue): SQL {
  // Single-key probe object, e.g. { age: 25 } -> '{"age":25}'; Postgres tests
  // data @> probe, which the GIN index on the data column can serve.
  const probe: { [key: string]: JsonValue } = {}
  probe[field] = value
  return sql`${sql.raw(`${tableName}.data`)} @> ${JSON.stringify(probe)}::jsonb`
}
/**
* Builds SQL conditions for a single field.
*
* This function handles two types of conditions:
* 1. Direct value equality: `{ age: 25 }` -> uses containment operator (@>)
* 2. Operator-based: `{ age: { $gt: 25 } }` -> uses text extraction (->>) for comparisons
*
* The function returns an array because some operators (like $in) generate multiple conditions.
*
* @param tableName - The table alias/name
* @param field - The field name within the JSONB data column
* @param condition - Either a direct value (JsonValue) or an operator object (FieldCondition)
* @returns Array of SQL conditions (usually one, but can be multiple for $in/$nin)
*/
function buildFieldCondition(
tableName: string,
field: string,
condition: JsonValue | FieldCondition
): SQL[] {
const conditions: SQL[] = []
// Escape single quotes in field name to prevent SQL injection
// Example: "O'Brien" -> "O''Brien"
const escapedField = field.replace(/'/g, "''")
// Check if condition is an operator object (e.g., { $gt: 25 })
if (typeof condition === 'object' && condition !== null && !Array.isArray(condition)) {
// Operator-based filter: iterate through operators like $eq, $gt, etc.
for (const [op, value] of Object.entries(condition)) {
switch (op) {
case '$eq':
// Equality: uses containment operator for GIN index support
// Example: { age: { $eq: 25 } } -> data @> '{"age": 25}'::jsonb
conditions.push(buildContainmentClause(tableName, field, value as JsonValue))
break
case '$ne':
// Not equal: negation of containment
// Example: { age: { $ne: 25 } } -> NOT (data @> '{"age": 25}'::jsonb)
conditions.push(
sql`NOT (${buildContainmentClause(tableName, field, value as JsonValue)})`
)
break
case '$gt':
// Greater than: must use text extraction (->>) and cast to numeric
// Cannot use containment operator for comparisons
// Example: { age: { $gt: 25 } } -> (data->>'age')::numeric > 25
conditions.push(
sql`(${sql.raw(`${tableName}.data->>'${escapedField}'`)})::numeric > ${value}`
)
break
case '$gte':
// Greater than or equal
// Example: { age: { $gte: 25 } } -> (data->>'age')::numeric >= 25
conditions.push(
sql`(${sql.raw(`${tableName}.data->>'${escapedField}'`)})::numeric >= ${value}`
)
break
case '$lt':
// Less than
// Example: { age: { $lt: 25 } } -> (data->>'age')::numeric < 25
conditions.push(
sql`(${sql.raw(`${tableName}.data->>'${escapedField}'`)})::numeric < ${value}`
)
break
case '$lte':
// Less than or equal
// Example: { age: { $lte: 25 } } -> (data->>'age')::numeric <= 25
conditions.push(
sql`(${sql.raw(`${tableName}.data->>'${escapedField}'`)})::numeric <= ${value}`
)
break
case '$in':
// Value in array: converts to OR of containment checks
// Example: { age: { $in: [25, 30, 35] } }
// -> (data @> '{"age": 25}'::jsonb OR data @> '{"age": 30}'::jsonb OR data @> '{"age": 35}'::jsonb)
if (Array.isArray(value) && value.length > 0) {
if (value.length === 1) {
// Single value: just use containment directly
conditions.push(buildContainmentClause(tableName, field, value[0]))
} else {
// Multiple values: create OR chain of containment checks
const inConditions = value.map((v) => buildContainmentClause(tableName, field, v))
conditions.push(sql`(${sql.join(inConditions, sql.raw(' OR '))})`)
}
@@ -172,9 +94,6 @@ function buildFieldCondition(
break
case '$nin':
// Value not in array: converts to AND of negated containment checks
// Example: { age: { $nin: [25, 30] } }
// -> (NOT (data @> '{"age": 25}'::jsonb) AND NOT (data @> '{"age": 30}'::jsonb))
if (Array.isArray(value) && value.length > 0) {
const ninConditions = value.map(
(v) => sql`NOT (${buildContainmentClause(tableName, field, v)})`
@@ -184,9 +103,6 @@ function buildFieldCondition(
break
case '$contains':
// String contains: uses ILIKE for case-insensitive pattern matching
// Example: { name: { $contains: "john" } } -> data->>'name' ILIKE '%john%'
// Note: This cannot use the GIN index, so it's slower on large datasets
conditions.push(
sql`${sql.raw(`${tableName}.data->>'${escapedField}'`)} ILIKE ${`%${value}%`}`
)
@@ -194,9 +110,6 @@ function buildFieldCondition(
}
}
} else {
// Direct equality: condition is a primitive value (string, number, boolean, null)
// Example: { age: 25 } -> data @> '{"age": 25}'::jsonb
// This uses the containment operator for optimal performance with GIN index
conditions.push(buildContainmentClause(tableName, field, condition))
}
@@ -205,96 +118,87 @@ function buildFieldCondition(
/**
* Builds a WHERE clause from a filter object.
*
* This is the main entry point for converting a QueryFilter object into SQL.
* It recursively processes the filter, handling logical operators ($or, $and) and
* field conditions.
*
* Examples:
* 1. Simple filter: `{ age: 25, name: "John" }`
* -> `(data @> '{"age": 25}'::jsonb) AND (data @> '{"name": "John"}'::jsonb)`
*
* 2. With operators: `{ age: { $gt: 25 }, name: { $contains: "john" } }`
* -> `((data->>'age')::numeric > 25) AND (data->>'name' ILIKE '%john%')`
*
* 3. With $or: `{ $or: [{ age: 25 }, { age: 30 }] }`
* -> `((data @> '{"age": 25}'::jsonb) OR (data @> '{"age": 30}'::jsonb))`
*
* Performance notes:
* - Uses GIN-index-compatible containment operator (@>) for: $eq, direct equality, $in
* - Uses text extraction (->>) for: $ne, $gt, $gte, $lt, $lte, $nin, $contains
* - Text extraction cannot use GIN index, so those queries are slower
*
* @param filter - The filter object to convert to SQL
* @param tableName - The table alias/name (e.g., "user_tables")
* @returns SQL WHERE clause or undefined if filter is empty
* Recursively processes logical operators ($or, $and) and field conditions.
*/
export function buildFilterClause(filter: QueryFilter, tableName: string): SQL | undefined {
const conditions: SQL[] = []
// Iterate through all fields in the filter object
/**
* Iterate over each field and its associated condition in the filter object.
*
* The filter is expected to be an object where keys are either field names or logical operators
* ('$or', '$and'), and values are the conditions to apply or arrays of nested filter objects.
*/
for (const [field, condition] of Object.entries(filter)) {
// Skip undefined conditions (can happen with optional fields)
// Skip undefined conditions (e.g., unused or programmatically removed filters)
if (condition === undefined) {
continue
}
// Handle $or operator: creates OR group of sub-filters
// Example: { $or: [{ age: 25 }, { name: "John" }] }
// -> (age condition) OR (name condition)
/**
* Handle the logical OR operator: { $or: [filter1, filter2, ...] }
* Recursively build SQL clauses for each sub-filter,
* then join them with an OR. If there is only one sub-filter,
* no need for OR grouping.
*/
if (field === '$or' && Array.isArray(condition)) {
const orConditions: SQL[] = []
// Recursively process each sub-filter in the OR array
for (const subFilter of condition) {
const subClause = buildFilterClause(subFilter as QueryFilter, tableName)
if (subClause) {
orConditions.push(subClause)
}
}
// Only add OR group if we have at least one condition
if (orConditions.length > 0) {
if (orConditions.length === 1) {
// Single condition: no need for parentheses
// Only one condition; no need to wrap in OR
conditions.push(orConditions[0])
} else {
// Multiple conditions: wrap in parentheses and join with OR
// Multiple conditions; join by OR
conditions.push(sql`(${sql.join(orConditions, sql.raw(' OR '))})`)
}
}
continue
}
// Handle $and operator: creates AND group of sub-filters
// Example: { $and: [{ age: { $gt: 25 } }, { name: { $contains: "john" } }] }
// -> (age condition) AND (name condition)
/**
* Handle the logical AND operator: { $and: [filter1, filter2, ...] }
* Recursively build SQL clauses for each sub-filter,
* then join them with an AND. If there is only one sub-filter,
* no need for AND grouping.
*/
if (field === '$and' && Array.isArray(condition)) {
const andConditions: SQL[] = []
// Recursively process each sub-filter in the AND array
for (const subFilter of condition) {
const subClause = buildFilterClause(subFilter as QueryFilter, tableName)
if (subClause) {
andConditions.push(subClause)
}
}
// Only add AND group if we have at least one condition
if (andConditions.length > 0) {
if (andConditions.length === 1) {
// Single condition: no need for parentheses
// Only one condition; no need to wrap in AND
conditions.push(andConditions[0])
} else {
// Multiple conditions: wrap in parentheses and join with AND
// Multiple conditions; join by AND
conditions.push(sql`(${sql.join(andConditions, sql.raw(' AND '))})`)
}
}
continue
}
// Handle regular field conditions (not $or or $and)
// This processes fields like "age", "name", etc. with their conditions
// Skip if condition is QueryFilter[] (shouldn't happen for regular fields)
/**
* If the condition is an array, but not a logical operator,
* skip it (invalid filter structure).
*/
if (Array.isArray(condition)) {
continue
}
/**
* Build conditions for regular fields.
* This delegates to buildFieldCondition, which handles comparisons like $eq, $gt, etc.
*/
const fieldConditions = buildFieldCondition(
tableName,
field,
@@ -303,35 +207,20 @@ export function buildFilterClause(filter: QueryFilter, tableName: string): SQL |
conditions.push(...fieldConditions)
}
// Return undefined if no conditions were generated
/**
* If no conditions were built, return undefined to indicate no filter.
* If only one condition exists, return it directly.
* Otherwise, join all conditions using AND.
*/
if (conditions.length === 0) return undefined
// If only one condition, return it directly (no need to join)
if (conditions.length === 1) return conditions[0]
// Multiple conditions: join with AND (default behavior)
// Example: { age: 25, name: "John" } -> condition1 AND condition2
return sql.join(conditions, sql.raw(' AND '))
}
/**
* Builds an ORDER BY clause from a sort object.
*
* Supports sorting by:
* 1. Built-in columns: createdAt, updatedAt (direct column access)
* 2. JSONB fields: any field in the data column (uses text extraction)
*
* Examples:
* - `{ createdAt: 'desc' }` -> `table.createdAt DESC`
* - `{ age: 'asc', name: 'desc' }` -> `table.data->>'age' ASC, table.data->>'name' DESC`
*
* Note: Sorting by JSONB fields uses text extraction (->>), which means:
* - Numbers are sorted as strings (e.g., "10" < "2")
* - No index can be used, so sorting is slower on large datasets
*
* @param sort - Sort object with field names as keys and 'asc'|'desc' as values
* @param tableName - The table alias/name (e.g., "user_tables")
* @returns SQL ORDER BY clause or undefined if no sort specified
* Note: JSONB fields use text extraction, so numeric sorting may not work as expected.
*/
export function buildSortClause(
sort: Record<string, 'asc' | 'desc'>,
@@ -339,26 +228,26 @@ export function buildSortClause(
): SQL | undefined {
const clauses: SQL[] = []
// Process each field in the sort object
/**
* Build ORDER BY SQL clauses based on the sort object keys and directions.
* - For `createdAt` and `updatedAt`, use the top-level table columns for proper type sorting.
* - For all other fields, treat them as keys in the table's data JSONB column.
* Extraction is performed with ->> to return text, which is then sorted.
* - Field names are escaped for safety.
*/
for (const [field, direction] of Object.entries(sort)) {
// Escape single quotes in field name to prevent SQL injection
// Example: "O'Brien" -> "O''Brien"
// Escape single quotes for SQL safety
const escapedField = field.replace(/'/g, "''")
// Check if this is a built-in column (createdAt, updatedAt)
// These are actual columns in the table, not JSONB fields
if (field === 'createdAt' || field === 'updatedAt') {
// Built-in columns: direct column access
// Example: { createdAt: 'desc' } -> table.createdAt DESC
// Use regular column for timestamp sorting
clauses.push(
direction === 'asc'
? sql.raw(`${tableName}.${escapedField} ASC`)
: sql.raw(`${tableName}.${escapedField} DESC`)
)
} else {
// JSONB fields: use text extraction operator (->>)
// Example: { age: 'asc' } -> table.data->>'age' ASC
// Note: This extracts the value as text, so numeric sorting may not work as expected
// Use text extraction for JSONB field sorting
clauses.push(
direction === 'asc'
? sql.raw(`${tableName}.data->>'${escapedField}' ASC`)
@@ -367,7 +256,5 @@ export function buildSortClause(
}
}
// Join multiple sort fields with commas
// Example: { age: 'asc', name: 'desc' } -> "age ASC, name DESC"
return clauses.length > 0 ? sql.join(clauses, sql.raw(', ')) : undefined
}

View File

@@ -7,49 +7,24 @@
import type { ColumnType } from './constants'
import { COLUMN_TYPES, NAME_PATTERN, TABLE_LIMITS } from './constants'
/**
* Definition of a table column.
*/
export interface ColumnDefinition {
/** Column name */
name: string
/** Column data type */
type: ColumnType
/** Whether the column is required */
required?: boolean
/** Whether the column values must be unique */
unique?: boolean
}
/**
* Table schema definition.
*/
export interface TableSchema {
/** Array of column definitions */
columns: ColumnDefinition[]
}
/**
* Result of a validation operation.
*/
interface ValidationResult {
/** Whether validation passed */
valid: boolean
/** Array of error messages */
errors: string[]
}
/**
* Represents a row's data values.
*/
type RowData = Record<string, unknown>
/**
* Validates a table name against naming rules.
*
* @param name - The table name to validate
* @returns Validation result with errors if invalid
*/
export function validateTableName(name: string): ValidationResult {
const errors: string[] = []
@@ -76,12 +51,6 @@ export function validateTableName(name: string): ValidationResult {
}
}
/**
* Validates a column definition.
*
* @param column - The column definition to validate
* @returns Validation result with errors if invalid
*/
export function validateColumnDefinition(column: ColumnDefinition): ValidationResult {
const errors: string[] = []
@@ -114,12 +83,6 @@ export function validateColumnDefinition(column: ColumnDefinition): ValidationRe
}
}
/**
* Validates a table schema.
*
* @param schema - The schema to validate
* @returns Validation result with errors if invalid
*/
export function validateTableSchema(schema: TableSchema): ValidationResult {
const errors: string[] = []
@@ -141,13 +104,11 @@ export function validateTableSchema(schema: TableSchema): ValidationResult {
errors.push(`Schema exceeds maximum columns (${TABLE_LIMITS.MAX_COLUMNS_PER_TABLE})`)
}
// Validate each column
for (const column of schema.columns) {
const columnResult = validateColumnDefinition(column)
errors.push(...columnResult.errors)
}
// Check for duplicate column names
const columnNames = schema.columns.map((c) => c.name.toLowerCase())
const uniqueNames = new Set(columnNames)
if (uniqueNames.size !== columnNames.length) {
@@ -160,12 +121,6 @@ export function validateTableSchema(schema: TableSchema): ValidationResult {
}
}
/**
* Validates that row data size is within limits.
*
* @param data - The row data to validate
* @returns Validation result with errors if size exceeds limit
*/
export function validateRowSize(data: RowData): ValidationResult {
const size = JSON.stringify(data).length
if (size > TABLE_LIMITS.MAX_ROW_SIZE_BYTES) {
@@ -177,29 +132,19 @@ export function validateRowSize(data: RowData): ValidationResult {
return { valid: true, errors: [] }
}
/**
* Validates row data against a table schema.
*
* @param data - The row data to validate
* @param schema - The schema to validate against
* @returns Validation result with errors if validation fails
*/
export function validateRowAgainstSchema(data: RowData, schema: TableSchema): ValidationResult {
const errors: string[] = []
for (const column of schema.columns) {
const value = data[column.name]
// Check required fields
if (column.required && (value === undefined || value === null)) {
errors.push(`Missing required field: ${column.name}`)
continue
}
// Skip type validation if value is null/undefined for optional fields
if (value === null || value === undefined) continue
// Type validation
switch (column.type) {
case 'string':
if (typeof value !== 'string') {
@@ -242,38 +187,15 @@ export function validateRowAgainstSchema(data: RowData, schema: TableSchema): Va
}
}
/**
* Gets all columns marked as unique from a schema.
*
* @param schema - The schema to extract unique columns from
* @returns Array of unique column definitions
*/
export function getUniqueColumns(schema: TableSchema): ColumnDefinition[] {
  // Collect only the columns whose `unique` flag is explicitly true.
  const uniqueColumns: ColumnDefinition[] = []
  for (const column of schema.columns) {
    if (column.unique === true) {
      uniqueColumns.push(column)
    }
  }
  return uniqueColumns
}
/**
* Represents an existing row for uniqueness checking.
*/
interface ExistingRow {
/** Row ID */
id: string
/** Row data values */
data: RowData
}
/**
* Validates unique constraints for row data.
*
* Checks if values for unique columns would violate uniqueness constraints
* when compared against existing rows.
*
* @param data - The row data to validate
* @param schema - The schema containing unique column definitions
* @param existingRows - Array of existing rows to check against
* @param excludeRowId - Optional row ID to exclude from uniqueness check (for updates)
* @returns Validation result with errors if uniqueness constraints are violated
*/
export function validateUniqueConstraints(
data: RowData,
schema: TableSchema,
@@ -286,19 +208,15 @@ export function validateUniqueConstraints(
for (const column of uniqueColumns) {
const value = data[column.name]
// Skip null/undefined values for optional unique columns
if (value === null || value === undefined) {
continue
}
// Check if value exists in other rows
const duplicate = existingRows.find((row) => {
// Skip the row being updated
if (excludeRowId && row.id === excludeRowId) {
return false
}
// Check if value matches (case-insensitive for strings)
const existingValue = row.data[column.name]
if (typeof value === 'string' && typeof existingValue === 'string') {
return value.toLowerCase() === existingValue.toLowerCase()