validation: consolidate table schema and row validation into a single module

Lakee Sivaraya
2026-01-15 16:18:54 -08:00
parent 0a6312dbac
commit 466559578e
4 changed files with 220 additions and 363 deletions

View File

@@ -1,12 +1,8 @@
/**
* @vitest-environment node
*
* Validation Schema Unit Tests
*
* Tests for table schema validation utilities.
*/
import { describe, expect, it } from 'vitest'
import { TABLE_LIMITS } from '../constants'
import { TABLE_LIMITS } from './constants'
import {
type ColumnDefinition,
getUniqueColumns,
@@ -17,9 +13,9 @@ import {
validateTableName,
validateTableSchema,
validateUniqueConstraints,
} from './schema'
} from './validation'
describe('Validation Schema', () => {
describe('Validation', () => {
describe('validateTableName', () => {
it('should accept valid table names', () => {
const validNames = ['users', 'user_data', '_private', 'Users123', 'a']

View File

@@ -1,14 +1,174 @@
/**
* Validation utilities for table schemas and row data.
*
* @module lib/table/validation/schema
*/
import { COLUMN_TYPES, NAME_PATTERN, TABLE_LIMITS } from '../constants'
import type { ColumnDefinition, RowData, TableSchema, ValidationResult } from '../types'
import { db } from '@sim/db'
import { userTableRows } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { COLUMN_TYPES, NAME_PATTERN, TABLE_LIMITS } from './constants'
import type { ColumnDefinition, RowData, TableSchema, ValidationResult } from './types'
export type { ColumnDefinition, TableSchema, ValidationResult }
type ValidationSuccess = { valid: true }
type ValidationFailure = { valid: false; response: NextResponse }
/** Options for validating a single row. */
export interface ValidateRowOptions {
rowData: RowData
schema: TableSchema
tableId: string
excludeRowId?: string
checkUnique?: boolean
}
/** Error information for a single row in batch validation. */
export interface BatchRowError {
row: number
errors: string[]
}
/** Options for validating multiple rows in batch. */
export interface ValidateBatchRowsOptions {
rows: RowData[]
schema: TableSchema
tableId: string
checkUnique?: boolean
}
/**
* Validates a single row (size, schema, unique constraints) and returns a formatted response on failure.
*/
export async function validateRowData(
options: ValidateRowOptions
): Promise<ValidationSuccess | ValidationFailure> {
const { rowData, schema, tableId, excludeRowId, checkUnique = true } = options
const sizeValidation = validateRowSize(rowData)
if (!sizeValidation.valid) {
return {
valid: false,
response: NextResponse.json(
{ error: 'Invalid row data', details: sizeValidation.errors },
{ status: 400 }
),
}
}
const schemaValidation = validateRowAgainstSchema(rowData, schema)
if (!schemaValidation.valid) {
return {
valid: false,
response: NextResponse.json(
{ error: 'Row data does not match schema', details: schemaValidation.errors },
{ status: 400 }
),
}
}
if (checkUnique) {
const uniqueColumns = getUniqueColumns(schema)
if (uniqueColumns.length > 0) {
const existingRows = await db
.select({ id: userTableRows.id, data: userTableRows.data })
.from(userTableRows)
.where(eq(userTableRows.tableId, tableId))
const uniqueValidation = validateUniqueConstraints(
rowData,
schema,
existingRows.map((r) => ({ id: r.id, data: r.data as RowData })),
excludeRowId
)
if (!uniqueValidation.valid) {
return {
valid: false,
response: NextResponse.json(
{ error: 'Unique constraint violation', details: uniqueValidation.errors },
{ status: 400 }
),
}
}
}
}
return { valid: true }
}
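For context, the call pattern documented in the removed helpers file still applies to the consolidated function. A minimal sketch of a route handler consuming the result (names such as `validated`, `table`, and `tableId` are assumed to be resolved by the caller, as in the original @example):

```typescript
// Sketch (not part of this commit): consuming validateRowData in a route handler.
const result = await validateRowData({
  rowData: validated.data,   // parsed request body (assumed)
  schema: table.schema,      // table record loaded earlier (assumed)
  tableId,
  checkUnique: true,
})
if (!result.valid) return result.response // pre-formatted 400 NextResponse
// proceed with the insert/update
```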
/**
* Validates multiple rows for a batch insert (size, schema, and unique constraints, including duplicates within the batch itself).
*/
export async function validateBatchRows(
options: ValidateBatchRowsOptions
): Promise<ValidationSuccess | ValidationFailure> {
const { rows, schema, tableId, checkUnique = true } = options
const errors: BatchRowError[] = []
for (let i = 0; i < rows.length; i++) {
const rowData = rows[i]
const sizeValidation = validateRowSize(rowData)
if (!sizeValidation.valid) {
errors.push({ row: i, errors: sizeValidation.errors })
continue
}
const schemaValidation = validateRowAgainstSchema(rowData, schema)
if (!schemaValidation.valid) {
errors.push({ row: i, errors: schemaValidation.errors })
}
}
if (errors.length > 0) {
return {
valid: false,
response: NextResponse.json(
{ error: 'Validation failed for some rows', details: errors },
{ status: 400 }
),
}
}
if (checkUnique) {
const uniqueColumns = getUniqueColumns(schema)
if (uniqueColumns.length > 0) {
const existingRows = await db
.select({ id: userTableRows.id, data: userTableRows.data })
.from(userTableRows)
.where(eq(userTableRows.tableId, tableId))
for (let i = 0; i < rows.length; i++) {
const rowData = rows[i]
const batchRows = rows.slice(0, i).map((data, idx) => ({ id: `batch_${idx}`, data }))
const uniqueValidation = validateUniqueConstraints(rowData, schema, [
...existingRows.map((r) => ({ id: r.id, data: r.data as RowData })),
...batchRows,
])
if (!uniqueValidation.valid) {
errors.push({ row: i, errors: uniqueValidation.errors })
}
}
if (errors.length > 0) {
return {
valid: false,
response: NextResponse.json(
{ error: 'Unique constraint violations in batch', details: errors },
{ status: 400 }
),
}
}
}
}
return { valid: true }
}
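The batch variant follows the same shape; a sketch based on the @example removed from the helpers file (again, `validated.rows`, `table`, and `tableId` are assumed to come from the surrounding handler):

```typescript
// Sketch (not part of this commit): batch insert validation.
const result = await validateBatchRows({
  rows: validated.rows,
  schema: table.schema,
  tableId,
})
if (!result.valid) return result.response // 400 with per-row { row, errors } details
// proceed with the batch insert
```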
/** Validates table name format and length. */
export function validateTableName(name: string): ValidationResult {
const errors: string[] = []
@@ -29,44 +189,10 @@ export function validateTableName(name: string): ValidationResult {
)
}
return {
valid: errors.length === 0,
errors,
}
}
export function validateColumnDefinition(column: ColumnDefinition): ValidationResult {
const errors: string[] = []
if (!column.name || typeof column.name !== 'string') {
errors.push('Column name is required')
return { valid: false, errors }
}
if (column.name.length > TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH) {
errors.push(
`Column name "${column.name}" exceeds maximum length (${TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH} characters)`
)
}
if (!NAME_PATTERN.test(column.name)) {
errors.push(
`Column name "${column.name}" must start with letter or underscore, followed by alphanumeric or underscore`
)
}
if (!COLUMN_TYPES.includes(column.type)) {
errors.push(
`Column "${column.name}" has invalid type "${column.type}". Valid types: ${COLUMN_TYPES.join(', ')}`
)
}
return {
valid: errors.length === 0,
errors,
}
return { valid: errors.length === 0, errors }
}
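As a quick illustration of the synchronous name check (the valid and invalid inputs below are taken from the unit tests and the NAME_PATTERN error message; exact error strings are not reproduced here):

```typescript
// Sketch: validateTableName returns a plain ValidationResult.
validateTableName('user_data') // { valid: true, errors: [] }
validateTableName('1users')    // invalid: names must start with a letter or underscore
```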
/** Validates table schema structure and column definitions. */
export function validateTableSchema(schema: TableSchema): ValidationResult {
const errors: string[] = []
@@ -99,23 +225,10 @@ export function validateTableSchema(schema: TableSchema): ValidationResult {
errors.push('Duplicate column names found')
}
return {
valid: errors.length === 0,
errors,
}
}
export function validateRowSize(data: RowData): ValidationResult {
const size = JSON.stringify(data).length
if (size > TABLE_LIMITS.MAX_ROW_SIZE_BYTES) {
return {
valid: false,
errors: [`Row size exceeds limit (${size} bytes > ${TABLE_LIMITS.MAX_ROW_SIZE_BYTES} bytes)`],
}
}
return { valid: true, errors: [] }
return { valid: errors.length === 0, errors }
}
/** Validates row data matches schema column types and required fields. */
export function validateRowAgainstSchema(data: RowData, schema: TableSchema): ValidationResult {
const errors: string[] = []
@@ -165,25 +278,31 @@ export function validateRowAgainstSchema(data: RowData, schema: TableSchema): Va
}
}
return {
valid: errors.length === 0,
errors,
}
return { valid: errors.length === 0, errors }
}
/** Validates row data size is within limits. */
export function validateRowSize(data: RowData): ValidationResult {
const size = JSON.stringify(data).length
if (size > TABLE_LIMITS.MAX_ROW_SIZE_BYTES) {
return {
valid: false,
errors: [`Row size exceeds limit (${size} bytes > ${TABLE_LIMITS.MAX_ROW_SIZE_BYTES} bytes)`],
}
}
return { valid: true, errors: [] }
}
/** Returns columns with unique constraint. */
export function getUniqueColumns(schema: TableSchema): ColumnDefinition[] {
return schema.columns.filter((col) => col.unique === true)
}
interface ExistingRow {
id: string
data: RowData
}
/** Validates unique constraints against existing rows. */
export function validateUniqueConstraints(
data: RowData,
schema: TableSchema,
existingRows: ExistingRow[],
existingRows: { id: string; data: RowData }[],
excludeRowId?: string
): ValidationResult {
const errors: string[] = []
@@ -191,15 +310,10 @@ export function validateUniqueConstraints(
for (const column of uniqueColumns) {
const value = data[column.name]
if (value === null || value === undefined) {
continue
}
if (value === null || value === undefined) continue
const duplicate = existingRows.find((row) => {
if (excludeRowId && row.id === excludeRowId) {
return false
}
if (excludeRowId && row.id === excludeRowId) return false
const existingValue = row.data[column.name]
if (typeof value === 'string' && typeof existingValue === 'string') {
@@ -215,8 +329,35 @@ export function validateUniqueConstraints(
}
}
return {
valid: errors.length === 0,
errors,
}
return { valid: errors.length === 0, errors }
}
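For update paths, `excludeRowId` keeps the row being edited out of the duplicate scan. A hedged sketch (variable names here are hypothetical; `existingRows` has the same `{ id, data }` shape the helpers load from `userTableRows`):

```typescript
// Sketch (not part of this commit): unique check on update, ignoring the edited row.
const result = validateUniqueConstraints(
  updatedData,   // hypothetical new values for the row
  table.schema,
  existingRows,  // [{ id, data }] previously loaded from userTableRows
  rowId          // exclude the row being updated from the duplicate scan
)
if (!result.valid) {
  // result.errors describes the violated unique constraints
}
```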
/** Validates column definition format and type. */
export function validateColumnDefinition(column: ColumnDefinition): ValidationResult {
const errors: string[] = []
if (!column.name || typeof column.name !== 'string') {
errors.push('Column name is required')
return { valid: false, errors }
}
if (column.name.length > TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH) {
errors.push(
`Column name "${column.name}" exceeds maximum length (${TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH} characters)`
)
}
if (!NAME_PATTERN.test(column.name)) {
errors.push(
`Column name "${column.name}" must start with letter or underscore, followed by alphanumeric or underscore`
)
}
if (!COLUMN_TYPES.includes(column.type)) {
errors.push(
`Column "${column.name}" has invalid type "${column.type}". Valid types: ${COLUMN_TYPES.join(', ')}`
)
}
return { valid: errors.length === 0, errors }
}
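And a small sketch of the column check, assuming `name` and `type` are the only required fields of ColumnDefinition and taking the type from COLUMN_TYPES so that only the naming rule fails:

```typescript
// Sketch: a column name that violates NAME_PATTERN (must start with a letter or underscore).
const result = validateColumnDefinition({ name: '1st_place', type: COLUMN_TYPES[0] })
// result.valid === false; result.errors explains the naming rule
```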

View File

@@ -1,272 +0,0 @@
/**
* High-level validation helpers for table row operations.
*
* These helpers consolidate common validation patterns (size, schema, uniqueness)
* into reusable functions that return formatted error responses.
*
* @module lib/table/validation/helpers
*/
import { db } from '@sim/db'
import { userTableRows } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import type { RowData, TableSchema } from '../types'
import {
getUniqueColumns,
validateRowAgainstSchema,
validateRowSize,
validateUniqueConstraints,
} from './schema'
/**
* Result of a successful row validation.
*/
interface ValidationSuccess {
valid: true
}
/**
* Result of a failed row validation with pre-formatted response.
*/
interface ValidationFailure {
valid: false
response: NextResponse
}
/**
* Options for single row validation.
*/
export interface ValidateRowOptions {
/** The row data to validate */
rowData: RowData
/** The table schema to validate against */
schema: TableSchema
/** The table ID (required for unique constraint checking) */
tableId: string
/** Row ID to exclude from unique checks (for updates) */
excludeRowId?: string
/** Whether to check unique constraints (default: true) */
checkUnique?: boolean
}
/**
* Validates a single row against size limits, schema, and unique constraints.
*
* This function consolidates the common validation pattern used across
* insert, update, and upsert operations into a single reusable helper.
*
* @param options - Validation options
* @returns Either success or a failure with pre-formatted error response
*
* @example
* ```typescript
* const result = await validateRowData({
* rowData: validated.data,
* schema: table.schema,
* tableId,
* checkUnique: true,
* })
*
* if (!result.valid) return result.response
* // Proceed with insert/update
* ```
*/
export async function validateRowData(
options: ValidateRowOptions
): Promise<ValidationSuccess | ValidationFailure> {
const { rowData, schema, tableId, excludeRowId, checkUnique = true } = options
// 1. Validate row size
const sizeValidation = validateRowSize(rowData)
if (!sizeValidation.valid) {
return {
valid: false,
response: NextResponse.json(
{ error: 'Invalid row data', details: sizeValidation.errors },
{ status: 400 }
),
}
}
// 2. Validate row against schema
const schemaValidation = validateRowAgainstSchema(rowData, schema)
if (!schemaValidation.valid) {
return {
valid: false,
response: NextResponse.json(
{ error: 'Row data does not match schema', details: schemaValidation.errors },
{ status: 400 }
),
}
}
// 3. Check unique constraints if requested
if (checkUnique) {
const uniqueColumns = getUniqueColumns(schema)
if (uniqueColumns.length > 0) {
const existingRows = await db
.select({
id: userTableRows.id,
data: userTableRows.data,
})
.from(userTableRows)
.where(eq(userTableRows.tableId, tableId))
const uniqueValidation = validateUniqueConstraints(
rowData,
schema,
existingRows.map((r) => ({ id: r.id, data: r.data as RowData })),
excludeRowId
)
if (!uniqueValidation.valid) {
return {
valid: false,
response: NextResponse.json(
{ error: 'Unique constraint violation', details: uniqueValidation.errors },
{ status: 400 }
),
}
}
}
}
return { valid: true }
}
/**
* Error structure for batch row validation.
*/
export interface BatchRowError {
row: number
errors: string[]
}
/**
* Result of a successful batch validation.
*/
interface BatchValidationSuccess {
valid: true
}
/**
* Result of a failed batch validation with pre-formatted response.
*/
interface BatchValidationFailure {
valid: false
response: NextResponse
}
/**
* Options for batch row validation.
*/
export interface ValidateBatchRowsOptions {
/** Array of row data to validate */
rows: RowData[]
/** The table schema to validate against */
schema: TableSchema
/** The table ID (required for unique constraint checking) */
tableId: string
/** Whether to check unique constraints (default: true) */
checkUnique?: boolean
}
/**
* Validates multiple rows for batch insert operations.
*
* Performs size and schema validation on all rows, then checks unique
* constraints against both existing rows and other rows in the batch.
*
* @param options - Batch validation options
* @returns Either success or a failure with pre-formatted error response
*
* @example
* ```typescript
* const result = await validateBatchRows({
* rows: validated.rows,
* schema: table.schema,
* tableId,
* })
*
* if (!result.valid) return result.response
* // Proceed with batch insert
* ```
*/
export async function validateBatchRows(
options: ValidateBatchRowsOptions
): Promise<BatchValidationSuccess | BatchValidationFailure> {
const { rows, schema, tableId, checkUnique = true } = options
const errors: BatchRowError[] = []
// 1. Validate size and schema for all rows
for (let i = 0; i < rows.length; i++) {
const rowData = rows[i]
const sizeValidation = validateRowSize(rowData)
if (!sizeValidation.valid) {
errors.push({ row: i, errors: sizeValidation.errors })
continue
}
const schemaValidation = validateRowAgainstSchema(rowData, schema)
if (!schemaValidation.valid) {
errors.push({ row: i, errors: schemaValidation.errors })
}
}
if (errors.length > 0) {
return {
valid: false,
response: NextResponse.json(
{ error: 'Validation failed for some rows', details: errors },
{ status: 400 }
),
}
}
// 2. Check unique constraints if requested
if (checkUnique) {
const uniqueColumns = getUniqueColumns(schema)
if (uniqueColumns.length > 0) {
const existingRows = await db
.select({
id: userTableRows.id,
data: userTableRows.data,
})
.from(userTableRows)
.where(eq(userTableRows.tableId, tableId))
for (let i = 0; i < rows.length; i++) {
const rowData = rows[i]
// Check against other rows in the batch (before this one)
const batchRows = rows.slice(0, i).map((data, idx) => ({
id: `batch_${idx}`,
data,
}))
const uniqueValidation = validateUniqueConstraints(rowData, schema, [
...existingRows.map((r) => ({ id: r.id, data: r.data as RowData })),
...batchRows,
])
if (!uniqueValidation.valid) {
errors.push({ row: i, errors: uniqueValidation.errors })
}
}
if (errors.length > 0) {
return {
valid: false,
response: NextResponse.json(
{ error: 'Unique constraint violations in batch', details: errors },
{ status: 400 }
),
}
}
}
}
return { valid: true }
}

View File

@@ -1,8 +0,0 @@
/**
* Validation utilities for table schemas and row data.
*
* @module lib/table/validation
*/
export * from './helpers'
export * from './schema'
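Net effect of the commit: the former lib/table/validation/{schema,helpers,index} files collapse into a single validation module, and everything the barrel used to re-export is now defined and exported in one place, so a consumer's import specifier can keep the same shape. A hedged sketch of a consumer import after the change (the path alias is assumed; the updated tests use the relative './validation'):

```typescript
// Hypothetical consumer import; adjust the path or alias to the caller's location.
import {
  validateRowData,
  validateBatchRows,
  validateTableSchema,
  validateTableName,
} from '@/lib/table/validation'
```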