mirror of https://github.com/simstudioai/sim.git (synced 2026-01-22 13:28:04 -05:00)

Commit message: updates
@@ -6,12 +6,15 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { checkTableAccess, checkTableWriteAccess } from '../utils'
import { checkTableAccess, checkTableWriteAccess, verifyTableWorkspace } from '../utils'

const logger = createLogger('TableDetailAPI')

/**
* Schema for getting a table by ID
*/
const GetTableSchema = z.object({
workspaceId: z.string().min(1).optional(), // Optional for backward compatibility
workspaceId: z.string().min(1, 'Workspace ID is required').optional(), // Optional for backward compatibility, validated via table access
})

/**
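
Note: the custom messages added throughout this commit surface directly in ZodError issues. A minimal standalone sketch of the behaviour (the schema is redeclared here purely for illustration):

import { z } from 'zod'

// Mirrors the updated GetTableSchema above.
const GetTableSketch = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required').optional(),
})

const parsed = GetTableSketch.safeParse({ workspaceId: '' })
if (!parsed.success) {
  // Prints 'Workspace ID is required' instead of zod's generic default message.
  console.log(parsed.error.issues[0].message)
}
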
@@ -32,6 +35,11 @@ export async function GET(
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}

const { searchParams } = new URL(request.url)
const validated = GetTableSchema.parse({
workspaceId: searchParams.get('workspaceId'),
})

// Check table access (similar to knowledge base access control)
const accessCheck = await checkTableAccess(tableId, authResult.userId)

@@ -46,6 +54,17 @@ export async function GET(
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}

// Security check: If workspaceId is provided, verify it matches the table's workspace
if (validated.workspaceId) {
const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
if (!isValidWorkspace) {
logger.warn(
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${accessCheck.table.workspaceId}`
)
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
}
}

// Get table (workspaceId validation is now handled by access check)
const [table] = await db
.select()
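
verifyTableWorkspace comes from '../utils' and is not shown in this diff; a hedged sketch of the comparison it presumably performs (the table loader is passed in here to avoid assuming the database layer):

// Hypothetical illustration only; the real helper lives in '../utils'.
async function verifyTableWorkspaceSketch(
  tableId: string,
  workspaceId: string,
  loadTable: (id: string) => Promise<{ workspaceId: string } | null>
): Promise<boolean> {
  const table = await loadTable(tableId)
  // Valid only when the table exists and belongs to the caller-supplied workspace.
  return table !== null && table.workspaceId === workspaceId
}
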
@@ -60,22 +79,25 @@ export async function GET(
logger.info(`[${requestId}] Retrieved table ${tableId} for user ${authResult.userId}`)

return NextResponse.json({
table: {
id: table.id,
name: table.name,
description: table.description,
schema: {
columns: (table.schema as any).columns.map((col: any) => ({
name: col.name,
type: col.type,
required: col.required ?? false,
unique: col.unique ?? false,
})),
success: true,
data: {
table: {
id: table.id,
name: table.name,
description: table.description,
schema: {
columns: (table.schema as any).columns.map((col: any) => ({
name: col.name,
type: col.type,
required: col.required ?? false,
unique: col.unique ?? false,
})),
},
rowCount: table.rowCount,
maxRows: table.maxRows,
createdAt: table.createdAt.toISOString(),
updatedAt: table.updatedAt.toISOString(),
},
rowCount: table.rowCount,
maxRows: table.maxRows,
createdAt: table.createdAt.toISOString(),
updatedAt: table.updatedAt.toISOString(),
},
})
} catch (error) {
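
The change above is the general pattern of this commit: every handler now wraps its payload in a { success, data } envelope. Expressed as TypeScript types (the alias names are illustrative; the field names are taken from the handler above):

interface ApiEnvelope<T> {
  success: true
  data: T
}

interface TableColumnPayload {
  name: string
  type: 'string' | 'number' | 'boolean' | 'date' | 'json'
  required: boolean
  unique: boolean
}

interface GetTablePayload {
  table: {
    id: string
    name: string
    description: string | null // nullability assumed
    schema: { columns: TableColumnPayload[] }
    rowCount: number
    maxRows: number
    createdAt: string // ISO timestamp
    updatedAt: string // ISO timestamp
  }
}

// The GET handler above now resolves to ApiEnvelope<GetTablePayload>.
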
@@ -139,8 +161,10 @@ export async function DELETE(
logger.info(`[${requestId}] Deleted table ${tableId} for user ${authResult.userId}`)

return NextResponse.json({
message: 'Table deleted successfully',
success: true,
data: {
message: 'Table deleted successfully',
},
})
} catch (error) {
if (error instanceof z.ZodError) {
@@ -17,17 +17,26 @@ import { checkTableAccess, checkTableWriteAccess, verifyTableWorkspace } from '.

const logger = createLogger('TableRowAPI')

/**
* Schema for getting a single row by ID
*/
const GetRowSchema = z.object({
workspaceId: z.string().min(1).optional(), // Optional for backward compatibility, validated via table access
workspaceId: z.string().min(1, 'Workspace ID is required').optional(), // Optional for backward compatibility, validated via table access
})

/**
* Schema for updating a single row
*/
const UpdateRowSchema = z.object({
workspaceId: z.string().min(1).optional(), // Optional for backward compatibility, validated via table access
data: z.record(z.any()),
workspaceId: z.string().min(1, 'Workspace ID is required').optional(), // Optional for backward compatibility, validated via table access
data: z.record(z.any(), { required_error: 'Row data is required' }),
})

/**
* Schema for deleting a single row
*/
const DeleteRowSchema = z.object({
workspaceId: z.string().min(1).optional(), // Optional for backward compatibility, validated via table access
workspaceId: z.string().min(1, 'Workspace ID is required').optional(), // Optional for backward compatibility, validated via table access
})

/**
@@ -103,11 +112,14 @@ export async function GET(
logger.info(`[${requestId}] Retrieved row ${rowId} from table ${tableId}`)

return NextResponse.json({
row: {
id: row.id,
data: row.data,
createdAt: row.createdAt.toISOString(),
updatedAt: row.updatedAt.toISOString(),
success: true,
data: {
row: {
id: row.id,
data: row.data,
createdAt: row.createdAt.toISOString(),
updatedAt: row.updatedAt.toISOString(),
},
},
})
} catch (error) {
@@ -250,13 +262,16 @@ export async function PATCH(
logger.info(`[${requestId}] Updated row ${rowId} in table ${tableId}`)

return NextResponse.json({
row: {
id: updatedRow.id,
data: updatedRow.data,
createdAt: updatedRow.createdAt.toISOString(),
updatedAt: updatedRow.updatedAt.toISOString(),
success: true,
data: {
row: {
id: updatedRow.id,
data: updatedRow.data,
createdAt: updatedRow.createdAt.toISOString(),
updatedAt: updatedRow.updatedAt.toISOString(),
},
message: 'Row updated successfully',
},
message: 'Row updated successfully',
})
} catch (error) {
if (error instanceof z.ZodError) {
@@ -345,8 +360,11 @@ export async function DELETE(
logger.info(`[${requestId}] Deleted row ${rowId} from table ${tableId}`)

return NextResponse.json({
message: 'Row deleted successfully',
deletedCount: 1,
success: true,
data: {
message: 'Row deleted successfully',
deletedCount: 1,
},
})
} catch (error) {
if (error instanceof z.ZodError) {
@@ -19,35 +19,83 @@ import { checkTableAccess, checkTableWriteAccess, verifyTableWorkspace } from '.

const logger = createLogger('TableRowsAPI')

/**
* Schema for inserting a single row into a table
*/
const InsertRowSchema = z.object({
workspaceId: z.string().min(1).optional(), // Optional for backward compatibility, validated via table access
data: z.record(z.any()),
workspaceId: z.string().min(1, 'Workspace ID is required').optional(), // Optional for backward compatibility, validated via table access
data: z.record(z.any(), { required_error: 'Row data is required' }),
})

/**
* Schema for batch inserting multiple rows
*
* Limits:
* - Maximum 1000 rows per batch
*/
const BatchInsertRowsSchema = z.object({
workspaceId: z.string().min(1).optional(), // Optional for backward compatibility, validated via table access
rows: z.array(z.record(z.any())).min(1).max(1000), // Max 1000 rows per batch
workspaceId: z.string().min(1, 'Workspace ID is required').optional(), // Optional for backward compatibility, validated via table access
rows: z
.array(z.record(z.any()), { required_error: 'Rows array is required' })
.min(1, 'At least one row is required')
.max(1000, 'Cannot insert more than 1000 rows per batch'),
})

/**
* Schema for querying rows with filtering, sorting, and pagination
*/
const QueryRowsSchema = z.object({
workspaceId: z.string().min(1).optional(), // Optional for backward compatibility, validated via table access
workspaceId: z.string().min(1, 'Workspace ID is required').optional(), // Optional for backward compatibility, validated via table access
filter: z.record(z.any()).optional(),
sort: z.record(z.enum(['asc', 'desc'])).optional(),
limit: z.coerce.number().int().min(1).max(TABLE_LIMITS.MAX_QUERY_LIMIT).optional().default(100),
offset: z.coerce.number().int().min(0).optional().default(0),
limit: z.coerce
.number({ required_error: 'Limit must be a number' })
.int('Limit must be an integer')
.min(1, 'Limit must be at least 1')
.max(TABLE_LIMITS.MAX_QUERY_LIMIT, `Limit cannot exceed ${TABLE_LIMITS.MAX_QUERY_LIMIT}`)
.optional()
.default(100),
offset: z.coerce
.number({ required_error: 'Offset must be a number' })
.int('Offset must be an integer')
.min(0, 'Offset must be 0 or greater')
.optional()
.default(0),
})

/**
* Schema for updating multiple rows by filter criteria
*
* Limits:
* - Maximum 1000 rows can be updated per operation (safety limit)
*/
const UpdateRowsByFilterSchema = z.object({
workspaceId: z.string().min(1).optional(), // Optional for backward compatibility, validated via table access
filter: z.record(z.any()), // Required - must specify what to update
data: z.record(z.any()), // New data to set
limit: z.coerce.number().int().min(1).max(1000).optional(), // Safety limit for bulk updates
workspaceId: z.string().min(1, 'Workspace ID is required').optional(), // Optional for backward compatibility, validated via table access
filter: z.record(z.any(), { required_error: 'Filter criteria is required' }),
data: z.record(z.any(), { required_error: 'Update data is required' }),
limit: z.coerce
.number({ required_error: 'Limit must be a number' })
.int('Limit must be an integer')
.min(1, 'Limit must be at least 1')
.max(1000, 'Cannot update more than 1000 rows per operation')
.optional(),
})

/**
* Schema for deleting multiple rows by filter criteria
*
* Limits:
* - Maximum 1000 rows can be deleted per operation (safety limit)
*/
const DeleteRowsByFilterSchema = z.object({
workspaceId: z.string().min(1).optional(), // Optional for backward compatibility, validated via table access
filter: z.record(z.any()), // Required - must specify what to delete
limit: z.coerce.number().int().min(1).max(1000).optional(), // Safety limit for bulk deletes
workspaceId: z.string().min(1, 'Workspace ID is required').optional(), // Optional for backward compatibility, validated via table access
filter: z.record(z.any(), { required_error: 'Filter criteria is required' }),
limit: z.coerce
.number({ required_error: 'Limit must be a number' })
.int('Limit must be an integer')
.min(1, 'Limit must be at least 1')
.max(1000, 'Cannot delete more than 1000 rows per operation')
.optional(),
})

/**
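
z.coerce is what lets limit and offset arrive as query-string values; a standalone sketch of the behaviour these schemas rely on (a literal stands in for TABLE_LIMITS.MAX_QUERY_LIMIT):

import { z } from 'zod'

const QueryRowsSketch = z.object({
  limit: z.coerce.number().int().min(1).max(500, 'Limit cannot exceed 500').optional().default(100),
  offset: z.coerce.number().int().min(0).optional().default(0),
})

// Query-string values are strings; coerce converts them before validation runs.
QueryRowsSketch.parse({ limit: '25', offset: '50' }) // { limit: 25, offset: 50 }
QueryRowsSketch.parse({}) // defaults apply: { limit: 100, offset: 0 }
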
@@ -205,14 +253,17 @@ async function handleBatchInsert(requestId: string, tableId: string, body: any,
logger.info(`[${requestId}] Batch inserted ${insertedRows.length} rows into table ${tableId}`)

return NextResponse.json({
rows: insertedRows.map((r) => ({
id: r.id,
data: r.data,
createdAt: r.createdAt.toISOString(),
updatedAt: r.updatedAt.toISOString(),
})),
insertedCount: insertedRows.length,
message: `Successfully inserted ${insertedRows.length} rows`,
success: true,
data: {
rows: insertedRows.map((r) => ({
id: r.id,
data: r.data,
createdAt: r.createdAt.toISOString(),
updatedAt: r.updatedAt.toISOString(),
})),
insertedCount: insertedRows.length,
message: `Successfully inserted ${insertedRows.length} rows`,
},
})
}

@@ -364,13 +415,16 @@ export async function POST(
logger.info(`[${requestId}] Inserted row ${rowId} into table ${tableId}`)

return NextResponse.json({
row: {
id: row.id,
data: row.data,
createdAt: row.createdAt.toISOString(),
updatedAt: row.updatedAt.toISOString(),
success: true,
data: {
row: {
id: row.id,
data: row.data,
createdAt: row.createdAt.toISOString(),
updatedAt: row.updatedAt.toISOString(),
},
message: 'Row inserted successfully',
},
message: 'Row inserted successfully',
})
} catch (error) {
if (error instanceof z.ZodError) {
@@ -510,16 +564,19 @@ export async function GET(
)

return NextResponse.json({
rows: rows.map((r) => ({
id: r.id,
data: r.data,
createdAt: r.createdAt.toISOString(),
updatedAt: r.updatedAt.toISOString(),
})),
rowCount: rows.length,
totalCount: Number(totalCount),
limit: validated.limit,
offset: validated.offset,
success: true,
data: {
rows: rows.map((r) => ({
id: r.id,
data: r.data,
createdAt: r.createdAt.toISOString(),
updatedAt: r.updatedAt.toISOString(),
})),
rowCount: rows.length,
totalCount: Number(totalCount),
limit: validated.limit,
offset: validated.offset,
},
})
} catch (error) {
if (error instanceof z.ZodError) {
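
A caller paginating against the new shape unwraps data before reading rowCount and totalCount; a hedged client-side sketch (the /rows route path is assumed from context):

async function fetchRowPage(tableId: string, offset: number, limit = 100) {
  const res = await fetch(`/api/table/${tableId}/rows?limit=${limit}&offset=${offset}`)
  if (!res.ok) throw new Error(`Query failed with status ${res.status}`)
  const body = await res.json()
  // New shape: { success: true, data: { rows, rowCount, totalCount, limit, offset } }
  const { rows, totalCount } = body.data
  return { rows, hasMore: offset + rows.length < totalCount }
}
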
@@ -633,8 +690,11 @@ export async function PUT(
if (matchingRows.length === 0) {
return NextResponse.json(
{
message: 'No rows matched the filter criteria',
updatedCount: 0,
success: true,
data: {
message: 'No rows matched the filter criteria',
updatedCount: 0,
},
},
{ status: 200 }
)
@@ -722,9 +782,12 @@ export async function PUT(
logger.info(`[${requestId}] Updated ${matchingRows.length} rows in table ${tableId}`)

return NextResponse.json({
message: 'Rows updated successfully',
updatedCount: matchingRows.length,
updatedRowIds: matchingRows.map((r) => r.id),
success: true,
data: {
message: 'Rows updated successfully',
updatedCount: matchingRows.length,
updatedRowIds: matchingRows.map((r) => r.id),
},
})
} catch (error) {
if (error instanceof z.ZodError) {
@@ -819,8 +882,11 @@ export async function DELETE(
if (matchingRows.length === 0) {
return NextResponse.json(
{
message: 'No rows matched the filter criteria',
deletedCount: 0,
success: true,
data: {
message: 'No rows matched the filter criteria',
deletedCount: 0,
},
},
{ status: 200 }
)
@@ -866,9 +932,12 @@ export async function DELETE(
logger.info(`[${requestId}] Deleted ${matchingRows.length} rows from table ${tableId}`)

return NextResponse.json({
message: 'Rows deleted successfully',
deletedCount: matchingRows.length,
deletedRowIds: rowIds,
success: true,
data: {
message: 'Rows deleted successfully',
deletedCount: matchingRows.length,
deletedRowIds: rowIds,
},
})
} catch (error) {
if (error instanceof z.ZodError) {
@@ -12,9 +12,15 @@ import { checkTableWriteAccess, verifyTableWorkspace } from '../../utils'

const logger = createLogger('TableUpsertAPI')

/**
* Schema for upserting a row (insert or update based on unique column constraints)
*
* If a row with matching unique field(s) exists, it will be updated.
* Otherwise, a new row will be inserted.
*/
const UpsertRowSchema = z.object({
workspaceId: z.string().min(1).optional(), // Optional for backward compatibility, validated via table access
data: z.record(z.any()),
workspaceId: z.string().min(1, 'Workspace ID is required').optional(), // Optional for backward compatibility, validated via table access
data: z.record(z.any(), { required_error: 'Row data is required' }),
})

/**
@@ -158,14 +164,17 @@ export async function POST(
logger.info(`[${requestId}] Upserted (updated) row ${updatedRow.id} in table ${tableId}`)

return NextResponse.json({
row: {
id: updatedRow.id,
data: updatedRow.data,
createdAt: updatedRow.createdAt.toISOString(),
updatedAt: updatedRow.updatedAt.toISOString(),
success: true,
data: {
row: {
id: updatedRow.id,
data: updatedRow.data,
createdAt: updatedRow.createdAt.toISOString(),
updatedAt: updatedRow.updatedAt.toISOString(),
},
operation: 'update',
message: 'Row updated successfully',
},
operation: 'update',
message: 'Row updated successfully',
})
}
// Insert new row
@@ -193,14 +202,17 @@ export async function POST(
logger.info(`[${requestId}] Upserted (inserted) row ${insertedRow.id} in table ${tableId}`)

return NextResponse.json({
row: {
id: insertedRow.id,
data: insertedRow.data,
createdAt: insertedRow.createdAt.toISOString(),
updatedAt: insertedRow.updatedAt.toISOString(),
success: true,
data: {
row: {
id: insertedRow.id,
data: insertedRow.data,
createdAt: insertedRow.createdAt.toISOString(),
updatedAt: insertedRow.updatedAt.toISOString(),
},
operation: 'insert',
message: 'Row inserted successfully',
},
operation: 'insert',
message: 'Row inserted successfully',
})
} catch (error) {
if (error instanceof z.ZodError) {
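
The upsert payload now reports which branch ran via data.operation, so callers can discriminate on it; a short sketch (the payload type is written out here for illustration):

type UpsertPayload = {
  row: { id: string; data: Record<string, unknown>; createdAt: string; updatedAt: string }
  operation: 'update' | 'insert'
  message: string
}

function describeUpsert(payload: UpsertPayload): string {
  // 'update' means a row with matching unique field(s) existed; 'insert' means a new row was created.
  return payload.operation === 'update'
    ? `Updated existing row ${payload.row.id}`
    : `Inserted new row ${payload.row.id}`
}
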
@@ -11,32 +11,69 @@ import type { TableSchema } from '@/lib/table/validation'

const logger = createLogger('TableAPI')

/**
* Schema for table column definition
*/
const ColumnSchema = z.object({
name: z
.string()
.min(1)
.max(TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH)
.regex(/^[a-z_][a-z0-9_]*$/i, 'Invalid column name'),
type: z.enum(['string', 'number', 'boolean', 'date', 'json']),
.min(1, 'Column name is required')
.max(
TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH,
`Column name must be ${TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH} characters or less`
)
.regex(
/^[a-z_][a-z0-9_]*$/i,
'Column name must start with a letter or underscore and contain only alphanumeric characters and underscores'
),
type: z.enum(['string', 'number', 'boolean', 'date', 'json'], {
errorMap: () => ({
message: 'Column type must be one of: string, number, boolean, date, json',
}),
}),
required: z.boolean().optional().default(false),
unique: z.boolean().optional().default(false),
})

/**
* Schema for creating a new table
*/
const CreateTableSchema = z.object({
name: z
.string()
.min(1)
.max(TABLE_LIMITS.MAX_TABLE_NAME_LENGTH)
.regex(/^[a-z_][a-z0-9_]*$/i, 'Invalid table name'),
description: z.string().max(TABLE_LIMITS.MAX_DESCRIPTION_LENGTH).optional(),
.min(1, 'Table name is required')
.max(
TABLE_LIMITS.MAX_TABLE_NAME_LENGTH,
`Table name must be ${TABLE_LIMITS.MAX_TABLE_NAME_LENGTH} characters or less`
)
.regex(
/^[a-z_][a-z0-9_]*$/i,
'Table name must start with a letter or underscore and contain only alphanumeric characters and underscores'
),
description: z
.string()
.max(
TABLE_LIMITS.MAX_DESCRIPTION_LENGTH,
`Description must be ${TABLE_LIMITS.MAX_DESCRIPTION_LENGTH} characters or less`
)
.optional(),
schema: z.object({
columns: z.array(ColumnSchema).min(1).max(TABLE_LIMITS.MAX_COLUMNS_PER_TABLE),
columns: z
.array(ColumnSchema)
.min(1, 'Table must have at least one column')
.max(
TABLE_LIMITS.MAX_COLUMNS_PER_TABLE,
`Table cannot have more than ${TABLE_LIMITS.MAX_COLUMNS_PER_TABLE} columns`
),
}),
workspaceId: z.string().min(1),
workspaceId: z.string().min(1, 'Workspace ID is required'),
})

/**
* Schema for listing tables in a workspace
*/
const ListTablesSchema = z.object({
workspaceId: z.string().min(1),
workspaceId: z.string().min(1, 'Workspace ID is required'),
})

/**
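
The errorMap attached to the column type enum replaces zod's default "Invalid enum value" text; the same pattern in isolation:

import { z } from 'zod'

const ColumnTypeSketch = z.enum(['string', 'number', 'boolean', 'date', 'json'], {
  errorMap: () => ({
    message: 'Column type must be one of: string, number, boolean, date, json',
  }),
})

const result = ColumnTypeSketch.safeParse('uuid')
if (!result.success) {
  // Prints the custom message defined in the errorMap above.
  console.log(result.error.issues[0].message)
}
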
@@ -205,17 +242,20 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Created table ${tableId} in workspace ${params.workspaceId}`)

return NextResponse.json({
table: {
id: table.id,
name: table.name,
description: table.description,
schema: table.schema,
rowCount: table.rowCount,
maxRows: table.maxRows,
createdAt: table.createdAt.toISOString(),
updatedAt: table.updatedAt.toISOString(),
success: true,
data: {
table: {
id: table.id,
name: table.name,
description: table.description,
schema: table.schema,
rowCount: table.rowCount,
maxRows: table.maxRows,
createdAt: table.createdAt.toISOString(),
updatedAt: table.updatedAt.toISOString(),
},
message: 'Table created successfully',
},
message: 'Table created successfully',
})
} catch (error) {
if (error instanceof z.ZodError) {
@@ -287,20 +327,23 @@ export async function GET(request: NextRequest) {
logger.info(`[${requestId}] Listed ${tables.length} tables in workspace ${params.workspaceId}`)

return NextResponse.json({
tables: tables.map((t) => ({
...t,
schema: {
columns: (t.schema as any).columns.map((col: any) => ({
name: col.name,
type: col.type,
required: col.required ?? false,
unique: col.unique ?? false,
})),
},
createdAt: t.createdAt.toISOString(),
updatedAt: t.updatedAt.toISOString(),
})),
totalCount: tables.length,
success: true,
data: {
tables: tables.map((t) => ({
...t,
schema: {
columns: (t.schema as any).columns.map((col: any) => ({
name: col.name,
type: col.type,
required: col.required ?? false,
unique: col.unique ?? false,
})),
},
createdAt: t.createdAt.toISOString(),
updatedAt: t.updatedAt.toISOString(),
})),
totalCount: tables.length,
},
})
} catch (error) {
if (error instanceof z.ZodError) {
@@ -95,11 +95,14 @@ export function Dropdown({

const dependsOnFields = useMemo(() => getDependsOnFields(dependsOn), [dependsOn])

// Stable empty array for when there are no dependencies
const emptyDependencyValues = useMemo(() => [], [])

const activeWorkflowId = useWorkflowRegistry((s) => s.activeWorkflowId)
const dependencyValues = useSubBlockStore(
const dependencyValuesFromStore = useSubBlockStore(
useCallback(
(state) => {
if (dependsOnFields.length === 0 || !activeWorkflowId) return []
if (dependsOnFields.length === 0 || !activeWorkflowId) return null
const workflowValues = state.workflowValues[activeWorkflowId] || {}
const blockValues = workflowValues[blockId] || {}
return dependsOnFields.map((depKey) => blockValues[depKey] ?? null)
@@ -108,6 +111,8 @@ export function Dropdown({
)
)

const dependencyValues = dependencyValuesFromStore ?? emptyDependencyValues

const [storeInitialized, setStoreInitialized] = useState(false)
const [fetchedOptions, setFetchedOptions] = useState<Array<{ label: string; id: string }>>([])
const [isLoadingOptions, setIsLoadingOptions] = useState(false)
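
Why the selector now returns null and the component falls back to a shared emptyDependencyValues: a selector that builds a fresh [] on every call is never reference-equal to its previous result, so the store subscription treats every render as a change. A minimal illustration of the referential-equality point, independent of the store library:

// Two fresh arrays are never reference-equal, which is what most store
// subscriptions use to decide whether to notify/re-render.
const a: string[] = []
const b: string[] = []
console.log(a === b) // false

// Returning a shared constant (or null mapped to one afterwards, as above)
// keeps the selector output stable across renders.
const emptyDependencyValues: string[] = []
const selectStable = () => emptyDependencyValues
console.log(selectStable() === selectStable()) // true
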
@@ -3,6 +3,60 @@ import { conditionsToFilter, sortConditionsToSort } from '@/lib/table/filter-bui
import type { BlockConfig } from '@/blocks/types'
import type { TableQueryResponse } from '@/tools/table/types'

/**
* Fetches available tables for the dropdown selector.
* Defined outside BlockConfig to maintain stable reference and prevent infinite re-renders.
*/
const fetchTableOptions = async () => {
const { useWorkflowRegistry } = await import('@/stores/workflows/registry/store')

const workspaceId = useWorkflowRegistry.getState().hydration.workspaceId
if (!workspaceId) {
return []
}

try {
const response = await fetch(`/api/table?workspaceId=${workspaceId}`)
if (!response.ok) {
return []
}

const data = await response.json()
return (data.data?.tables || []).map((table: any) => ({
label: table.name,
id: table.id,
}))
} catch (error) {
return []
}
}

/**
* Fetches a specific table option by ID.
* Defined outside BlockConfig to maintain stable reference and prevent infinite re-renders.
*/
const fetchTableOptionById = async (_blockId: string, _subBlockId: string, tableId: string) => {
const { useWorkflowRegistry } = await import('@/stores/workflows/registry/store')

const workspaceId = useWorkflowRegistry.getState().hydration.workspaceId
if (!workspaceId) {
return null
}

try {
const response = await fetch(`/api/table?workspaceId=${workspaceId}`)
if (!response.ok) {
return null
}

const data = await response.json()
const table = (data.data?.tables || []).find((t: any) => t.id === tableId)
return table ? { label: table.name, id: table.id } : null
} catch (error) {
return null
}
}

export const TableBlock: BlockConfig<TableQueryResponse> = {
type: 'table',
name: 'Table',
@@ -41,50 +95,8 @@ export const TableBlock: BlockConfig<TableQueryResponse> = {
placeholder: 'Select a table',
required: true,
options: [],
fetchOptions: async () => {
const { useWorkflowRegistry } = await import('@/stores/workflows/registry/store')

const workspaceId = useWorkflowRegistry.getState().hydration.workspaceId
if (!workspaceId) {
return []
}

try {
const response = await fetch(`/api/table?workspaceId=${workspaceId}`)
if (!response.ok) {
return []
}

const data = await response.json()
return (data.tables || []).map((table: any) => ({
label: table.name,
id: table.id,
}))
} catch (error) {
return []
}
},
fetchOptionById: async (_blockId: string, _subBlockId: string, tableId: string) => {
const { useWorkflowRegistry } = await import('@/stores/workflows/registry/store')

const workspaceId = useWorkflowRegistry.getState().hydration.workspaceId
if (!workspaceId) {
return null
}

try {
const response = await fetch(`/api/table?workspaceId=${workspaceId}`)
if (!response.ok) {
return null
}

const data = await response.json()
const table = (data.tables || []).find((t: any) => t.id === tableId)
return table ? { label: table.name, id: table.id } : null
} catch (error) {
return null
}
},
fetchOptions: fetchTableOptions,
fetchOptionById: fetchTableOptionById,
},

// Row ID for get/update/delete
@@ -74,8 +74,8 @@ export function useTablesList(workspaceId?: string) {
throw new Error(error.error || 'Failed to fetch tables')
}

const data = await res.json()
return data.tables as TableDefinition[]
const response = await res.json()
return (response.data?.tables || []) as TableDefinition[]
},
enabled: Boolean(workspaceId),
staleTime: 30 * 1000, // Cache data for 30 seconds before refetching
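
With the envelope in place, the hook's fetcher reads tables from response.data; the defensive unwrap can be typed roughly as below (TableDefinition stands in for the type the hook already imports):

type TableDefinition = Record<string, unknown> // stand-in for the hook's imported type

type TablesListEnvelope = {
  success: boolean
  data?: { tables?: TableDefinition[] }
}

// Missing `data` or `tables` degrades to an empty list rather than `undefined`,
// which keeps the React Query cache shape consistent.
function unwrapTables(response: TablesListEnvelope): TableDefinition[] {
  return response.data?.tables ?? []
}
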
@@ -45,7 +45,8 @@ export const tableBatchInsertRowsTool: ToolConfig<
},

transformResponse: async (response): Promise<TableBatchInsertResponse> => {
const data = await response.json()
const result = await response.json()
const data = result.data || result

return {
success: true,
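
The same two-line unwrap (result.data || result) repeats in every tool below: it reads the new envelope while still accepting the old flat payload during the transition. As a shared helper it would look roughly like this (not part of the diff; the tools inline the logic instead):

async function unwrapToolResponse<T>(response: Response): Promise<T> {
  const result = await response.json()
  // New shape: { success, data: {...} }; old shape: the payload at the top level.
  return (result.data || result) as T
}
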
@@ -50,7 +50,8 @@ export const tableCreateTool: ToolConfig<TableCreateParams, TableCreateResponse>
},

transformResponse: async (response): Promise<TableCreateResponse> => {
const data = await response.json()
const result = await response.json()
const data = result.data || result

return {
success: true,

@@ -41,7 +41,8 @@ export const tableDeleteRowTool: ToolConfig<TableRowDeleteParams, TableDeleteRes
},

transformResponse: async (response): Promise<TableDeleteResponse> => {
const data = await response.json()
const result = await response.json()
const data = result.data || result

return {
success: true,

@@ -53,7 +53,8 @@ export const tableDeleteRowsByFilterTool: ToolConfig<
},

transformResponse: async (response): Promise<TableBulkOperationResponse> => {
const data = await response.json()
const result = await response.json()
const data = result.data || result

return {
success: true,

@@ -38,13 +38,14 @@ export const tableGetRowTool: ToolConfig<TableRowGetParams, TableRowResponse> =
},

transformResponse: async (response): Promise<TableRowResponse> => {
const data = await response.json()
const result = await response.json()
const data = result.data || result

return {
success: true,
output: {
row: data.row,
message: 'Row retrieved successfully',
message: data.message || 'Row retrieved successfully',
},
}
},

@@ -32,14 +32,15 @@ export const tableGetSchemaTool: ToolConfig<TableGetSchemaParams, TableGetSchema
},

transformResponse: async (response): Promise<TableGetSchemaResponse> => {
const data = await response.json()
const result = await response.json()
const data = result.data || result

return {
success: true,
output: {
name: data.table.name,
columns: data.table.schema.columns,
message: 'Schema retrieved successfully',
message: data.message || 'Schema retrieved successfully',
},
}
},

@@ -42,7 +42,8 @@ export const tableInsertRowTool: ToolConfig<TableRowInsertParams, TableRowRespon
},

transformResponse: async (response): Promise<TableRowResponse> => {
const data = await response.json()
const result = await response.json()
const data = result.data || result

return {
success: true,

@@ -24,7 +24,8 @@ export const tableListTool: ToolConfig<TableListParams, TableListResponse> = {
},

transformResponse: async (response): Promise<TableListResponse> => {
const data = await response.json()
const result = await response.json()
const data = result.data || result

return {
success: true,

@@ -74,7 +74,8 @@ export const tableQueryRowsTool: ToolConfig<TableRowQueryParams, TableQueryRespo
},

transformResponse: async (response): Promise<TableQueryResponse> => {
const data = await response.json()
const result = await response.json()
const data = result.data || result

return {
success: true,

@@ -48,7 +48,8 @@ export const tableUpdateRowTool: ToolConfig<TableRowUpdateParams, TableRowRespon
},

transformResponse: async (response): Promise<TableRowResponse> => {
const data = await response.json()
const result = await response.json()
const data = result.data || result

return {
success: true,

@@ -60,7 +60,8 @@ export const tableUpdateRowsByFilterTool: ToolConfig<
},

transformResponse: async (response): Promise<TableBulkOperationResponse> => {
const data = await response.json()
const result = await response.json()
const data = result.data || result

return {
success: true,

@@ -47,7 +47,8 @@ export const tableUpsertRowTool: ToolConfig<TableRowInsertParams, TableUpsertRes
},

transformResponse: async (response): Promise<TableUpsertResponse> => {
const data = await response.json()
const result = await response.json()
const data = result.data || result

return {
success: true,