This commit is contained in:
Lakee Sivaraya
2026-01-16 14:59:14 -08:00
parent 86c5e1b4ff
commit 94c6795efc
5 changed files with 246 additions and 29 deletions

View File

@@ -105,7 +105,7 @@ export async function GET(request: NextRequest, { params }: RowRouteParams) {
}
}
/** PATCH /api/table/[tableId]/rows/[rowId] - Updates a single row. */
/** PATCH /api/table/[tableId]/rows/[rowId] - Updates a single row (supports partial updates). */
export async function PATCH(request: NextRequest, { params }: RowRouteParams) {
const requestId = generateRequestId()
const { tableId, rowId } = await params
@@ -132,10 +132,31 @@ export async function PATCH(request: NextRequest, { params }: RowRouteParams) {
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
}
const rowData = validated.data as RowData
// Fetch existing row to support partial updates
const [existingRow] = await db
.select({ data: userTableRows.data })
.from(userTableRows)
.where(
and(
eq(userTableRows.id, rowId),
eq(userTableRows.tableId, tableId),
eq(userTableRows.workspaceId, validated.workspaceId)
)
)
.limit(1)
if (!existingRow) {
return NextResponse.json({ error: 'Row not found' }, { status: 404 })
}
// Merge existing data with incoming partial data (incoming takes precedence)
const mergedData = {
...(existingRow.data as RowData),
...(validated.data as RowData),
}
const validation = await validateRowData({
rowData,
rowData: mergedData,
schema: table.schema as TableSchema,
tableId,
excludeRowId: rowId,
@@ -147,7 +168,7 @@ export async function PATCH(request: NextRequest, { params }: RowRouteParams) {
const [updatedRow] = await db
.update(userTableRows)
.set({
data: validated.data,
data: mergedData,
updatedAt: now,
})
.where(

View File

@@ -10,6 +10,85 @@ import type { TableSummary } from '../types'
// Module-scoped logger; used below for warnings when a schema fetch fails.
const logger = createLogger('TableLLMEnrichment')
/**
 * Cache of in-flight and recently resolved table-schema lookups.
 * Key: `${workspaceId}:${tableId}` (built in fetchTableSchemaWithCache).
 * Value: the lookup promise plus the time the lookup started.
 * Storing the promise (not the resolved value) deduplicates concurrent
 * requests for the same table schema — simultaneous callers share one fetch.
 */
const schemaCache = new Map<
  string,
  {
    promise: Promise<TableSummary | null>
    timestamp: number
  }
>()
/** Schema cache time-to-live in milliseconds (5 seconds). */
const SCHEMA_CACHE_TTL_MS = 5000
/**
 * Evicts every schema-cache entry whose TTL has elapsed.
 * Keys are collected first, then deleted, to avoid mutating the map
 * while iterating over it.
 */
function cleanupSchemaCache(): void {
  const cutoff = Date.now() - SCHEMA_CACHE_TTL_MS
  const expiredKeys: string[] = []
  for (const [key, entry] of schemaCache.entries()) {
    // Strictly older than the cutoff == age strictly greater than the TTL.
    if (entry.timestamp < cutoff) {
      expiredKeys.push(key)
    }
  }
  for (const key of expiredKeys) {
    schemaCache.delete(key)
  }
}
/**
 * Fetches a table schema with caching and request deduplication.
 * If a lookup for the same workspace/table pair is already in flight (or
 * resolved within the TTL), the cached promise is returned so concurrent
 * callers share one fetch.
 *
 * Failed lookups are NOT negatively cached: when the fetch resolves to null
 * or rejects, its cache entry is evicted immediately so the next caller
 * retries instead of being served the stale failure for the full TTL.
 */
async function fetchTableSchemaWithCache(
  tableId: string,
  context: TableEnrichmentContext
): Promise<TableSummary | null> {
  // Bound cache growth: sweep expired entries once the map gets large.
  if (schemaCache.size > 50) {
    cleanupSchemaCache()
  }
  // Scope the key by workspace so identical tableIds in different
  // workspaces never share a cached schema.
  const cacheKey = `${context.workspaceId}:${tableId}`
  const cached = schemaCache.get(cacheKey)
  // Reuse a fresh entry — whether still in flight or recently resolved.
  if (cached && Date.now() - cached.timestamp < SCHEMA_CACHE_TTL_MS) {
    return cached.promise
  }
  // Start a new fetch.
  const fetchPromise = (async (): Promise<TableSummary | null> => {
    const schemaResult = await context.executeTool('table_get_schema', {
      tableId,
      _context: {
        workspaceId: context.workspaceId,
        workflowId: context.workflowId,
      },
    })
    if (!schemaResult.success || !schemaResult.output) {
      logger.warn(`Failed to fetch table schema: ${schemaResult.error}`)
      return null
    }
    return {
      name: schemaResult.output.name,
      columns: schemaResult.output.columns || [],
    }
  })()
  // Cache the promise immediately to deduplicate concurrent requests.
  const entry = { promise: fetchPromise, timestamp: Date.now() }
  schemaCache.set(cacheKey, entry)
  // Evict on failure so failures are retried rather than cached for the TTL.
  // Only delete when the stored entry is still ours (a newer fetch may have
  // replaced it). The rejection still propagates to callers via the returned
  // promise; this handler just prevents an unhandled-rejection warning here.
  fetchPromise.then(
    (result) => {
      if (result === null && schemaCache.get(cacheKey) === entry) {
        schemaCache.delete(cacheKey)
      }
    },
    () => {
      if (schemaCache.get(cacheKey) === entry) {
        schemaCache.delete(cacheKey)
      }
    }
  )
  return fetchPromise
}
export interface TableEnrichmentContext {
workspaceId: string
workflowId: string
@@ -50,33 +129,17 @@ export async function enrichTableToolForLLM(
}
try {
logger.info(`Fetching schema for table ${tableId}`)
// Use cached schema fetch to deduplicate concurrent requests for the same table
const tableSchema = await fetchTableSchemaWithCache(tableId, context)
const schemaResult = await context.executeTool('table_get_schema', {
tableId,
_context: {
workspaceId: context.workspaceId,
workflowId: context.workflowId,
},
})
if (!schemaResult.success || !schemaResult.output) {
logger.warn(`Failed to fetch table schema: ${schemaResult.error}`)
if (!tableSchema) {
return null
}
const tableSchema: TableSummary = {
name: schemaResult.output.name,
columns: schemaResult.output.columns || [],
}
// Apply enrichment using the existing utility functions
const enrichedDescription = enrichTableToolDescription(originalDescription, tableSchema, toolId)
const enrichedParams = enrichTableToolParameters(llmSchema, tableSchema, toolId)
logger.info(`Enriched ${toolId} with ${tableSchema.columns.length} columns`)
return {
description: enrichedDescription,
parameters: {
@@ -86,7 +149,7 @@ export async function enrichTableToolForLLM(
},
}
} catch (error) {
logger.warn(`Error fetching table schema:`, error)
logger.warn('Error fetching table schema:', error)
return null
}
}
@@ -190,6 +253,16 @@ ${filterExample}${sortExample}`
{} as Record<string, unknown>
)
// Update operations support partial updates
if (toolId === 'table_update_row') {
return `${originalDescription}
Table "${table.name}" available columns:
${columnList}
For updates, only include the fields you want to change. Example: {"${exampleCols[0]?.name || 'field'}": "new_value"}`
}
return `${originalDescription}
Table "${table.name}" available columns:
@@ -268,9 +341,18 @@ export function enrichTableToolParameters(
},
{} as Record<string, unknown>
)
enrichedProperties.data = {
...enrichedProperties.data,
description: `REQUIRED object containing row values. Use columns: ${columnNames}. Example value: ${JSON.stringify(exampleData)}`,
// Update operations support partial updates - only include fields to change
if (toolId === 'table_update_row') {
enrichedProperties.data = {
...enrichedProperties.data,
description: `Object containing fields to update. Only include fields you want to change. Available columns: ${columnNames}`,
}
} else {
enrichedProperties.data = {
...enrichedProperties.data,
description: `REQUIRED object containing row values. Use columns: ${columnNames}. Example value: ${JSON.stringify(exampleData)}`,
}
}
}

View File

@@ -0,0 +1,60 @@
/**
* Wand enricher for table schema context.
*/
import { db } from '@sim/db'
import { userTableDefinitions } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import type { TableSchema } from '../types'
// Module-scoped logger; used only for debug output when the schema lookup fails below.
const logger = createLogger('TableWandEnricher')
/**
 * Wand enricher that provides table schema context.
 * Used by the wand API to inject table column information into the system prompt.
 *
 * Returns a human-readable schema summary for the table identified by
 * `context.tableId`, or null when the tableId/workspaceId is missing, the
 * table is not found, the schema has no columns, or the lookup throws.
 */
export async function enrichTableSchema(
  workspaceId: string | null,
  context: Record<string, unknown>
): Promise<string | null> {
  const tableId = context.tableId as string | undefined
  // Both identifiers are required to look anything up.
  if (!tableId || !workspaceId) {
    return null
  }
  try {
    const rows = await db
      .select({
        name: userTableDefinitions.name,
        schema: userTableDefinitions.schema,
      })
      .from(userTableDefinitions)
      .where(
        and(eq(userTableDefinitions.id, tableId), eq(userTableDefinitions.workspaceId, workspaceId))
      )
      .limit(1)
    const table = rows[0]
    if (!table) {
      return null
    }
    const schema = table.schema as TableSchema | null
    if (!schema?.columns?.length) {
      return null
    }
    // One "- name (type, required, unique)" line per column; falsy flags omitted.
    const columnLines = schema.columns
      .map((col) => {
        const attributes: string[] = []
        if (col.type) attributes.push(col.type)
        if (col.required) attributes.push('required')
        if (col.unique) attributes.push('unique')
        return `- ${col.name} (${attributes.join(', ')})`
      })
      .join('\n')
    const heading = table.name ? `${table.name} (${tableId})` : tableId
    return [
      `Table schema for ${heading}:`,
      columnLines,
      'Built-in columns: createdAt, updatedAt',
    ].join('\n')
  } catch (error) {
    logger.debug('Failed to fetch table schema', { tableId, error })
    return null
  }
}

View File

@@ -43,6 +43,58 @@ function normalizeToolId(toolId: string): string {
*/
const MAX_REQUEST_BODY_SIZE_BYTES = 10 * 1024 * 1024 // 10MB
/**
 * Parameter aliases that LLMs commonly use as synonyms.
 * Maps alternative parameter names to their canonical names.
 * Key: toolId, Value: map of alias -> canonical parameter name.
 * NOTE: entry order matters — when several aliases are present at once,
 * the first listed alias is promoted and later ones are left untouched.
 */
const PARAMETER_ALIASES: Record<string, Record<string, string>> = {
  table_update_row: {
    values: 'data',
    row: 'data',
    fields: 'data',
    update: 'data',
    updates: 'data',
    changes: 'data',
    newData: 'data',
    rowData: 'data',
  },
  table_insert_row: {
    values: 'data',
    row: 'data',
    fields: 'data',
    rowData: 'data',
  },
  table_upsert_row: {
    values: 'data',
    row: 'data',
    fields: 'data',
    rowData: 'data',
  },
}
/**
 * Normalizes LLM-provided parameters by rewriting known aliases
 * (e.g. "values" instead of "data") to their canonical names.
 * The canonical key always wins: an alias is only promoted (copied then
 * removed) when the canonical key is absent from the parameters.
 * Returns the input object unchanged (same reference) when the tool has
 * no alias table; otherwise returns a shallow copy.
 */
function applyParameterAliases(toolId: string, params: Record<string, any>): Record<string, any> {
  const aliasMap = PARAMETER_ALIASES[toolId]
  if (!aliasMap) return params
  const normalized: Record<string, any> = { ...params }
  for (const alias of Object.keys(aliasMap)) {
    if (!(alias in normalized)) continue
    const canonical = aliasMap[alias]
    // Canonical value takes precedence; leave the alias key as-is.
    if (canonical in normalized) continue
    normalized[canonical] = normalized[alias]
    delete normalized[alias]
  }
  return normalized
}
/**
* User-friendly error message for body size limit exceeded
*/
@@ -235,7 +287,8 @@ export async function executeTool(
}
// Ensure context is preserved if it exists
const contextParams = { ...params }
// Apply parameter aliases to handle common LLM synonym usage (e.g., "values" -> "data")
const contextParams = applyParameterAliases(normalizedToolId, { ...params })
// Validate the tool and its parameters
validateRequiredParametersAfterMerge(toolId, tool, contextParams)

View File

@@ -4,7 +4,8 @@ import type { TableRowResponse, TableRowUpdateParams } from './types'
export const tableUpdateRowTool: ToolConfig<TableRowUpdateParams, TableRowResponse> = {
id: 'table_update_row',
name: 'Update Row',
description: 'Update an existing row in a table',
description:
'Update an existing row in a table. Supports partial updates - only include the fields you want to change.',
version: '1.0.0',
params: {