migrate enrichment logic to general abstraction

This commit is contained in:
Vikhyath Mondreti
2026-01-21 17:08:20 -08:00
parent 2d49de76ea
commit 2818b745d1
15 changed files with 222 additions and 238 deletions

View File

@@ -711,9 +711,6 @@ export class AgentBlockHandler implements BlockHandler {
getAllBlocks,
getToolAsync: (toolId: string) => getToolAsync(toolId, ctx.workflowId),
getTool,
workspaceId: ctx.workspaceId,
workflowId: ctx.workflowId,
executeTool,
})
if (transformedTool) {

View File

@@ -5,155 +5,8 @@
* with table-specific information so LLMs can construct proper queries.
*/
import { createLogger } from '@sim/logger'
import type { TableSummary } from '../types'
const logger = createLogger('TableLLMEnrichment')
/**
 * Cache for in-flight and recently fetched table schemas.
 * Key: `${workspaceId}:${tableId}` (see fetchTableSchemaWithCache),
 * Value: { promise, timestamp }.
 * This deduplicates concurrent requests for the same table schema: callers
 * within the TTL share the same promise instead of issuing duplicate fetches.
 */
const schemaCache = new Map<
  string,
  {
    promise: Promise<TableSummary | null>
    timestamp: number
  }
>()

/** Schema cache TTL in milliseconds (5 seconds) */
const SCHEMA_CACHE_TTL_MS = 5000
/**
 * Evicts every schema-cache entry whose TTL has elapsed.
 */
function cleanupSchemaCache(): void {
  // Entries stamped before this cutoff have outlived SCHEMA_CACHE_TTL_MS.
  const cutoff = Date.now() - SCHEMA_CACHE_TTL_MS
  for (const [cacheKey, cachedEntry] of schemaCache) {
    if (cachedEntry.timestamp < cutoff) {
      schemaCache.delete(cacheKey)
    }
  }
}
/**
 * Fetches table schema with caching and request deduplication.
 * If a request for the same table is already in flight, returns the same promise.
 *
 * Failed lookups are NOT cached: when the fetch resolves to null or rejects,
 * the cache entry is evicted immediately so the next caller retries instead of
 * receiving a stale failure for the full TTL.
 */
async function fetchTableSchemaWithCache(
  tableId: string,
  context: TableEnrichmentContext
): Promise<TableSummary | null> {
  // Clean up expired entries periodically to bound cache growth
  if (schemaCache.size > 50) {
    cleanupSchemaCache()
  }

  const cacheKey = `${context.workspaceId}:${tableId}`
  const cached = schemaCache.get(cacheKey)

  // Reuse an in-flight or recently resolved fetch that is still within the TTL
  if (cached && Date.now() - cached.timestamp < SCHEMA_CACHE_TTL_MS) {
    return cached.promise
  }

  // Create a new fetch promise
  const fetchPromise = (async (): Promise<TableSummary | null> => {
    const schemaResult = await context.executeTool('table_get_schema', {
      tableId,
      _context: {
        workspaceId: context.workspaceId,
        workflowId: context.workflowId,
      },
    })

    if (!schemaResult.success || !schemaResult.output) {
      logger.warn(`Failed to fetch table schema: ${schemaResult.error}`)
      return null
    }

    return {
      name: schemaResult.output.name,
      columns: schemaResult.output.columns || [],
    }
  })()

  // Cache the promise immediately to deduplicate concurrent requests
  const entry = { promise: fetchPromise, timestamp: Date.now() }
  schemaCache.set(cacheKey, entry)

  // Evict on failure (null result or rejection). The identity check guards
  // against deleting a newer entry that replaced this one in the meantime.
  fetchPromise
    .then((result) => {
      if (result === null && schemaCache.get(cacheKey) === entry) {
        schemaCache.delete(cacheKey)
      }
    })
    .catch(() => {
      if (schemaCache.get(cacheKey) === entry) {
        schemaCache.delete(cacheKey)
      }
    })

  return fetchPromise
}
/**
 * Execution context required to enrich a table tool for the LLM.
 */
export interface TableEnrichmentContext {
  /** Workspace that owns the table; part of the schema-cache key and forwarded to the schema tool. */
  workspaceId: string
  /** Workflow the tool call runs within; forwarded to the schema tool's `_context`. */
  workflowId: string
  /** Executes a tool by ID with the given params (used here for 'table_get_schema'). */
  executeTool: (toolId: string, params: Record<string, any>) => Promise<any>
}
/**
 * Result of enriching a table tool for LLM consumption.
 */
export interface TableEnrichmentResult {
  /** Tool description augmented with table-specific details. */
  description: string
  /** LLM parameter schema with enriched property descriptions and required list. */
  parameters: {
    properties: Record<string, any>
    required: string[]
  }
}
/**
* Enriches a table tool for LLM consumption by fetching its schema
* and injecting column information into the description and parameters.
*
* @param toolId - The table tool ID (e.g., 'table_query_rows')
* @param originalDescription - The tool's original description
* @param llmSchema - The original LLM schema
* @param userProvidedParams - Parameters provided by the user (must include tableId)
* @param context - Execution context with workspaceId, workflowId, and executeTool
* @returns Enriched description and parameters, or null if enrichment not applicable
*/
export async function enrichTableToolForLLM(
toolId: string,
originalDescription: string,
llmSchema: { properties?: Record<string, any>; required?: string[] },
userProvidedParams: Record<string, any>,
context: TableEnrichmentContext
): Promise<TableEnrichmentResult | null> {
const { tableId } = userProvidedParams
// Need a tableId to fetch schema
if (!tableId) {
return null
}
try {
// Use cached schema fetch to deduplicate concurrent requests for the same table
const tableSchema = await fetchTableSchemaWithCache(tableId, context)
if (!tableSchema) {
return null
}
// Apply enrichment using the existing utility functions
const enrichedDescription = enrichTableToolDescription(originalDescription, tableSchema, toolId)
const enrichedParams = enrichTableToolParameters(llmSchema, tableSchema, toolId)
return {
description: enrichedDescription,
parameters: {
properties: enrichedParams.properties,
required:
enrichedParams.required.length > 0 ? enrichedParams.required : llmSchema.required || [],
},
}
} catch (error) {
logger.warn('Error fetching table schema:', error)
return null
}
}
/**
* Operations that use filters and need filter-specific enrichment.
*/
@@ -175,11 +28,6 @@ export const DATA_OPERATIONS = new Set([
/**
* Enriches a table tool description with table information based on the operation type.
*
* @param originalDescription - The original tool description
* @param table - The table summary with name and columns
* @param toolId - The tool identifier to determine operation type
* @returns Enriched description with table-specific instructions
*/
export function enrichTableToolDescription(
originalDescription: string,
@@ -192,7 +40,6 @@ export function enrichTableToolDescription(
const columnList = table.columns.map((col) => ` - ${col.name} (${col.type})`).join('\n')
// Filter-based operations: emphasize filter usage
if (FILTER_OPERATIONS.has(toolId)) {
const stringCols = table.columns.filter((c) => c.type === 'string')
const numberCols = table.columns.filter((c) => c.type === 'number')
@@ -208,14 +55,12 @@ Example filter: {"${stringCols[0].name}": {"$eq": "value"}, "${numberCols[0].nam
Example filter: {"${stringCols[0].name}": {"$eq": "value"}}`
}
// Add sort example for query operations with numeric columns
let sortExample = ''
if (toolId === 'table_query_rows' && numberCols.length > 0) {
sortExample = `
Example sort: {"${numberCols[0].name}": "desc"} for highest first, {"${numberCols[0].name}": "asc"} for lowest first`
}
// Query-specific instructions with sort/limit guidance
const queryInstructions =
toolId === 'table_query_rows'
? `
@@ -242,7 +87,6 @@ ${columnList}
${filterExample}${sortExample}`
}
// Data operations: show columns for data construction
if (DATA_OPERATIONS.has(toolId)) {
const exampleCols = table.columns.slice(0, 3)
const dataExample = exampleCols.reduce(
@@ -253,7 +97,6 @@ ${filterExample}${sortExample}`
{} as Record<string, unknown>
)
// Update operations support partial updates
if (toolId === 'table_update_row') {
return `${originalDescription}
@@ -271,7 +114,6 @@ ${columnList}
Pass the "data" parameter with an object like: ${JSON.stringify(dataExample)}`
}
// Default: just show columns
return `${originalDescription}
Table "${table.name}" columns:
@@ -280,11 +122,6 @@ ${columnList}`
/**
* Enriches LLM tool parameters with table-specific information.
*
* @param llmSchema - The original LLM schema with properties and required fields
* @param table - The table summary with name and columns
* @param toolId - The tool identifier to determine operation type
* @returns Enriched schema with updated property descriptions and required fields
*/
export function enrichTableToolParameters(
llmSchema: { properties?: Record<string, any>; required?: string[] },
@@ -302,7 +139,6 @@ export function enrichTableToolParameters(
const enrichedProperties = { ...llmSchema.properties }
const enrichedRequired = llmSchema.required ? [...llmSchema.required] : []
// Enrich filter parameter for filter-based operations
if (enrichedProperties.filter && FILTER_OPERATIONS.has(toolId)) {
enrichedProperties.filter = {
...enrichedProperties.filter,
@@ -310,12 +146,10 @@ export function enrichTableToolParameters(
}
}
// Mark filter as required in schema for query operations
if (FILTER_OPERATIONS.has(toolId) && !enrichedRequired.includes('filter')) {
enrichedRequired.push('filter')
}
// Enrich sort parameter for query operations
if (enrichedProperties.sort && toolId === 'table_query_rows') {
enrichedProperties.sort = {
...enrichedProperties.sort,
@@ -323,7 +157,6 @@ export function enrichTableToolParameters(
}
}
// Enrich limit parameter for query operations
if (enrichedProperties.limit && toolId === 'table_query_rows') {
enrichedProperties.limit = {
...enrichedProperties.limit,
@@ -331,7 +164,6 @@ export function enrichTableToolParameters(
}
}
// Enrich data parameter for insert/update operations
if (enrichedProperties.data && DATA_OPERATIONS.has(toolId)) {
const exampleCols = table.columns.slice(0, 2)
const exampleData = exampleCols.reduce(
@@ -342,7 +174,6 @@ export function enrichTableToolParameters(
{} as Record<string, unknown>
)
// Update operations support partial updates - only include fields to change
if (toolId === 'table_update_row') {
enrichedProperties.data = {
...enrichedProperties.data,
@@ -356,7 +187,6 @@ export function enrichTableToolParameters(
}
}
// Enrich rows parameter for batch insert
if (enrichedProperties.rows && toolId === 'table_batch_insert_rows') {
enrichedProperties.rows = {
...enrichedProperties.rows,

View File

@@ -3,7 +3,6 @@ import type { ChatCompletionChunk } from 'openai/resources/chat/completions'
import type { CompletionUsage } from 'openai/resources/completions'
import { env } from '@/lib/core/config/env'
import { isHosted } from '@/lib/core/config/feature-flags'
import { enrichTableToolForLLM } from '@/lib/table/llm'
import { isCustomTool } from '@/executor/constants'
import {
getComputerUseModels,
@@ -433,20 +432,9 @@ export async function transformBlockTool(
getAllBlocks: () => any[]
getTool: (toolId: string) => any
getToolAsync?: (toolId: string) => Promise<any>
workspaceId?: string
workflowId?: string
executeTool?: (toolId: string, params: Record<string, any>) => Promise<any>
}
): Promise<ProviderToolConfig | null> {
const {
selectedOperation,
getAllBlocks,
getTool,
getToolAsync,
workspaceId,
workflowId,
executeTool,
} = options
const { selectedOperation, getAllBlocks, getTool, getToolAsync } = options
const blockDef = getAllBlocks().find((b: any) => b.type === block.type)
if (!blockDef) {
@@ -500,11 +488,14 @@ export async function transformBlockTool(
const userProvidedParams = block.params || {}
const llmSchema = await createLLMToolSchema(toolConfig, userProvidedParams)
const { schema: llmSchema, enrichedDescription } = await createLLMToolSchema(
toolConfig,
userProvidedParams
)
let uniqueToolId = toolConfig.id
let toolName = toolConfig.name
let toolDescription = toolConfig.description
let toolDescription = enrichedDescription || toolConfig.description
if (toolId === 'workflow_executor' && userProvidedParams.workflowId) {
uniqueToolId = `${toolConfig.id}_${userProvidedParams.workflowId}`
@@ -521,36 +512,16 @@ export async function transformBlockTool(
}
} else if (toolId.startsWith('knowledge_') && userProvidedParams.knowledgeBaseId) {
uniqueToolId = `${toolConfig.id}_${userProvidedParams.knowledgeBaseId}`
}
// Apply table tool enrichment if applicable
let finalDescription = toolDescription
let finalSchema = llmSchema
if (toolId.startsWith('table_') && workspaceId && workflowId && executeTool) {
const result = await enrichTableToolForLLM(
toolId,
toolDescription,
llmSchema,
userProvidedParams,
{
workspaceId,
workflowId,
executeTool,
}
)
if (result) {
finalDescription = result.description
finalSchema = { ...llmSchema, ...result.parameters }
}
} else if (toolId.startsWith('table_') && userProvidedParams.tableId) {
uniqueToolId = `${toolConfig.id}_${userProvidedParams.tableId}`
}
return {
id: uniqueToolId,
name: toolName,
description: finalDescription,
description: toolDescription,
params: userProvidedParams,
parameters: finalSchema,
parameters: llmSchema,
}
}

View File

@@ -19,22 +19,41 @@ import {
const logger = createLogger('Tools')
/**
* Normalizes a tool ID by stripping resource ID suffix (UUID).
* Normalizes a tool ID by stripping resource ID suffix (UUID/tableId).
* Workflow tools: 'workflow_executor_<uuid>' -> 'workflow_executor'
* Knowledge tools: 'knowledge_search_<uuid>' -> 'knowledge_search'
* Table tools: 'table_query_rows_<tableId>' -> 'table_query_rows'
*/
/**
 * Normalizes a tool ID by stripping a resource ID suffix (UUID/tableId).
 * Workflow tools:  'workflow_executor_<uuid>'   -> 'workflow_executor'
 * Knowledge tools: 'knowledge_search_<uuid>'    -> 'knowledge_search'
 * Table tools:     'table_query_rows_<tableId>' -> 'table_query_rows'
 *
 * IDs that carry no suffix (including a bare trailing underscore) are
 * returned unchanged.
 */
function normalizeToolId(toolId: string): string {
  // Every op whose tool ID may carry a '<op>_<resourceId>' suffix.
  const suffixedOps = [
    'workflow_executor',
    'knowledge_search',
    'knowledge_upload_chunk',
    'knowledge_create_document',
    'table_query_rows',
    'table_insert_row',
    'table_batch_insert_rows',
    'table_update_row',
    'table_update_rows_by_filter',
    'table_delete_rows_by_filter',
    'table_upsert_row',
    'table_get_row',
    'table_delete_row',
    'table_get_schema',
  ]

  // Check longer op names first so an op that is a textual prefix of another
  // can never shadow the longer match. (Copy before sorting; sort mutates.)
  const byLengthDesc = [...suffixedOps].sort((a, b) => b.length - a.length)
  for (const op of byLengthDesc) {
    // `length > op.length + 1` requires at least one character after the
    // underscore, so a bare '<op>_' is not normalized.
    if (toolId.startsWith(`${op}_`) && toolId.length > op.length + 1) {
      return op
    }
  }

  return toolId
}

View File

@@ -89,7 +89,7 @@ describe('Tool Parameters Utils', () => {
channel: '#general',
}
const schema = await createLLMToolSchema(mockToolConfig, userProvidedParams)
const { schema } = await createLLMToolSchema(mockToolConfig, userProvidedParams)
expect(schema.properties).not.toHaveProperty('apiKey') // user-only, excluded
expect(schema.properties).not.toHaveProperty('channel') // user-provided, excluded
@@ -100,7 +100,7 @@ describe('Tool Parameters Utils', () => {
})
it.concurrent('should include all parameters when none are user-provided', async () => {
const schema = await createLLMToolSchema(mockToolConfig, {})
const { schema } = await createLLMToolSchema(mockToolConfig, {})
expect(schema.properties).not.toHaveProperty('apiKey') // user-only, never shown to LLM
expect(schema.properties).toHaveProperty('message') // user-or-llm, shown to LLM
@@ -332,7 +332,10 @@ describe('Tool Parameters Utils', () => {
inputMapping: '{}',
}
const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
const { schema } = await createLLMToolSchema(
mockWorkflowExecutorConfig,
userProvidedParams
)
expect(schema.properties).toHaveProperty('inputMapping')
expect(schema.properties.inputMapping.type).toBe('object')
@@ -347,7 +350,10 @@ describe('Tool Parameters Utils', () => {
inputMapping: '{"query": "", "limit": ""}',
}
const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
const { schema } = await createLLMToolSchema(
mockWorkflowExecutorConfig,
userProvidedParams
)
expect(schema.properties).toHaveProperty('inputMapping')
}
@@ -360,7 +366,10 @@ describe('Tool Parameters Utils', () => {
workflowId: 'workflow-123',
}
const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
const { schema } = await createLLMToolSchema(
mockWorkflowExecutorConfig,
userProvidedParams
)
expect(schema.properties).toHaveProperty('inputMapping')
}
@@ -371,7 +380,7 @@ describe('Tool Parameters Utils', () => {
workflowId: 'workflow-123',
}
const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
const { schema } = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
expect(schema.properties).not.toHaveProperty('workflowId')
expect(schema.properties).toHaveProperty('inputMapping')
@@ -545,7 +554,7 @@ describe('Tool Parameters Utils', () => {
describe('Type Interface Validation', () => {
it.concurrent('should have properly typed ToolSchema', async () => {
const schema: ToolSchema = await createLLMToolSchema(mockToolConfig, {})
const { schema } = await createLLMToolSchema(mockToolConfig, {})
expect(schema.type).toBe('object')
expect(typeof schema.properties).toBe('object')

View File

@@ -109,6 +109,11 @@ export interface ToolSchema {
required: string[]
}
/**
 * Result of building an LLM-facing tool schema.
 */
export interface LLMToolSchemaResult {
  /** Parameter schema exposed to the LLM. */
  schema: ToolSchema
  /** Description rewritten by tool-level enrichment; unset when enrichment did not run or returned null. */
  enrichedDescription?: string
}
export interface ValidationResult {
valid: boolean
missingParams: string[]
@@ -419,27 +424,19 @@ export function createUserToolSchema(toolConfig: ToolConfig): ToolSchema {
export async function createLLMToolSchema(
toolConfig: ToolConfig,
userProvidedParams: Record<string, unknown>
): Promise<ToolSchema> {
): Promise<LLMToolSchemaResult> {
const schema: ToolSchema = {
type: 'object',
properties: {},
required: [],
}
// Only include parameters that the LLM should/can provide
for (const [paramId, param] of Object.entries(toolConfig.params)) {
// Check if this param has schema enrichment config
const enrichmentConfig = toolConfig.schemaEnrichment?.[paramId]
// Special handling for workflow_executor's inputMapping parameter
// Always include in LLM schema so LLM can provide dynamic input values
// even if user has configured empty/partial inputMapping in the UI
const isWorkflowInputMapping =
toolConfig.id === 'workflow_executor' && paramId === 'inputMapping'
// Parameters with enrichment config are treated specially:
// - Include them if dependency value is available (even if normally hidden)
// - Skip them if dependency value is not available
if (enrichmentConfig) {
const dependencyValue = userProvidedParams[enrichmentConfig.dependsOn] as string
if (!dependencyValue) {
@@ -461,26 +458,21 @@ export async function createLLMToolSchema(
}
if (!isWorkflowInputMapping) {
// Skip parameters that user has already provided
if (isNonEmpty(userProvidedParams[paramId])) {
continue
}
// Skip parameters that are user-only (never shown to LLM)
if (param.visibility === 'user-only') {
continue
}
// Skip hidden parameters
if (param.visibility === 'hidden') {
continue
}
}
// Add parameter to LLM schema
const propertySchema = buildParameterSchema(toolConfig.id, paramId, param)
// Apply dynamic schema enrichment for workflow_executor's inputMapping
if (isWorkflowInputMapping) {
const workflowId = userProvidedParams.workflowId as string
if (workflowId) {
@@ -490,13 +482,29 @@ export async function createLLMToolSchema(
schema.properties[paramId] = propertySchema
// Add to required if LLM must provide it and it's originally required
if ((param.visibility === 'user-or-llm' || param.visibility === 'llm-only') && param.required) {
schema.required.push(paramId)
}
}
return schema
if (toolConfig.toolEnrichment) {
const dependencyValue = userProvidedParams[toolConfig.toolEnrichment.dependsOn] as string
if (dependencyValue) {
const enriched = await toolConfig.toolEnrichment.enrichTool(
dependencyValue,
schema,
toolConfig.description
)
if (enriched) {
return {
schema: enriched.parameters as ToolSchema,
enrichedDescription: enriched.description,
}
}
}
}
return { schema }
}
/**

View File

@@ -1,7 +1,76 @@
import { createLogger } from '@sim/logger'
import { enrichTableToolDescription, enrichTableToolParameters } from '@/lib/table/llm/enrichment'
import type { TableSummary } from '@/lib/table/types'
const logger = createLogger('SchemaEnrichers')
/**
 * Loads a table's name and column definitions from the schema API.
 * Returns null on any failure (non-OK response or thrown error).
 */
async function fetchTableSchema(tableId: string): Promise<TableSummary | null> {
  try {
    // Imported lazily so this module stays loadable outside the executor context.
    const http = await import('@/executor/utils/http')
    const requestHeaders = await http.buildAuthHeaders()
    const endpoint = http.buildAPIUrl(`/api/table/${tableId}/schema`)

    const response = await fetch(endpoint.toString(), { headers: requestHeaders })
    if (!response.ok) {
      logger.warn(`Failed to fetch table schema for ${tableId}: ${response.status}`)
      return null
    }

    // The API may wrap the payload in a `data` envelope.
    const payload = await response.json()
    const schema = payload.data || payload

    return {
      name: schema.name || 'Table',
      columns: schema.columns || [],
    }
  } catch (error) {
    logger.error('Failed to fetch table schema:', error)
    return null
  }
}
export async function enrichTableToolSchema(
tableId: string,
toolId: string,
originalSchema: {
type: 'object'
properties: Record<string, unknown>
required: string[]
},
originalDescription: string
): Promise<{
description: string
parameters: {
type: 'object'
properties: Record<string, unknown>
required: string[]
}
} | null> {
const tableSchema = await fetchTableSchema(tableId)
if (!tableSchema) {
return null
}
const enrichedDescription = enrichTableToolDescription(originalDescription, tableSchema, toolId)
const enrichedParams = enrichTableToolParameters(
{ properties: originalSchema.properties, required: originalSchema.required },
tableSchema,
toolId
)
return {
description: enrichedDescription,
parameters: {
type: 'object',
properties: enrichedParams.properties,
required:
enrichedParams.required.length > 0 ? enrichedParams.required : originalSchema.required,
},
}
}
interface TagDefinition {
id: string
tagSlot: string

View File

@@ -1,4 +1,5 @@
import { TABLE_LIMITS } from '@/lib/table/constants'
import { enrichTableToolSchema } from '@/tools/schema-enrichers'
import type { ToolConfig } from '@/tools/types'
import type { TableBatchInsertParams, TableBatchInsertResponse } from './types'
@@ -11,6 +12,12 @@ export const tableBatchInsertRowsTool: ToolConfig<
description: `Insert multiple rows into a table at once (up to ${TABLE_LIMITS.MAX_BATCH_INSERT_SIZE} rows)`,
version: '1.0.0',
toolEnrichment: {
dependsOn: 'tableId',
enrichTool: (tableId, schema, desc) =>
enrichTableToolSchema(tableId, 'table_batch_insert_rows', schema, desc),
},
params: {
tableId: {
type: 'string',

View File

@@ -1,4 +1,5 @@
import { TABLE_LIMITS } from '@/lib/table/constants'
import { enrichTableToolSchema } from '@/tools/schema-enrichers'
import type { ToolConfig } from '@/tools/types'
import type { TableBulkOperationResponse, TableDeleteByFilterParams } from './types'
@@ -12,6 +13,12 @@ export const tableDeleteRowsByFilterTool: ToolConfig<
'Delete multiple rows that match filter criteria. Use with caution - supports optional limit for safety.',
version: '1.0.0',
toolEnrichment: {
dependsOn: 'tableId',
enrichTool: (tableId, schema, desc) =>
enrichTableToolSchema(tableId, 'table_delete_rows_by_filter', schema, desc),
},
params: {
tableId: {
type: 'string',

View File

@@ -1,3 +1,4 @@
import { enrichTableToolSchema } from '@/tools/schema-enrichers'
import type { ToolConfig } from '@/tools/types'
import type { TableRowInsertParams, TableRowResponse } from './types'
@@ -8,6 +9,12 @@ export const tableInsertRowTool: ToolConfig<TableRowInsertParams, TableRowRespon
'Insert a new row into a table. IMPORTANT: You must use the "data" parameter (not "values", "row", "fields", or other variations) to specify the row contents.',
version: '1.0.0',
toolEnrichment: {
dependsOn: 'tableId',
enrichTool: (tableId, schema, desc) =>
enrichTableToolSchema(tableId, 'table_insert_row', schema, desc),
},
params: {
tableId: {
type: 'string',

View File

@@ -1,4 +1,5 @@
import { TABLE_LIMITS } from '@/lib/table/constants'
import { enrichTableToolSchema } from '@/tools/schema-enrichers'
import type { ToolConfig } from '@/tools/types'
import type { TableQueryResponse, TableRowQueryParams } from './types'
@@ -8,6 +9,12 @@ export const tableQueryRowsTool: ToolConfig<TableRowQueryParams, TableQueryRespo
description: 'Query rows from a table with filtering, sorting, and pagination',
version: '1.0.0',
toolEnrichment: {
dependsOn: 'tableId',
enrichTool: (tableId, schema, desc) =>
enrichTableToolSchema(tableId, 'table_query_rows', schema, desc),
},
params: {
tableId: {
type: 'string',

View File

@@ -1,3 +1,4 @@
import { enrichTableToolSchema } from '@/tools/schema-enrichers'
import type { ToolConfig } from '@/tools/types'
import type { TableRowResponse, TableRowUpdateParams } from './types'
@@ -8,6 +9,12 @@ export const tableUpdateRowTool: ToolConfig<TableRowUpdateParams, TableRowRespon
'Update an existing row in a table. Supports partial updates - only include the fields you want to change. IMPORTANT: You must use the "data" parameter (not "values", "row", "fields", or other variations) to specify the fields to update.',
version: '1.0.0',
toolEnrichment: {
dependsOn: 'tableId',
enrichTool: (tableId, schema, desc) =>
enrichTableToolSchema(tableId, 'table_update_row', schema, desc),
},
params: {
tableId: {
type: 'string',

View File

@@ -1,4 +1,5 @@
import { TABLE_LIMITS } from '@/lib/table/constants'
import { enrichTableToolSchema } from '@/tools/schema-enrichers'
import type { ToolConfig } from '@/tools/types'
import type { TableBulkOperationResponse, TableUpdateByFilterParams } from './types'
@@ -12,6 +13,12 @@ export const tableUpdateRowsByFilterTool: ToolConfig<
'Update multiple rows that match filter criteria. Data is merged with existing row data.',
version: '1.0.0',
toolEnrichment: {
dependsOn: 'tableId',
enrichTool: (tableId, schema, desc) =>
enrichTableToolSchema(tableId, 'table_update_rows_by_filter', schema, desc),
},
params: {
tableId: {
type: 'string',

View File

@@ -1,4 +1,5 @@
import type { TableRow } from '@/lib/table/types'
import { enrichTableToolSchema } from '@/tools/schema-enrichers'
import type { ToolConfig, ToolResponse } from '@/tools/types'
import type { TableRowInsertParams } from './types'
@@ -17,6 +18,12 @@ export const tableUpsertRowTool: ToolConfig<TableRowInsertParams, TableUpsertRes
'Insert or update a row based on unique column constraints. If a row with matching unique field exists, update it; otherwise insert a new row. IMPORTANT: You must use the "data" parameter (not "values", "row", "fields", or other variations) to specify the row contents.',
version: '1.0.0',
toolEnrichment: {
dependsOn: 'tableId',
enrichTool: (tableId, schema, desc) =>
enrichTableToolSchema(tableId, 'table_upsert_row', schema, desc),
},
params: {
tableId: {
type: 'string',

View File

@@ -127,6 +127,12 @@ export interface ToolConfig<P = any, R = any> {
* Maps param IDs to their enrichment configuration.
*/
schemaEnrichment?: Record<string, SchemaEnrichmentConfig>
/**
* Optional tool-level enrichment that modifies description and all parameters.
* Use when multiple params depend on a single runtime value.
*/
toolEnrichment?: ToolEnrichmentConfig
}
export interface TableRow {
@@ -170,3 +176,29 @@ export interface SchemaEnrichmentConfig {
required?: string[]
} | null>
}
/**
 * Configuration for enriching an entire tool (description + all parameters) at runtime.
 * Used when multiple parameters and the description depend on a single runtime
 * value (e.g., tableId), rather than a single parameter.
 */
export interface ToolEnrichmentConfig {
  /** The param ID that this enrichment depends on (e.g., 'tableId') */
  dependsOn: string
  /**
   * Function to enrich the tool's description and parameter schema.
   * Receives the resolved dependency value plus the tool's original schema and
   * description; returns null when enrichment is not applicable, in which case
   * the original description and schema are used unchanged.
   */
  enrichTool: (
    dependencyValue: string,
    originalSchema: {
      type: 'object'
      properties: Record<string, unknown>
      required: string[]
    },
    originalDescription: string
  ) => Promise<{
    description: string
    parameters: {
      type: 'object'
      properties: Record<string, unknown>
      required: string[]
    }
  } | null>
}