fix(tools): fixed workflow tool for agent to respect user provided params, inject at runtime like all other tools (#2750)

* fix(tools): fixed workflow tool for agent to respect user provided params, inject at runtime like all other tools

* ack comments

* remove redundant if-else

* added tests
This commit is contained in:
Waleed
2026-01-09 17:12:58 -08:00
committed by GitHub
parent 796f73ee01
commit 1f5e8a41f8
7 changed files with 629 additions and 41 deletions

View File

@@ -21,7 +21,6 @@ export async function POST(req: NextRequest) {
const requestId = generateRequestId()
try {
// Get user session
const session = await getSession()
if (!session?.user?.email) {
logger.warn(`[${requestId}] Unauthorized help request attempt`)
@@ -30,20 +29,20 @@ export async function POST(req: NextRequest) {
const email = session.user.email
// Handle multipart form data
const formData = await req.formData()
// Extract form fields
const subject = formData.get('subject') as string
const message = formData.get('message') as string
const type = formData.get('type') as string
const workflowId = formData.get('workflowId') as string | null
const workspaceId = formData.get('workspaceId') as string
const userAgent = formData.get('userAgent') as string | null
logger.info(`[${requestId}] Processing help request`, {
type,
email: `${email.substring(0, 3)}***`, // Log partial email for privacy
})
// Validate the form data
const validationResult = helpFormSchema.safeParse({
subject,
message,
@@ -60,7 +59,6 @@ export async function POST(req: NextRequest) {
)
}
// Extract images
const images: { filename: string; content: Buffer; contentType: string }[] = []
for (const [key, value] of formData.entries()) {
@@ -81,10 +79,14 @@ export async function POST(req: NextRequest) {
logger.debug(`[${requestId}] Help request includes ${images.length} images`)
// Prepare email content
const userId = session.user.id
let emailText = `
Type: ${type}
From: ${email}
User ID: ${userId}
Workspace ID: ${workspaceId ?? 'N/A'}
Workflow ID: ${workflowId ?? 'N/A'}
Browser: ${userAgent ?? 'N/A'}
${message}
`
@@ -115,7 +117,6 @@ ${message}
logger.info(`[${requestId}] Help request email sent successfully`)
// Send confirmation email to the user
try {
const confirmationHtml = await renderHelpConfirmationEmail(
type as 'bug' | 'feedback' | 'feature_request' | 'other',

View File

@@ -57,9 +57,11 @@ interface ImageWithPreview extends File {
interface HelpModalProps {
open: boolean
onOpenChange: (open: boolean) => void
workflowId?: string
workspaceId: string
}
export function HelpModal({ open, onOpenChange }: HelpModalProps) {
export function HelpModal({ open, onOpenChange, workflowId, workspaceId }: HelpModalProps) {
const fileInputRef = useRef<HTMLInputElement>(null)
const scrollContainerRef = useRef<HTMLDivElement>(null)
@@ -370,18 +372,20 @@ export function HelpModal({ open, onOpenChange }: HelpModalProps) {
setSubmitStatus(null)
try {
// Prepare form data with images
const formData = new FormData()
formData.append('subject', data.subject)
formData.append('message', data.message)
formData.append('type', data.type)
formData.append('workspaceId', workspaceId)
formData.append('userAgent', navigator.userAgent)
if (workflowId) {
formData.append('workflowId', workflowId)
}
// Attach all images to form data
images.forEach((image, index) => {
formData.append(`image_${index}`, image)
})
// Submit to API
const response = await fetch('/api/help', {
method: 'POST',
body: formData,
@@ -392,11 +396,9 @@ export function HelpModal({ open, onOpenChange }: HelpModalProps) {
throw new Error(errorData.error || 'Failed to submit help request')
}
// Handle success
setSubmitStatus('success')
reset()
// Clean up resources
images.forEach((image) => URL.revokeObjectURL(image.preview))
setImages([])
} catch (error) {
@@ -406,7 +408,7 @@ export function HelpModal({ open, onOpenChange }: HelpModalProps) {
setIsSubmitting(false)
}
},
[images, reset]
[images, reset, workflowId, workspaceId]
)
/**

View File

@@ -661,7 +661,12 @@ export function Sidebar() {
/>
{/* Footer Navigation Modals */}
<HelpModal open={isHelpModalOpen} onOpenChange={setIsHelpModalOpen} />
<HelpModal
open={isHelpModalOpen}
onOpenChange={setIsHelpModalOpen}
workflowId={workflowId}
workspaceId={workspaceId}
/>
<SettingsModal
open={isSettingsModalOpen}
onOpenChange={(open) => (open ? openSettingsModal() : closeSettingsModal())}

View File

@@ -25,6 +25,7 @@ import {
MODELS_WITH_TEMPERATURE_SUPPORT,
MODELS_WITH_VERBOSITY,
PROVIDERS_WITH_TOOL_USAGE_CONTROL,
prepareToolExecution,
prepareToolsWithUsageControl,
shouldBillModelUsage,
supportsTemperature,
@@ -979,6 +980,245 @@ describe('Tool Management', () => {
})
})
describe('prepareToolExecution', () => {
describe('basic parameter merging', () => {
it.concurrent('should merge LLM args with user params', () => {
const tool = {
params: { apiKey: 'user-key', channel: '#general' },
}
const llmArgs = { message: 'Hello world', channel: '#random' }
const request = { workflowId: 'wf-123' }
const { toolParams } = prepareToolExecution(tool, llmArgs, request)
expect(toolParams.apiKey).toBe('user-key')
expect(toolParams.channel).toBe('#general') // User value wins
expect(toolParams.message).toBe('Hello world')
})
it.concurrent('should filter out empty string user params', () => {
const tool = {
params: { apiKey: 'user-key', channel: '' }, // Empty channel
}
const llmArgs = { message: 'Hello', channel: '#llm-channel' }
const request = {}
const { toolParams } = prepareToolExecution(tool, llmArgs, request)
expect(toolParams.apiKey).toBe('user-key')
expect(toolParams.channel).toBe('#llm-channel') // LLM value used since user is empty
expect(toolParams.message).toBe('Hello')
})
})
describe('inputMapping deep merge for workflow tools', () => {
it.concurrent('should deep merge inputMapping when user provides empty object', () => {
const tool = {
params: {
workflowId: 'child-workflow-123',
inputMapping: '{}', // Empty JSON string from UI
},
}
const llmArgs = {
inputMapping: { query: 'search term', limit: 10 },
}
const request = { workflowId: 'parent-workflow' }
const { toolParams } = prepareToolExecution(tool, llmArgs, request)
// LLM values should be used since user object is empty
expect(toolParams.inputMapping).toEqual({ query: 'search term', limit: 10 })
expect(toolParams.workflowId).toBe('child-workflow-123')
})
it.concurrent('should deep merge inputMapping with partial user values', () => {
const tool = {
params: {
workflowId: 'child-workflow',
inputMapping: '{"query": "", "customField": "user-value"}', // Partial values
},
}
const llmArgs = {
inputMapping: { query: 'llm-search', limit: 10 },
}
const request = {}
const { toolParams } = prepareToolExecution(tool, llmArgs, request)
// LLM fills empty query, user's customField preserved, LLM's limit included
expect(toolParams.inputMapping).toEqual({
query: 'llm-search',
limit: 10,
customField: 'user-value',
})
})
it.concurrent('should preserve non-empty user inputMapping values', () => {
const tool = {
params: {
workflowId: 'child-workflow',
inputMapping: '{"query": "user-search", "limit": 5}',
},
}
const llmArgs = {
inputMapping: { query: 'llm-search', limit: 10, extra: 'field' },
}
const request = {}
const { toolParams } = prepareToolExecution(tool, llmArgs, request)
// User values win, but LLM's extra field is included
expect(toolParams.inputMapping).toEqual({
query: 'user-search',
limit: 5,
extra: 'field',
})
})
it.concurrent('should handle inputMapping as object (not JSON string)', () => {
const tool = {
params: {
workflowId: 'child-workflow',
inputMapping: { query: '', customField: 'user-value' }, // Object, not string
},
}
const llmArgs = {
inputMapping: { query: 'llm-search', limit: 10 },
}
const request = {}
const { toolParams } = prepareToolExecution(tool, llmArgs, request)
expect(toolParams.inputMapping).toEqual({
query: 'llm-search',
limit: 10,
customField: 'user-value',
})
})
it.concurrent('should use LLM inputMapping when user does not provide it', () => {
const tool = {
params: { workflowId: 'child-workflow' }, // No inputMapping
}
const llmArgs = {
inputMapping: { query: 'llm-search', limit: 10 },
}
const request = {}
const { toolParams } = prepareToolExecution(tool, llmArgs, request)
expect(toolParams.inputMapping).toEqual({ query: 'llm-search', limit: 10 })
})
it.concurrent('should use user inputMapping when LLM does not provide it', () => {
const tool = {
params: {
workflowId: 'child-workflow',
inputMapping: '{"query": "user-search"}',
},
}
const llmArgs = {} // No inputMapping from LLM
const request = {}
const { toolParams } = prepareToolExecution(tool, llmArgs, request)
expect(toolParams.inputMapping).toEqual({ query: 'user-search' })
})
it.concurrent('should handle invalid JSON in user inputMapping gracefully', () => {
const tool = {
params: {
workflowId: 'child-workflow',
inputMapping: 'not valid json {',
},
}
const llmArgs = {
inputMapping: { query: 'llm-search' },
}
const request = {}
const { toolParams } = prepareToolExecution(tool, llmArgs, request)
// Should use LLM values since user JSON is invalid
expect(toolParams.inputMapping).toEqual({ query: 'llm-search' })
})
it.concurrent('should not affect other parameters - normal override behavior', () => {
const tool = {
params: { apiKey: 'user-key', channel: '#general' },
}
const llmArgs = { message: 'Hello', channel: '#random' }
const request = {}
const { toolParams } = prepareToolExecution(tool, llmArgs, request)
// Normal behavior: user values override LLM values
expect(toolParams.apiKey).toBe('user-key')
expect(toolParams.channel).toBe('#general') // User value wins
expect(toolParams.message).toBe('Hello')
})
it.concurrent('should preserve 0 and false as valid user values in inputMapping', () => {
const tool = {
params: {
workflowId: 'child-workflow',
inputMapping: '{"limit": 0, "enabled": false, "query": ""}',
},
}
const llmArgs = {
inputMapping: { limit: 10, enabled: true, query: 'llm-search' },
}
const request = {}
const { toolParams } = prepareToolExecution(tool, llmArgs, request)
// 0 and false should be preserved (they're valid values)
// empty string should be filled by LLM
expect(toolParams.inputMapping).toEqual({
limit: 0,
enabled: false,
query: 'llm-search',
})
})
})
describe('execution params context', () => {
it.concurrent('should include workflow context in executionParams', () => {
const tool = { params: { message: 'test' } }
const llmArgs = {}
const request = {
workflowId: 'wf-123',
workspaceId: 'ws-456',
chatId: 'chat-789',
userId: 'user-abc',
}
const { executionParams } = prepareToolExecution(tool, llmArgs, request)
expect(executionParams._context).toEqual({
workflowId: 'wf-123',
workspaceId: 'ws-456',
chatId: 'chat-789',
userId: 'user-abc',
})
})
it.concurrent('should include environment and workflow variables', () => {
const tool = { params: {} }
const llmArgs = {}
const request = {
environmentVariables: { API_KEY: 'secret' },
workflowVariables: { counter: 42 },
}
const { executionParams } = prepareToolExecution(tool, llmArgs, request)
expect(executionParams.envVars).toEqual({ API_KEY: 'secret' })
expect(executionParams.workflowVariables).toEqual({ counter: 42 })
})
})
})
describe('Provider/Model Blacklist', () => {
describe('isProviderBlacklisted', () => {
it.concurrent('should return false when no providers are blacklisted', () => {

View File

@@ -30,6 +30,7 @@ import {
import type { ProviderId, ProviderToolConfig } from '@/providers/types'
import { useCustomToolsStore } from '@/stores/custom-tools/store'
import { useProvidersStore } from '@/stores/providers/store'
import { deepMergeInputMapping } from '@/tools/params'
const logger = createLogger('ProviderUtils')
@@ -973,7 +974,7 @@ export function prepareToolExecution(
llmArgs: Record<string, any>,
request: {
workflowId?: string
workspaceId?: string // Add workspaceId for MCP tools
workspaceId?: string
chatId?: string
userId?: string
environmentVariables?: Record<string, any>
@@ -994,9 +995,24 @@ export function prepareToolExecution(
}
}
const toolParams = {
...llmArgs,
...filteredUserParams,
// Start with LLM params as base
const toolParams: Record<string, any> = { ...llmArgs }
// Apply user params with special handling for inputMapping
for (const [key, userValue] of Object.entries(filteredUserParams)) {
if (key === 'inputMapping') {
// Deep merge inputMapping so LLM values fill in empty user fields
const llmInputMapping = llmArgs.inputMapping as Record<string, any> | undefined
toolParams.inputMapping = deepMergeInputMapping(llmInputMapping, userValue)
} else {
// Normal override for other params
toolParams[key] = userValue
}
}
// If LLM provided inputMapping but user didn't, ensure it's included
if (llmArgs.inputMapping && !filteredUserParams.inputMapping) {
toolParams.inputMapping = llmArgs.inputMapping
}
const executionParams = {

View File

@@ -296,6 +296,253 @@ describe('Tool Parameters Utils', () => {
})
})
describe('workflow_executor inputMapping handling', () => {
const mockWorkflowExecutorConfig = {
id: 'workflow_executor',
name: 'Workflow Executor',
description: 'Execute another workflow',
version: '1.0.0',
params: {
workflowId: {
type: 'string',
required: true,
visibility: 'user-or-llm' as ParameterVisibility,
description: 'The ID of the workflow to execute',
},
inputMapping: {
type: 'object',
required: false,
visibility: 'user-or-llm' as ParameterVisibility,
description: 'Map inputs to the selected workflow',
},
},
request: {
url: 'https://api.example.com/workflows',
method: 'POST' as HttpMethod,
headers: () => ({}),
},
}
describe('createLLMToolSchema - inputMapping always included', () => {
it.concurrent(
'should include inputMapping in schema even when user provides empty object',
async () => {
const userProvidedParams = {
workflowId: 'workflow-123',
inputMapping: '{}',
}
const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
expect(schema.properties).toHaveProperty('inputMapping')
expect(schema.properties.inputMapping.type).toBe('object')
}
)
it.concurrent(
'should include inputMapping in schema even when user provides object with empty values',
async () => {
const userProvidedParams = {
workflowId: 'workflow-123',
inputMapping: '{"query": "", "limit": ""}',
}
const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
expect(schema.properties).toHaveProperty('inputMapping')
}
)
it.concurrent(
'should include inputMapping when user has not provided it at all',
async () => {
const userProvidedParams = {
workflowId: 'workflow-123',
}
const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
expect(schema.properties).toHaveProperty('inputMapping')
}
)
it.concurrent('should exclude workflowId from schema when user provides it', async () => {
const userProvidedParams = {
workflowId: 'workflow-123',
}
const schema = await createLLMToolSchema(mockWorkflowExecutorConfig, userProvidedParams)
expect(schema.properties).not.toHaveProperty('workflowId')
expect(schema.properties).toHaveProperty('inputMapping')
})
})
describe('mergeToolParameters - inputMapping deep merge', () => {
it.concurrent('should deep merge inputMapping when user provides empty object', () => {
const userProvided = {
workflowId: 'workflow-123',
inputMapping: '{}',
}
const llmGenerated = {
inputMapping: { query: 'search term', limit: 10 },
}
const merged = mergeToolParameters(userProvided, llmGenerated)
expect(merged.inputMapping).toEqual({ query: 'search term', limit: 10 })
expect(merged.workflowId).toBe('workflow-123')
})
it.concurrent('should deep merge inputMapping when user provides partial values', () => {
const userProvided = {
workflowId: 'workflow-123',
inputMapping: '{"query": "", "customField": "user-value"}',
}
const llmGenerated = {
inputMapping: { query: 'llm-search', limit: 10 },
}
const merged = mergeToolParameters(userProvided, llmGenerated)
expect(merged.inputMapping).toEqual({
query: 'llm-search',
limit: 10,
customField: 'user-value',
})
})
it.concurrent('should preserve user inputMapping values when they are non-empty', () => {
const userProvided = {
workflowId: 'workflow-123',
inputMapping: '{"query": "user-search", "limit": 5}',
}
const llmGenerated = {
inputMapping: { query: 'llm-search', limit: 10, extra: 'field' },
}
const merged = mergeToolParameters(userProvided, llmGenerated)
expect(merged.inputMapping).toEqual({
query: 'user-search',
limit: 5,
extra: 'field',
})
})
it.concurrent('should handle inputMapping as object (not JSON string)', () => {
const userProvided = {
workflowId: 'workflow-123',
inputMapping: { query: '', customField: 'user-value' },
}
const llmGenerated = {
inputMapping: { query: 'llm-search', limit: 10 },
}
const merged = mergeToolParameters(userProvided, llmGenerated)
expect(merged.inputMapping).toEqual({
query: 'llm-search',
limit: 10,
customField: 'user-value',
})
})
it.concurrent('should use LLM inputMapping when user does not provide it', () => {
const userProvided = {
workflowId: 'workflow-123',
}
const llmGenerated = {
inputMapping: { query: 'llm-search', limit: 10 },
}
const merged = mergeToolParameters(userProvided, llmGenerated)
expect(merged.inputMapping).toEqual({ query: 'llm-search', limit: 10 })
})
it.concurrent('should use user inputMapping when LLM does not provide it', () => {
const userProvided = {
workflowId: 'workflow-123',
inputMapping: '{"query": "user-search"}',
}
const llmGenerated = {}
const merged = mergeToolParameters(userProvided, llmGenerated)
expect(merged.inputMapping).toEqual({ query: 'user-search' })
})
it.concurrent('should handle invalid JSON in user inputMapping gracefully', () => {
const userProvided = {
workflowId: 'workflow-123',
inputMapping: 'not valid json {',
}
const llmGenerated = {
inputMapping: { query: 'llm-search' },
}
const merged = mergeToolParameters(userProvided, llmGenerated)
expect(merged.inputMapping).toEqual({ query: 'llm-search' })
})
it.concurrent(
'should fill field when user typed something then removed it (field becomes empty string)',
() => {
const userProvided = {
workflowId: 'workflow-123',
inputMapping: '{"query": ""}',
}
const llmGenerated = {
inputMapping: { query: 'llm-generated-search' },
}
const merged = mergeToolParameters(userProvided, llmGenerated)
expect(merged.inputMapping).toEqual({ query: 'llm-generated-search' })
}
)
it.concurrent('should not affect other parameters - normal override behavior', () => {
const userProvided = {
apiKey: 'user-key',
channel: '#general',
}
const llmGenerated = {
message: 'Hello world',
channel: '#random',
}
const merged = mergeToolParameters(userProvided, llmGenerated)
expect(merged.apiKey).toBe('user-key')
expect(merged.channel).toBe('#general')
expect(merged.message).toBe('Hello world')
})
it.concurrent('should preserve 0 and false as valid user values in inputMapping', () => {
const userProvided = {
workflowId: 'workflow-123',
inputMapping: '{"limit": 0, "enabled": false, "query": ""}',
}
const llmGenerated = {
inputMapping: { limit: 10, enabled: true, query: 'llm-search' },
}
const merged = mergeToolParameters(userProvided, llmGenerated)
// 0 and false should be preserved (they're valid values)
// empty string should be filled by LLM
expect(merged.inputMapping).toEqual({
limit: 0,
enabled: false,
query: 'llm-search',
})
})
})
})
describe('Type Interface Validation', () => {
it.concurrent('should have properly typed ToolSchema', async () => {
const schema: ToolSchema = await createLLMToolSchema(mockToolConfig, {})
@@ -304,7 +551,6 @@ describe('Tool Parameters Utils', () => {
expect(typeof schema.properties).toBe('object')
expect(Array.isArray(schema.required)).toBe(true)
// Verify properties have correct structure
Object.values(schema.properties).forEach((prop) => {
expect(prop).toHaveProperty('type')
expect(prop).toHaveProperty('description')

View File

@@ -395,31 +395,39 @@ export async function createLLMToolSchema(
// Only include parameters that the LLM should/can provide
for (const [paramId, param] of Object.entries(toolConfig.params)) {
const isUserProvided =
userProvidedParams[paramId] !== undefined &&
userProvidedParams[paramId] !== null &&
userProvidedParams[paramId] !== ''
// Special handling for workflow_executor's inputMapping parameter
// Always include in LLM schema so LLM can provide dynamic input values
// even if user has configured empty/partial inputMapping in the UI
const isWorkflowInputMapping =
toolConfig.id === 'workflow_executor' && paramId === 'inputMapping'
// Skip parameters that user has already provided
if (isUserProvided) {
continue
}
if (!isWorkflowInputMapping) {
const isUserProvided =
userProvidedParams[paramId] !== undefined &&
userProvidedParams[paramId] !== null &&
userProvidedParams[paramId] !== ''
// Skip parameters that are user-only (never shown to LLM)
if (param.visibility === 'user-only') {
continue
}
// Skip parameters that user has already provided
if (isUserProvided) {
continue
}
// Skip hidden parameters
if (param.visibility === 'hidden') {
continue
// Skip parameters that are user-only (never shown to LLM)
if (param.visibility === 'user-only') {
continue
}
// Skip hidden parameters
if (param.visibility === 'hidden') {
continue
}
}
// Add parameter to LLM schema
const propertySchema = buildParameterSchema(toolConfig.id, paramId, param)
// Special handling for workflow_executor's inputMapping parameter
if (toolConfig.id === 'workflow_executor' && paramId === 'inputMapping') {
// Apply dynamic schema enrichment for workflow_executor's inputMapping
if (isWorkflowInputMapping) {
const workflowId = userProvidedParams.workflowId as string
if (workflowId) {
await applyDynamicSchemaForWorkflow(propertySchema, workflowId)
@@ -571,10 +579,60 @@ export function createExecutionToolSchema(toolConfig: ToolConfig): ToolSchema {
return schema
}
/**
* Deep merges inputMapping objects, where LLM values fill in empty/missing user values.
* User-provided non-empty values take precedence.
*/
export function deepMergeInputMapping(
llmInputMapping: Record<string, unknown> | undefined,
userInputMapping: Record<string, unknown> | string | undefined
): Record<string, unknown> {
// Parse user inputMapping if it's a JSON string
let parsedUserMapping: Record<string, unknown> = {}
if (typeof userInputMapping === 'string') {
try {
const parsed = JSON.parse(userInputMapping)
if (typeof parsed === 'object' && parsed !== null && !Array.isArray(parsed)) {
parsedUserMapping = parsed
}
} catch {
// Invalid JSON, treat as empty
}
} else if (
typeof userInputMapping === 'object' &&
userInputMapping !== null &&
!Array.isArray(userInputMapping)
) {
parsedUserMapping = userInputMapping
}
// If no LLM mapping, return user mapping (or empty)
if (!llmInputMapping || typeof llmInputMapping !== 'object') {
return parsedUserMapping
}
// Deep merge: LLM values as base, user non-empty values override
// If user provides empty object {}, LLM values fill all fields (intentional)
const merged: Record<string, unknown> = { ...llmInputMapping }
for (const [key, userValue] of Object.entries(parsedUserMapping)) {
// Only override LLM value if user provided a non-empty value
// Note: Using strict inequality (!==) so 0 and false are correctly preserved
if (userValue !== undefined && userValue !== null && userValue !== '') {
merged[key] = userValue
}
}
return merged
}
/**
* Merges user-provided parameters with LLM-generated parameters.
* User-provided parameters take precedence, but empty strings are skipped
* so that LLM-generated values are used when user clears a field.
*
* Special handling for inputMapping: deep merges so LLM can fill in
* fields that user left empty in the UI.
*/
export function mergeToolParameters(
userProvidedParams: Record<string, unknown>,
@@ -589,11 +647,31 @@ export function mergeToolParameters(
}
}
// User-provided parameters take precedence (after filtering empty values)
return {
...llmGeneratedParams,
...filteredUserParams,
// Start with LLM params as base
const result: Record<string, unknown> = { ...llmGeneratedParams }
// Apply user params, with special handling for inputMapping
for (const [key, userValue] of Object.entries(filteredUserParams)) {
if (key === 'inputMapping') {
// Deep merge inputMapping so LLM values fill in empty user fields
const llmInputMapping = llmGeneratedParams.inputMapping as Record<string, unknown> | undefined
const mergedInputMapping = deepMergeInputMapping(
llmInputMapping,
userValue as Record<string, unknown> | string | undefined
)
result.inputMapping = mergedInputMapping
} else {
// Normal override for other params
result[key] = userValue
}
}
// If LLM provided inputMapping but user didn't, ensure it's included
if (llmGeneratedParams.inputMapping && !filteredUserParams.inputMapping) {
result.inputMapping = llmGeneratedParams.inputMapping
}
return result
}
/**