improvement(block-inputs): must parse json accurately + models max_tokens fix (#3033)

* improvement(block-inputs): must parse json accurately

* fix sheets typing

* add reference comment

* fix models

* revert blocks changes

* fix param to follow openai new convention
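For context on the last bullet: OpenAI's Chat Completions API has deprecated `max_tokens` in favor of `max_completion_tokens`, and several OpenAI-compatible providers follow suit. A minimal sketch of the payload shape the diff below moves to (only the token-limit field names come from this commit; the interface and values are illustrative, not code from this repo):

```ts
// Illustrative request-body shape; only the token-limit fields mirror the diff.
interface ChatCompletionPayload {
  model: string
  messages: Array<{ role: 'system' | 'user' | 'assistant'; content: string }>
  temperature?: number
  max_tokens?: number // older convention, still used by some providers
  max_completion_tokens?: number // newer OpenAI convention adopted here
}

function buildPayload(maxTokens?: number): ChatCompletionPayload {
  const payload: ChatCompletionPayload = {
    model: 'gpt-4o', // placeholder model name
    messages: [{ role: 'user', content: 'hi' }],
  }
  // `!= null` filters both null and undefined, matching the guard in the diff.
  if (maxTokens != null) payload.max_completion_tokens = maxTokens // was: payload.max_tokens
  return payload
}
```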
Vikhyath Mondreti
2026-01-27 18:17:35 -08:00
committed by GitHub
parent ef613ef035
commit 65bc21608c
13 changed files with 62 additions and 17 deletions


@@ -28,6 +28,7 @@ import type {
} from '@/executor/types'
import { streamingResponseFormatProcessor } from '@/executor/utils'
import { buildBlockExecutionError, normalizeError } from '@/executor/utils/errors'
+ import { isJSONString } from '@/executor/utils/json'
import { filterOutputForLog } from '@/executor/utils/output-filter'
import { validateBlockType } from '@/executor/utils/permission-check'
import type { VariableResolver } from '@/executor/variables/resolver'
@@ -86,7 +87,7 @@ export class BlockExecutor {
resolvedInputs = this.resolver.resolveInputs(ctx, node.id, block.config.params, block)
if (blockLog) {
- blockLog.input = resolvedInputs
+ blockLog.input = this.parseJsonInputs(resolvedInputs)
}
} catch (error) {
cleanupSelfReference?.()
@@ -157,7 +158,14 @@ export class BlockExecutor {
const displayOutput = filterOutputForLog(block.metadata?.id || '', normalizedOutput, {
block,
})
- this.callOnBlockComplete(ctx, node, block, resolvedInputs, displayOutput, duration)
+ this.callOnBlockComplete(
+   ctx,
+   node,
+   block,
+   this.parseJsonInputs(resolvedInputs),
+   displayOutput,
+   duration
+ )
}
return normalizedOutput
@@ -233,7 +241,7 @@ export class BlockExecutor {
blockLog.durationMs = duration
blockLog.success = false
blockLog.error = errorMessage
- blockLog.input = input
+ blockLog.input = this.parseJsonInputs(input)
blockLog.output = filterOutputForLog(block.metadata?.id || '', errorOutput, { block })
}
@@ -248,7 +256,14 @@ export class BlockExecutor {
if (!isSentinel) {
const displayOutput = filterOutputForLog(block.metadata?.id || '', errorOutput, { block })
- this.callOnBlockComplete(ctx, node, block, input, displayOutput, duration)
+ this.callOnBlockComplete(
+   ctx,
+   node,
+   block,
+   this.parseJsonInputs(input),
+   displayOutput,
+   duration
+ )
}
const hasErrorPort = this.hasErrorPortEdge(node)
@@ -336,6 +351,36 @@ export class BlockExecutor {
return { result: output }
}
+ /**
+  * Parse JSON string inputs to objects for log display only.
+  * Attempts to parse any string that looks like JSON.
+  * Returns a new object - does not mutate the original inputs.
+  */
+ private parseJsonInputs(inputs: Record<string, any>): Record<string, any> {
+   let result = inputs
+   let hasChanges = false
+   for (const [key, value] of Object.entries(inputs)) {
+     // isJSONString is a quick heuristic (checks for { or [), not a validator.
+     // Invalid JSON is safely caught below - this just avoids JSON.parse on every string.
+     if (typeof value !== 'string' || !isJSONString(value)) {
+       continue
+     }
+     try {
+       if (!hasChanges) {
+         result = { ...inputs }
+         hasChanges = true
+       }
+       result[key] = JSON.parse(value.trim())
+     } catch {
+       // Not valid JSON, keep original string
+     }
+   }
+   return result
+ }
private callOnBlockStart(ctx: ExecutionContext, node: DAGNode, block: SerializedBlock): void {
const blockId = node.id
const blockName = block.metadata?.name ?? blockId
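To see the new log-input parsing in isolation, here is a hedged, self-contained reproduction of `parseJsonInputs` (the `isJSONString` stub mirrors the quick `{`/`[` check the comment above describes; the real helper in '@/executor/utils/json' may differ):

```ts
// Stand-in for the helper imported from '@/executor/utils/json' (assumption).
const isJSONString = (s: string): boolean => {
  const t = s.trim()
  return t.startsWith('{') || t.startsWith('[')
}

// Reproduction of the copy-on-write behavior in the hunk above.
function parseJsonInputs(inputs: Record<string, any>): Record<string, any> {
  let result = inputs
  let hasChanges = false
  for (const [key, value] of Object.entries(inputs)) {
    if (typeof value !== 'string' || !isJSONString(value)) continue
    try {
      const parsed = JSON.parse(value.trim())
      if (!hasChanges) {
        result = { ...inputs } // original inputs object is never mutated
        hasChanges = true
      }
      result[key] = parsed
    } catch {
      // Not valid JSON after all; keep the original string.
    }
  }
  return result
}

// '{"x":1}' becomes an object for display; '{oops' survives as a string.
console.log(parseJsonInputs({ a: '{"x":1}', b: '{oops', c: 42 }))
// -> { a: { x: 1 }, b: '{oops', c: 42 }
```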


@@ -936,8 +936,8 @@ export class AgentBlockHandler implements BlockHandler {
systemPrompt: validMessages ? undefined : inputs.systemPrompt,
context: validMessages ? undefined : stringifyJSON(messages),
tools: formattedTools,
- temperature: inputs.temperature,
- maxTokens: inputs.maxTokens,
+ temperature: inputs.temperature != null ? Number(inputs.temperature) : undefined,
+ maxTokens: inputs.maxTokens != null ? Number(inputs.maxTokens) : undefined,
apiKey: inputs.apiKey,
azureEndpoint: inputs.azureEndpoint,
azureApiVersion: inputs.azureApiVersion,
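The `Number(...)` coercion guards against block inputs that arrive as strings (for example, values typed into a text field or resolved from variables), while `!= null` keeps valid falsy values such as a temperature of 0. A small sketch of the semantics, assuming only standard JavaScript:

```ts
// Matches the guard used above: drop null/undefined, coerce everything else.
const coerce = (v: unknown): number | undefined => (v != null ? Number(v) : undefined)

coerce('0.7')     // 0.7 — a string from a form field becomes a number
coerce(0)         // 0   — falsy but valid; a `v ? ...` guard would wrongly drop it
coerce(undefined) // undefined — the field is simply omitted from the request
// Caveat (assumption): non-numeric strings coerce to NaN and are not filtered here.
```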


@@ -102,7 +102,7 @@ export const azureOpenAIProvider: ProviderConfig = {
}
if (request.temperature !== undefined) payload.temperature = request.temperature
- if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+ if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens
if (request.reasoningEffort !== undefined) payload.reasoning_effort = request.reasoningEffort
if (request.verbosity !== undefined) payload.verbosity = request.verbosity


@@ -77,7 +77,7 @@ export const cerebrasProvider: ProviderConfig = {
messages: allMessages,
}
if (request.temperature !== undefined) payload.temperature = request.temperature
- if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+ if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens
if (request.responseFormat) {
payload.response_format = {
type: 'json_schema',


@@ -81,7 +81,7 @@ export const deepseekProvider: ProviderConfig = {
}
if (request.temperature !== undefined) payload.temperature = request.temperature
- if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+ if (request.maxTokens != null) payload.max_tokens = request.maxTokens
let preparedTools: ReturnType<typeof prepareToolsWithUsageControl> | null = null


@@ -349,7 +349,7 @@ export async function executeGeminiRequest(
if (request.temperature !== undefined) {
geminiConfig.temperature = request.temperature
}
- if (request.maxTokens !== undefined) {
+ if (request.maxTokens != null) {
geminiConfig.maxOutputTokens = request.maxTokens
}
if (systemInstruction) {

View File

@@ -74,7 +74,7 @@ export const groqProvider: ProviderConfig = {
}
if (request.temperature !== undefined) payload.temperature = request.temperature
- if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+ if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens
if (request.responseFormat) {
payload.response_format = {


@@ -91,7 +91,7 @@ export const mistralProvider: ProviderConfig = {
}
if (request.temperature !== undefined) payload.temperature = request.temperature
- if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+ if (request.maxTokens != null) payload.max_tokens = request.maxTokens
if (request.responseFormat) {
payload.response_format = {


@@ -105,7 +105,7 @@ export const ollamaProvider: ProviderConfig = {
}
if (request.temperature !== undefined) payload.temperature = request.temperature
- if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+ if (request.maxTokens != null) payload.max_tokens = request.maxTokens
if (request.responseFormat) {
payload.response_format = {


@@ -81,7 +81,7 @@ export const openaiProvider: ProviderConfig = {
}
if (request.temperature !== undefined) payload.temperature = request.temperature
- if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+ if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens
if (request.reasoningEffort !== undefined) payload.reasoning_effort = request.reasoningEffort
if (request.verbosity !== undefined) payload.verbosity = request.verbosity


@@ -121,7 +121,7 @@ export const openRouterProvider: ProviderConfig = {
}
if (request.temperature !== undefined) payload.temperature = request.temperature
- if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+ if (request.maxTokens != null) payload.max_tokens = request.maxTokens
let preparedTools: ReturnType<typeof prepareToolsWithUsageControl> | null = null
let hasActiveTools = false


@@ -135,7 +135,7 @@ export const vllmProvider: ProviderConfig = {
}
if (request.temperature !== undefined) payload.temperature = request.temperature
- if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
+ if (request.maxTokens != null) payload.max_completion_tokens = request.maxTokens
if (request.responseFormat) {
payload.response_format = {


@@ -92,7 +92,7 @@ export const xAIProvider: ProviderConfig = {
}
if (request.temperature !== undefined) basePayload.temperature = request.temperature
- if (request.maxTokens !== undefined) basePayload.max_tokens = request.maxTokens
+ if (request.maxTokens != null) basePayload.max_completion_tokens = request.maxTokens
let preparedTools: ReturnType<typeof prepareToolsWithUsageControl> | null = null
if (tools?.length) {
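As a reading aid, the token-limit field each provider sends after this commit, compiled from the hunks above (the map itself is illustrative, not repo code):

```ts
// Summary of the per-provider field chosen in this commit (see hunks above).
const tokenLimitField: Record<string, string> = {
  azureOpenAI: 'max_completion_tokens',
  cerebras: 'max_completion_tokens',
  groq: 'max_completion_tokens',
  openai: 'max_completion_tokens',
  vllm: 'max_completion_tokens',
  xai: 'max_completion_tokens',
  deepseek: 'max_tokens',
  mistral: 'max_tokens',
  ollama: 'max_tokens',
  openrouter: 'max_tokens',
  gemini: 'maxOutputTokens', // set on geminiConfig, not an HTTP payload field
}
```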