mirror of
https://github.com/simstudioai/sim.git
synced 2026-02-17 09:52:38 -05:00
* fix(billing): should allow restoring subscription (#1728) * fix(already-cancelled-sub): UI should allow restoring subscription * restore functionality fixed * fix * improvement(start): revert to start block * make it work with start block * fix start block persistence * cleanup triggers * debounce status checks * update docs * improvement(start): revert to start block * make it work with start block * fix start block persistence * cleanup triggers * debounce status checks * update docs * SSE v0.1 * v0.2 * v0.3 * v0.4 * v0.5 * v0.6 * broken checkpoint * Executor progress - everything preliminarily tested except while loops and triggers * Executor fixes * Fix var typing * Implement while loop execution * Loop and parallel result agg * Refactor v1 - loops work * Fix var resolution in for each loop * Fix while loop condition and variable resolution * Fix loop iteration counts * Fix loop badges * Clean logs * Fix variable references from start block * Fix condition block * Fix conditional convergence * Dont execute orphaned nodse * Code cleanup 1 and error surfacing * compile time try catch * Some fixes * Fix error throwing * Sentinels v1 * Fix multiple start and end nodes in loop * Edge restoration * Fix reachable nodes execution * Parallel subflows * Fix loop/parallel sentinel convergence * Loops and parallels orchestrator * Split executor * Variable resolution split * Dag phase * Refactor * Refactor * Refactor 3 * Lint + refactor * Lint + cleanup + refactor * Readability * Initial logs * Fix trace spans * Console pills for iters * Add input/output pills * Checkpoint * remove unused code * THIS IS THE COMMIT THAT CAN BREAK A LOT OF THINGS * ANOTHER BIG REFACTOR * Lint + fix tests * Fix webhook * Remove comment * Merge stash * Fix triggers? 
* Stuff * Fix error port * Lint * Consolidate state * Clean up some var resolution * Remove some var resolution logs * Fix chat * Fix chat triggers * Fix chat trigger fully * Snapshot refactor * Fix mcp and custom tools * Lint * Fix parallel default count and trace span overlay * Agent purple * Fix test * Fix test --------- Co-authored-by: Waleed <walif6@gmail.com> Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com> Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
176 lines
5.2 KiB
TypeScript
176 lines
5.2 KiB
TypeScript
import { createLogger } from '@/lib/logs/console/logger'
|
|
import type { ResponseFormatStreamProcessor } from '@/executor/types'
|
|
|
|
// Module-scoped logger for the executor's streaming utilities.
const logger = createLogger('ExecutorUtils')
/**
|
|
* Processes a streaming response to extract only the selected response format fields
|
|
* instead of streaming the full JSON wrapper.
|
|
*/
|
|
export class StreamingResponseFormatProcessor implements ResponseFormatStreamProcessor {
|
|
processStream(
|
|
originalStream: ReadableStream,
|
|
blockId: string,
|
|
selectedOutputs: string[],
|
|
responseFormat?: any
|
|
): ReadableStream {
|
|
const hasResponseFormatSelection = selectedOutputs.some((outputId) => {
|
|
const blockIdForOutput = outputId.includes('_')
|
|
? outputId.split('_')[0]
|
|
: outputId.split('.')[0]
|
|
return blockIdForOutput === blockId && outputId.includes('_')
|
|
})
|
|
|
|
if (!hasResponseFormatSelection || !responseFormat) {
|
|
return originalStream
|
|
}
|
|
|
|
const selectedFields = selectedOutputs
|
|
.filter((outputId) => {
|
|
const blockIdForOutput = outputId.includes('_')
|
|
? outputId.split('_')[0]
|
|
: outputId.split('.')[0]
|
|
return blockIdForOutput === blockId && outputId.includes('_')
|
|
})
|
|
.map((outputId) => outputId.substring(blockId.length + 1))
|
|
|
|
logger.info('Processing streaming response format', {
|
|
blockId,
|
|
selectedFields,
|
|
hasResponseFormat: !!responseFormat,
|
|
selectedFieldsCount: selectedFields.length,
|
|
})
|
|
|
|
return this.createProcessedStream(originalStream, selectedFields, blockId)
|
|
}
|
|
|
|
private createProcessedStream(
|
|
originalStream: ReadableStream,
|
|
selectedFields: string[],
|
|
blockId: string
|
|
): ReadableStream {
|
|
let buffer = ''
|
|
let hasProcessedComplete = false
|
|
|
|
const self = this
|
|
|
|
return new ReadableStream({
|
|
async start(controller) {
|
|
const reader = originalStream.getReader()
|
|
const decoder = new TextDecoder()
|
|
|
|
try {
|
|
while (true) {
|
|
const { done, value } = await reader.read()
|
|
|
|
if (done) {
|
|
if (buffer.trim() && !hasProcessedComplete) {
|
|
self.processCompleteJson(buffer, selectedFields, controller)
|
|
}
|
|
controller.close()
|
|
break
|
|
}
|
|
|
|
const chunk = decoder.decode(value, { stream: true })
|
|
buffer += chunk
|
|
|
|
if (!hasProcessedComplete) {
|
|
const processedChunk = self.processStreamingChunk(buffer, selectedFields)
|
|
|
|
if (processedChunk) {
|
|
controller.enqueue(new TextEncoder().encode(processedChunk))
|
|
hasProcessedComplete = true
|
|
}
|
|
}
|
|
}
|
|
} catch (error) {
|
|
logger.error('Error processing streaming response format:', { error, blockId })
|
|
controller.error(error)
|
|
} finally {
|
|
reader.releaseLock()
|
|
}
|
|
},
|
|
})
|
|
}
|
|
|
|
private processStreamingChunk(buffer: string, selectedFields: string[]): string | null {
|
|
try {
|
|
const parsed = JSON.parse(buffer.trim())
|
|
if (typeof parsed === 'object' && parsed !== null) {
|
|
const results: string[] = []
|
|
for (const field of selectedFields) {
|
|
if (field in parsed) {
|
|
const value = parsed[field]
|
|
const formattedValue = typeof value === 'string' ? value : JSON.stringify(value)
|
|
results.push(formattedValue)
|
|
}
|
|
}
|
|
|
|
if (results.length > 0) {
|
|
const result = results.join('\n')
|
|
return result
|
|
}
|
|
|
|
return null
|
|
}
|
|
} catch (e) {}
|
|
|
|
const openBraces = (buffer.match(/\{/g) || []).length
|
|
const closeBraces = (buffer.match(/\}/g) || []).length
|
|
|
|
if (openBraces > 0 && openBraces === closeBraces) {
|
|
try {
|
|
const parsed = JSON.parse(buffer.trim())
|
|
if (typeof parsed === 'object' && parsed !== null) {
|
|
const results: string[] = []
|
|
for (const field of selectedFields) {
|
|
if (field in parsed) {
|
|
const value = parsed[field]
|
|
const formattedValue = typeof value === 'string' ? value : JSON.stringify(value)
|
|
results.push(formattedValue)
|
|
}
|
|
}
|
|
|
|
if (results.length > 0) {
|
|
const result = results.join('\n')
|
|
return result
|
|
}
|
|
|
|
return null
|
|
}
|
|
} catch (e) {}
|
|
}
|
|
|
|
return null
|
|
}
|
|
|
|
private processCompleteJson(
|
|
buffer: string,
|
|
selectedFields: string[],
|
|
controller: ReadableStreamDefaultController
|
|
): void {
|
|
try {
|
|
const parsed = JSON.parse(buffer.trim())
|
|
if (typeof parsed === 'object' && parsed !== null) {
|
|
const results: string[] = []
|
|
for (const field of selectedFields) {
|
|
if (field in parsed) {
|
|
const value = parsed[field]
|
|
const formattedValue = typeof value === 'string' ? value : JSON.stringify(value)
|
|
results.push(formattedValue)
|
|
}
|
|
}
|
|
|
|
if (results.length > 0) {
|
|
const result = results.join('\n')
|
|
controller.enqueue(new TextEncoder().encode(result))
|
|
}
|
|
}
|
|
} catch (error) {
|
|
logger.warn('Failed to parse complete JSON in streaming processor:', { error })
|
|
}
|
|
}
|
|
}
|
|
|
|
// Shared singleton instance; the processor is stateless between calls
// (all per-stream state lives inside createProcessedStream's closure).
export const streamingResponseFormatProcessor = new StreamingResponseFormatProcessor()