Checkpoint
@@ -46,6 +46,14 @@ class TTLStore<T> {
     return entry.value
   }

+  upsert(id: string, value: T): void {
+    this.gc()
+    this.data.set(id, {
+      value,
+      expiresAt: Date.now() + this.ttlMs,
+    })
+  }
+
   private gc(): void {
     const now = Date.now()
     for (const [key, entry] of this.data.entries()) {
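
The new upsert sweeps expired entries via gc() on every write, so the store stays bounded without a background timer. A minimal sketch of the pattern, assuming a store shaped like the visible hunk (the class below is a paraphrase for illustration, not the full source):

    // Sketch: TTL map with lazy garbage collection on write.
    class SketchTTLStore<T> {
      private data = new Map<string, { value: T; expiresAt: number }>()
      constructor(private ttlMs: number) {}

      upsert(id: string, value: T): void {
        this.gc() // sweep expired entries before inserting
        this.data.set(id, { value, expiresAt: Date.now() + this.ttlMs })
      }

      get(id: string): T | undefined {
        const entry = this.data.get(id)
        if (!entry || entry.expiresAt <= Date.now()) return undefined
        return entry.value
      }

      private gc(): void {
        const now = Date.now()
        for (const [key, entry] of this.data.entries()) {
          if (entry.expiresAt <= now) this.data.delete(key)
        }
      }
    }

    // const store = new SketchTTLStore<string>(60_000)
    // store.upsert('pack-1', 'payload') // refreshes the TTL on every write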
@@ -156,6 +164,37 @@ export async function getContextPack(id: string): Promise<WorkflowContextPack |
   return contextPackStore.get(id)
 }

+export async function updateContextPack(
+  id: string,
+  patch: Partial<WorkflowContextPack>
+): Promise<WorkflowContextPack | null> {
+  const existing = await getContextPack(id)
+  if (!existing) return null
+  const merged: WorkflowContextPack = {
+    ...existing,
+    ...patch,
+    workflowState: patch.workflowState || existing.workflowState,
+    schemasByType: patch.schemasByType || existing.schemasByType,
+    schemaRefsByType: patch.schemaRefsByType || existing.schemaRefsByType,
+    summary: patch.summary || existing.summary,
+  }
+
+  if (!getRedisClient()) {
+    contextPackStore.upsert(id, merged)
+    return merged
+  }
+
+  try {
+    await writeRedisJson(getContextRedisKey(id), merged)
+    contextPackStore.upsert(id, merged)
+    return merged
+  } catch (error) {
+    logger.warn('Redis update failed for workflow context pack, using memory fallback', { error })
+    contextPackStore.upsert(id, merged)
+    return merged
+  }
+}
+
 export async function saveProposal(proposal: WorkflowChangeProposal): Promise<string> {
   if (!getRedisClient()) {
     return proposalStore.set(proposal)

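Note the merge rule above: the patch is spread over the existing pack, then the four large fields are re-pinned with `||` fallbacks so an undefined (or empty) patch field cannot clobber stored data. A small illustration with plain objects:

    // Sketch of the merge rule (field names taken from the hunk).
    const existing = { summary: { blockCount: 3 }, schemasByType: { agent: {} } }
    const patch = { schemasByType: { agent: {}, loop: {} }, summary: undefined }

    const merged = {
      ...existing,
      ...patch, // would set summary to undefined...
      summary: patch.summary || existing.summary, // ...so the fallback restores it
      schemasByType: patch.schemasByType || existing.schemasByType,
    }
    // merged.summary -> { blockCount: 3 }; merged.schemasByType -> { agent, loop }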
@@ -14,6 +14,10 @@ import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
 import { getBlock } from '@/blocks/registry'
 import { getTool } from '@/tools/utils'
 import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
+import { normalizeName, parseReferencePath, REFERENCE } from '@/executor/constants'
+import { getBlockSchema } from '@/executor/utils/block-data'
+import { InvalidFieldError, type OutputSchema, resolveBlockReference } from '@/executor/utils/block-reference'
+import { replaceValidReferences } from '@/executor/utils/reference-validation'
 import {
   getContextPack,
   getProposal,
@@ -25,6 +29,7 @@ import { applyOperationsToWorkflowState } from './workflow-operations/engine'
 import { preValidateCredentialInputs, validateInputsForBlock } from './workflow-operations/validation'
 import { workflowVerifyServerTool } from './workflow-verify'
 import { hashWorkflowState, loadWorkflowStateFromDb } from './workflow-state'
+import type { SerializedBlock } from '@/serializer/types'

 const logger = createLogger('WorkflowChangeServerTool')

@@ -116,7 +121,7 @@ const RemoveBlockMutationSchema = z
 const InsertIntoSubflowMutationSchema = z
   .object({
     action: z.literal('insert_into_subflow'),
-    target: TargetSchema,
+    target: TargetSchema.optional(),
     subflow: TargetSchema,
     type: z.string().optional(),
     name: z.string().optional(),
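
Making target optional lets a mutation create a child block directly inside the subflow instead of re-parenting an existing one. A quick check of the relaxed schema, assuming stock zod (TargetSchema is stubbed here for illustration):

    import { z } from 'zod'

    // Hypothetical stand-in for TargetSchema, just to exercise .optional().
    const Target = z.object({ blockId: z.string().optional(), alias: z.string().optional() })
    const Mutation = z.object({
      action: z.literal('insert_into_subflow'),
      target: Target.optional(), // previously required
      subflow: Target,
      type: z.string().optional(),
      name: z.string().optional(),
    })

    // Both shapes now parse: create-in-place with type+name, or with an anchor target.
    Mutation.parse({ action: 'insert_into_subflow', subflow: { alias: 'loop1' }, type: 'agent', name: 'Agent 1' })
    Mutation.parse({ action: 'insert_into_subflow', target: { alias: 'agent1' }, subflow: { alias: 'loop1' } })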
@@ -335,6 +340,10 @@ type ConnectionTarget = {
 type ConnectionState = Map<string, Map<string, ConnectionTarget[]>>

 const UUID_REGEX = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i
+const CONTAINER_INPUT_FIELDS: Record<string, string[]> = {
+  loop: ['loopType', 'iterations', 'collection', 'condition'],
+  parallel: ['parallelType', 'count', 'collection'],
+}

 function createDraftBlockId(_seed?: string): string {
   return crypto.randomUUID()
@@ -354,6 +363,215 @@ function stableUnique(values: string[]): string[] {
   return [...new Set(values.filter(Boolean))]
 }

+function isContainerBlockType(blockType: string | null | undefined): boolean {
+  return blockType === 'loop' || blockType === 'parallel'
+}
+
+type ReferenceValidationContext = {
+  blockNameMapping: Record<string, string>
+  blockOutputSchemas: Record<string, OutputSchema>
+}
+
+function createSerializedBlockForReferenceValidation(
+  blockId: string,
+  block: Record<string, any>
+): SerializedBlock | null {
+  const blockType = typeof block.type === 'string' ? block.type : ''
+  if (!blockType) {
+    return null
+  }
+
+  const params = Object.fromEntries(
+    Object.entries(block.subBlocks || {}).map(([subBlockId, subBlock]) => [
+      subBlockId,
+      (subBlock as { value?: unknown })?.value,
+    ])
+  )
+
+  return {
+    id: blockId,
+    position: { x: 0, y: 0 },
+    config: {
+      tool: blockType,
+      params,
+    },
+    inputs: {},
+    outputs: {},
+    metadata: {
+      id: blockType,
+      name: typeof block.name === 'string' ? block.name : blockId,
+    },
+    enabled: typeof block.enabled === 'boolean' ? block.enabled : true,
+  }
+}
+
+function buildReferenceValidationContext(workflowState: {
+  blocks: Record<string, any>
+}): ReferenceValidationContext {
+  const blockNameMapping: Record<string, string> = {}
+  const blockOutputSchemas: Record<string, OutputSchema> = {}
+
+  for (const [blockId, block] of Object.entries(workflowState.blocks || {})) {
+    const serializedBlock = createSerializedBlockForReferenceValidation(
+      blockId,
+      block as Record<string, any>
+    )
+    if (!serializedBlock) {
+      continue
+    }
+
+    blockNameMapping[normalizeName(blockId)] = blockId
+    const blockName = String((block as Record<string, unknown>).name || '').trim()
+    if (blockName) {
+      blockNameMapping[normalizeName(blockName)] = blockId
+    }
+
+    const schema = getBlockSchema(serializedBlock)
+    if (schema && Object.keys(schema).length > 0) {
+      blockOutputSchemas[blockId] = schema
+    }
+  }
+
+  return {
+    blockNameMapping,
+    blockOutputSchemas,
+  }
+}
+
+function extractLikelyReferences(value: string): string[] {
+  const references = new Set<string>()
+  replaceValidReferences(value, (match) => {
+    references.add(match.trim())
+    return match
+  })
+  return [...references]
+}
+
+function validateReference(
+  reference: string,
+  context: ReferenceValidationContext
+): string | null {
+  const trimmed = reference.trim()
+  const parts = parseReferencePath(trimmed)
+  if (parts.length === 0) {
+    return null
+  }
+
+  const [head, ...pathParts] = parts
+  if (!head) {
+    return null
+  }
+
+  // Keep variable/loop/parallel references warning-free at compile time because
+  // they can be context-dependent and <...> may also be used for non-variable text.
+  if (
+    head === REFERENCE.PREFIX.VARIABLE ||
+    head === REFERENCE.PREFIX.LOOP ||
+    head === REFERENCE.PREFIX.PARALLEL
+  ) {
+    return null
+  }
+
+  try {
+    const result = resolveBlockReference(head, pathParts, {
+      blockNameMapping: context.blockNameMapping,
+      blockData: {},
+      blockOutputSchemas: context.blockOutputSchemas,
+    })
+    if (!result) {
+      return `reference "${trimmed}" points to unknown block "${head}"`
+    }
+    return null
+  } catch (error) {
+    if (error instanceof InvalidFieldError) {
+      return (
+        `reference "${trimmed}" has invalid field path "${error.fieldPath}" ` +
+        `for block "${error.blockName}". ` +
+        `Available fields: ${error.availableFields.length > 0 ? error.availableFields.join(', ') : 'none'}`
+      )
+    }
+    return `reference "${trimmed}" could not be validated`
+  }
+}
+
+function collectReferenceWarningsForValue(params: {
+  value: unknown
+  location: string
+  context: ReferenceValidationContext
+  sink: Set<string>
+}): void {
+  const { value, location, context, sink } = params
+  if (typeof value === 'string') {
+    const references = extractLikelyReferences(value)
+    for (const reference of references) {
+      const warning = validateReference(reference, context)
+      if (warning) {
+        sink.add(`${location}: ${warning}`)
+      }
+    }
+    return
+  }
+
+  if (Array.isArray(value)) {
+    value.forEach((item, index) => {
+      collectReferenceWarningsForValue({
+        value: item,
+        location: `${location}[${index}]`,
+        context,
+        sink,
+      })
+    })
+    return
+  }
+
+  if (value && typeof value === 'object') {
+    for (const [key, child] of Object.entries(value as Record<string, unknown>)) {
+      collectReferenceWarningsForValue({
+        value: child,
+        location: `${location}.${key}`,
+        context,
+        sink,
+      })
+    }
+  }
+}
+
+function collectReferenceWarningsForChangeSpec(params: {
+  changeSpec: ChangeSpec
+  workflowState: { blocks: Record<string, any> }
+}): string[] {
+  const { changeSpec, workflowState } = params
+  const context = buildReferenceValidationContext(workflowState)
+  const warnings = new Set<string>()
+
+  for (const [mutationIndex, mutation] of (changeSpec.mutations || []).entries()) {
+    if (mutation.action === 'ensure_block' || mutation.action === 'insert_into_subflow') {
+      if (mutation.inputs) {
+        collectReferenceWarningsForValue({
+          value: mutation.inputs,
+          location: `mutations[${mutationIndex}].inputs`,
+          context,
+          sink: warnings,
+        })
+      }
+      continue
+    }
+
+    if (mutation.action === 'patch_block') {
+      for (const [changeIndex, change] of mutation.changes.entries()) {
+        collectReferenceWarningsForValue({
+          value: change.value,
+          location: `mutations[${mutationIndex}].changes[${changeIndex}].value`,
+          context,
+          sink: warnings,
+        })
+      }
+    }
+  }
+
+  return [...warnings]
+}
+
 function normalizeAcceptance(assertions: ChangeSpec['acceptance'] | undefined): string[] {
   if (!Array.isArray(assertions)) return []
   const toCanonicalAssertion = (
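
collectReferenceWarningsForValue recurses through strings, arrays, and objects, tagging each finding with a JSON-path-style location so a diagnostic can point at the exact input. A self-contained sketch of that traversal, with a stubbed validator standing in for the schema-aware resolveBlockReference:

    // Stub validator: flag any <block.field> whose block is not in a known set.
    const known = new Set(['agent1'])
    const refPattern = /<([a-z0-9]+)\.[a-z0-9.]+>/gi

    function collect(value: unknown, location: string, sink: Set<string>): void {
      if (typeof value === 'string') {
        for (const match of value.matchAll(refPattern)) {
          if (!known.has(match[1])) sink.add(`${location}: unknown block "${match[1]}"`)
        }
        return
      }
      if (Array.isArray(value)) {
        value.forEach((item, index) => collect(item, `${location}[${index}]`, sink))
        return
      }
      if (value && typeof value === 'object') {
        for (const [key, child] of Object.entries(value)) collect(child, `${location}.${key}`, sink)
      }
    }

    const warnings = new Set<string>()
    collect({ prompt: 'Use <agent1.content> and <missing.output>' }, 'mutations[0].inputs', warnings)
    // -> mutations[0].inputs.prompt: unknown block "missing"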
@@ -633,10 +851,37 @@ function findMatchingBlockId(
   workflowState: { blocks: Record<string, any> },
   target: TargetRef
 ): string | null {
+  const normalizeToken = (value: string): string =>
+    value
+      .toLowerCase()
+      .replace(/[^a-z0-9]+/g, '')
+      .trim()
+
   if (target.blockId && workflowState.blocks[target.blockId]) {
     return target.blockId
   }

+  if (target.alias) {
+    const aliasNorm = normalizeToken(target.alias)
+    if (aliasNorm) {
+      const aliasMatches = Object.entries(workflowState.blocks || {}).filter(([blockId, block]) => {
+        const blockName = String((block as Record<string, unknown>).name || '')
+        const blockIdNorm = normalizeToken(blockId)
+        const blockNameNorm = normalizeToken(blockName)
+        return blockIdNorm === aliasNorm || blockNameNorm === aliasNorm
+      })
+      if (aliasMatches.length === 1) {
+        return aliasMatches[0][0]
+      }
+      if (aliasMatches.length > 1) {
+        throw new Error(
+          `ambiguous_target: alias "${target.alias}" resolved to ${aliasMatches.length} blocks ` +
+            `(${aliasMatches.map(([id]) => id).join(', ')})`
+        )
+      }
+    }
+  }
+
   if (target.match) {
     const type = target.match.type
     const name = target.match.name?.toLowerCase()
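
Alias matching strips case and punctuation before comparing, and refuses to guess when more than one block normalizes to the same token. Roughly:

    const normalizeToken = (value: string): string =>
      value.toLowerCase().replace(/[^a-z0-9]+/g, '').trim()

    // 'Send Email!', 'send_email', and 'SendEmail' all collapse to the same token,
    // so any of them matches a block named 'Send Email'.
    normalizeToken('Send Email!') === normalizeToken('send_email') // true

    // If two blocks share the normalized name, resolution throws an
    // 'ambiguous_target' error instead of picking one arbitrarily.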
@@ -1009,6 +1254,9 @@ async function compileChangeSpec(params: {
     if (isSchemaLoaded(blockType)) {
       return true
     }
+    if (isContainerBlockType(blockType)) {
+      return true
+    }
     // Intelligence-first fallback: compiler can still validate against registry schema
     // even when context pack did not include that type.
     if (getBlock(blockType)) {
@@ -1298,12 +1546,16 @@ async function compileChangeSpec(params: {
       return
     }
     const blockConfig = getBlock(blockType)
-    if (!blockConfig) {
+    const knownInputIds = new Set(
+      blockConfig
+        ? (blockConfig.subBlocks || []).map((subBlock) => subBlock.id)
+        : CONTAINER_INPUT_FIELDS[blockType] || []
+    )
+    const allowsDynamicInputs = isContainerBlockType(blockType)
+    if (!blockConfig && !allowsDynamicInputs) {
       diagnostics.push(`patch_block on ${targetId} failed: unknown block type "${blockType}"`)
       return
     }
-    const knownInputIds = new Set((blockConfig.subBlocks || []).map((subBlock) => subBlock.id))
-    const allowsDynamicInputs = blockType === 'loop' || blockType === 'parallel'
     if (!allowsDynamicInputs && !knownInputIds.has(inputKey)) {
       const knownFields = [...knownInputIds].sort()
       const preview = knownFields.slice(0, 12).join(', ')
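
The reworked check lets loop/parallel containers, which have no registry entry, validate against the hard-coded CONTAINER_INPUT_FIELDS list instead of failing as unknown types. A self-contained sketch with a toy registry:

    const CONTAINER_INPUT_FIELDS: Record<string, string[]> = {
      loop: ['loopType', 'iterations', 'collection', 'condition'],
      parallel: ['parallelType', 'count', 'collection'],
    }
    // Toy registry standing in for getBlock(); containers are absent by design.
    const registry: Record<string, { subBlocks: Array<{ id: string }> }> = {
      agent: { subBlocks: [{ id: 'model' }, { id: 'prompt' }] },
    }

    function knownInputIds(blockType: string): Set<string> {
      const blockConfig = registry[blockType]
      return new Set(
        blockConfig
          ? blockConfig.subBlocks.map((subBlock) => subBlock.id)
          : CONTAINER_INPUT_FIELDS[blockType] || []
      )
    }
    // knownInputIds('agent') -> { model, prompt }
    // knownInputIds('loop')  -> { loopType, iterations, collection, condition }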
@@ -1581,23 +1833,30 @@ async function compileChangeSpec(params: {
       continue
     }

-    const targetId = resolveTarget(mutation.target, true)
-    if (!targetId) {
-      diagnostics.push('insert_into_subflow requires a resolvable target block')
+    const targetId = mutation.target ? resolveTarget(mutation.target, true) : null
+    if (mutation.target && !targetId) {
+      diagnostics.push(
+        'insert_into_subflow target could not be resolved. Use target.alias/target.match, ' +
+          'or omit target and provide type+name to create directly inside the subflow.'
+      )
       continue
     }

-    const existingBlock = workingState.blocks[targetId]
-    if (existingBlock) {
+    const existingBlock = targetId ? workingState.blocks[targetId] : undefined
+    if (targetId && existingBlock) {
+      const existingTargetId = targetId
       const existingType =
-        String(existingBlock.type || '') || plannedBlockTypes.get(targetId) || mutation.type || ''
+        String(existingBlock.type || '') ||
+        plannedBlockTypes.get(existingTargetId) ||
+        mutation.type ||
+        ''
       if (!existingType) {
-        diagnostics.push(`insert_into_subflow on ${targetId} failed: unknown block type`)
+        diagnostics.push(`insert_into_subflow on ${existingTargetId} failed: unknown block type`)
         continue
       }
       const existingName = String(mutation.name || existingBlock.name || '').trim()
       if (!existingName) {
-        diagnostics.push(`insert_into_subflow on ${targetId} failed: missing block name`)
+        diagnostics.push(`insert_into_subflow on ${existingTargetId} failed: missing block name`)
         continue
       }

@@ -1608,7 +1867,7 @@ async function compileChangeSpec(params: {
       }
       if (mutation.inputs) {
         const validatedInputs = normalizeInputsWithSchema(
-          targetId,
+          existingTargetId,
           existingType,
           mutation.inputs,
           'patch_block'
@@ -1623,21 +1882,22 @@ async function compileChangeSpec(params: {

       operations.push({
         operation_type: 'insert_into_subflow',
-        block_id: targetId,
+        block_id: existingTargetId,
         params: insertParams,
       })
-      workingState.blocks[targetId] = {
+      workingState.blocks[existingTargetId] = {
         ...existingBlock,
         data: { ...(existingBlock.data || {}), parentId: subflowId, extent: 'parent' },
       }
-      touchedBlocks.add(targetId)
+      touchedBlocks.add(existingTargetId)
       touchedBlocks.add(subflowId)
       continue
     }

     if (!mutation.type || !mutation.name) {
       diagnostics.push(
-        `insert_into_subflow for "${targetId}" requires type and name when creating a new child block`
+        `insert_into_subflow requires type and name when creating a new child block` +
+          (targetId ? ` (target: "${targetId}")` : '')
       )
       continue
     }
@@ -1655,7 +1915,7 @@ async function compileChangeSpec(params: {
     let normalizedInputs: Record<string, any> | undefined
     if (mutation.inputs) {
       const validatedInputs = normalizeInputsWithSchema(
-        targetId,
+        targetId || blockId,
         mutation.type,
         mutation.inputs,
         'ensure_block'
@@ -1700,7 +1960,9 @@ async function compileChangeSpec(params: {
       aliasMap.set(mutation.target.alias, blockId)
       recordResolved(mutation.target.alias, blockId)
     }
-    recordResolved(targetId, blockId)
+    if (targetId) {
+      recordResolved(targetId, blockId)
+    }
     continue
   }

@@ -1843,6 +2105,12 @@ async function compileChangeSpec(params: {
     })
   }

+  const referenceWarnings = collectReferenceWarningsForChangeSpec({
+    changeSpec,
+    workflowState: workingState,
+  })
+  warnings.push(...referenceWarnings)
+
   return {
     operations,
     warnings,

@@ -2,7 +2,8 @@ import { createLogger } from '@sim/logger'
 import { z } from 'zod'
 import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
 import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
-import { getContextPack, saveContextPack } from './change-store'
+import { getBlock } from '@/blocks/registry'
+import { getContextPack, saveContextPack, updateContextPack } from './change-store'
 import {
   buildSchemasByType,
   getAllKnownBlockTypes,
@@ -31,6 +32,80 @@ const WorkflowContextExpandInputSchema = z.object({

 type WorkflowContextExpandParams = z.infer<typeof WorkflowContextExpandInputSchema>

+const BLOCK_TYPE_ALIAS_MAP: Record<string, string> = {
+  start: 'start_trigger',
+  starttrigger: 'start_trigger',
+  starter: 'start_trigger',
+  trigger: 'start_trigger',
+  loop: 'loop',
+  parallel: 'parallel',
+  parallelai: 'parallel',
+  hitl: 'human_in_the_loop',
+  humanintheloop: 'human_in_the_loop',
+  routerv2: 'router_v2',
+}
+
+function normalizeToken(value: string): string {
+  return value
+    .trim()
+    .toLowerCase()
+    .replace(/[^a-z0-9]+/g, '')
+}
+
+function buildBlockTypeIndex(knownTypes: string[]): Map<string, string> {
+  const index = new Map<string, string>()
+  for (const blockType of knownTypes) {
+    const canonicalType = String(blockType || '').trim()
+    if (!canonicalType) continue
+
+    const normalizedType = normalizeToken(canonicalType)
+    if (normalizedType && !index.has(normalizedType)) {
+      index.set(normalizedType, canonicalType)
+    }
+
+    const blockConfig = getBlock(canonicalType)
+    const displayName = String(blockConfig?.name || '').trim()
+    const normalizedDisplayName = normalizeToken(displayName)
+    if (normalizedDisplayName && !index.has(normalizedDisplayName)) {
+      index.set(normalizedDisplayName, canonicalType)
+    }
+  }
+  return index
+}
+
+function resolveBlockTypes(
+  requestedBlockTypes: string[],
+  knownTypes: string[]
+): { resolved: string[]; unresolved: string[] } {
+  const index = buildBlockTypeIndex(knownTypes)
+  const resolved = new Set<string>()
+  const unresolved = new Set<string>()
+
+  for (const rawType of requestedBlockTypes) {
+    const normalized = normalizeToken(String(rawType || ''))
+    if (!normalized) continue
+
+    const aliasResolved = BLOCK_TYPE_ALIAS_MAP[normalized]
+    if (aliasResolved) {
+      resolved.add(aliasResolved)
+      continue
+    }
+
+    const direct = index.get(normalized)
+    if (direct) {
+      resolved.add(direct)
+      continue
+    }
+
+    unresolved.add(String(rawType))
+  }
+
+  return {
+    resolved: [...resolved],
+    unresolved: [...unresolved],
+  }
+}
+
 function parseSchemaRefToBlockType(schemaRef: string): string | null {
   if (!schemaRef) return null
   const [blockType] = schemaRef.split('@')
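
Requested type names are normalized, then resolved first through the hand-maintained alias map and then through a registry index that also covers display names, so 'Start Trigger', 'starter', and 'start_trigger' all land on the canonical ID. Roughly:

    const ALIASES: Record<string, string> = { starter: 'start_trigger', start: 'start_trigger' }
    const normalize = (v: string) => v.trim().toLowerCase().replace(/[^a-z0-9]+/g, '')

    function resolve(raw: string, index: Map<string, string>): string | null {
      const token = normalize(raw)
      return ALIASES[token] ?? index.get(token) ?? null
    }

    // Index built from canonical IDs plus registry display names.
    const index = new Map([['starttrigger', 'start_trigger'], ['routerv2', 'router_v2']])
    resolve('Start Trigger', index) // 'start_trigger' via the index
    resolve('starter', index)       // 'start_trigger' via the alias map
    resolve('mystery', index)       // null -> reported in unresolved[]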
@@ -68,20 +143,23 @@ export const workflowContextGetServerTool: BaseServerTool<WorkflowContextGetPara
     const { workflowState } = await loadWorkflowStateFromDb(params.workflowId)
     const snapshotHash = hashWorkflowState(workflowState as unknown as Record<string, unknown>)

-    const blockTypesInWorkflow = Object.values(workflowState.blocks || {}).map((block: any) =>
+    const knownTypes = getAllKnownBlockTypes()
+    const blockTypesInWorkflowRaw = Object.values(workflowState.blocks || {}).map((block: any) =>
       String(block?.type || '')
     )
-    const requestedTypes = params.includeBlockTypes || []
+    const requestedTypesRaw = params.includeBlockTypes || []
+    const resolvedWorkflowTypes = resolveBlockTypes(blockTypesInWorkflowRaw, knownTypes).resolved
+    const resolvedRequestedTypes = resolveBlockTypes(requestedTypesRaw, knownTypes)
     const schemaMode =
       params.includeAllSchemas === true ? 'all' : (params.schemaMode || 'minimal')
     const candidateTypes =
       schemaMode === 'all'
-        ? getAllKnownBlockTypes()
+        ? knownTypes
         : schemaMode === 'workflow'
-          ? [...blockTypesInWorkflow, ...requestedTypes]
-          : [...requestedTypes]
+          ? [...resolvedWorkflowTypes, ...resolvedRequestedTypes.resolved]
+          : [...resolvedRequestedTypes.resolved]
     const { schemasByType, schemaRefsByType } = buildSchemasByType(candidateTypes)
-    const suggestedSchemaTypes = [...new Set(blockTypesInWorkflow.filter(Boolean))]
+    const suggestedSchemaTypes = [...new Set(resolvedWorkflowTypes.filter(Boolean))]

     const summary = summarizeWorkflowState(workflowState)
     const packId = await saveContextPack({
@@ -115,6 +193,8 @@ export const workflowContextGetServerTool: BaseServerTool<WorkflowContextGetPara
       schemaRefsByType,
       availableBlockCatalog: buildAvailableBlockCatalog(schemaRefsByType),
       suggestedSchemaTypes,
+      unresolvedRequestedBlockTypes: resolvedRequestedTypes.unresolved,
+      knownBlockTypes: knownTypes,
       inScopeSchemas: schemasByType,
     }
   },
@@ -142,17 +222,38 @@ export const workflowContextExpandServerTool: BaseServerTool<WorkflowContextExpa
       throw new Error(authorization.message || 'Unauthorized workflow access')
     }

-    const requestedBlockTypes = new Set<string>()
+    const knownTypes = getAllKnownBlockTypes()
+    const requestedBlockTypesRaw = new Set<string>()
     for (const blockType of params.blockTypes || []) {
-      if (blockType) requestedBlockTypes.add(blockType)
+      if (blockType) requestedBlockTypesRaw.add(String(blockType))
     }
     for (const schemaRef of params.schemaRefs || []) {
       const blockType = parseSchemaRefToBlockType(schemaRef)
-      if (blockType) requestedBlockTypes.add(blockType)
+      if (blockType) requestedBlockTypesRaw.add(blockType)
     }

-    const typesToExpand = [...requestedBlockTypes]
+    const resolvedTypes = resolveBlockTypes([...requestedBlockTypesRaw], knownTypes)
+    const typesToExpand = resolvedTypes.resolved
     const { schemasByType, schemaRefsByType } = buildSchemasByType(typesToExpand)
+    const mergedSchemasByType = {
+      ...(contextPack.schemasByType || {}),
+      ...schemasByType,
+    }
+    const mergedSchemaRefsByType = {
+      ...(contextPack.schemaRefsByType || {}),
+      ...schemaRefsByType,
+    }
+    const updatedContextPack = await updateContextPack(params.contextPackId, {
+      schemasByType: mergedSchemasByType,
+      schemaRefsByType: mergedSchemaRefsByType,
+    })
+    const warnings =
+      resolvedTypes.unresolved.length > 0
+        ? [
+            `Unknown block type(s): ${resolvedTypes.unresolved.join(', ')}. ` +
+              'Use known block type IDs from knownBlockTypes.',
+          ]
+        : []

     return {
       success: true,
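
Expansion merges the freshly built schema maps over whatever the pack already holds (the later spread wins on key collisions) and persists the result through updateContextPack, so later compile calls see the superset. In miniature:

    const packSchemas = { agent: { v: 1 } }
    const expanded = { loop: { v: 1 }, agent: { v: 2 } }

    const mergedSchemasByType = { ...packSchemas, ...expanded }
    // -> { agent: { v: 2 }, loop: { v: 1 } }: freshly built schemas win on collision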
@@ -161,6 +262,11 @@ export const workflowContextExpandServerTool: BaseServerTool<WorkflowContextExpa
       snapshotHash: contextPack.snapshotHash,
       schemasByType,
       schemaRefsByType,
+      loadedSchemaTypes: Object.keys(updatedContextPack?.schemasByType || mergedSchemasByType).sort(),
+      resolvedBlockTypes: resolvedTypes.resolved,
+      unresolvedBlockTypes: resolvedTypes.unresolved,
+      knownBlockTypes: knownTypes,
+      warnings,
     }
   },
 }

@@ -8,6 +8,7 @@ import { getAllBlockTypes, getBlock } from '@/blocks/registry'
 import type { SubBlockConfig } from '@/blocks/types'

 const logger = createLogger('WorkflowContextState')
+const CONTAINER_BLOCK_TYPES = ['loop', 'parallel'] as const

 function stableSortValue(value: any): any {
   if (Array.isArray(value)) {
@@ -86,6 +87,146 @@ function serializeSubBlock(subBlock: SubBlockConfig): Record<string, unknown> {
 }

 function serializeBlockSchema(blockType: string): Record<string, unknown> | null {
+  if (blockType === 'loop') {
+    return {
+      blockType: 'loop',
+      blockName: 'Loop',
+      category: 'blocks',
+      triggerAllowed: false,
+      hasTriggersConfig: false,
+      subBlocks: [
+        {
+          id: 'loopType',
+          type: 'dropdown',
+          title: 'Loop Type',
+          description: 'Loop mode: for, forEach, while, doWhile',
+          mode: null,
+          placeholder: null,
+          hidden: false,
+          multiSelect: false,
+          required: false,
+          hasDynamicOptions: false,
+          options: ['for', 'forEach', 'while', 'doWhile'],
+          defaultValue: 'for',
+          min: null,
+          max: null,
+        },
+        {
+          id: 'iterations',
+          type: 'short-input',
+          title: 'Iterations',
+          description: 'Iteration count for for-loops',
+          mode: null,
+          placeholder: null,
+          hidden: false,
+          multiSelect: false,
+          required: false,
+          hasDynamicOptions: false,
+          options: null,
+          defaultValue: 1,
+          min: 1,
+          max: null,
+        },
+        {
+          id: 'collection',
+          type: 'long-input',
+          title: 'Collection',
+          description: 'Collection expression for forEach loops',
+          mode: null,
+          placeholder: null,
+          hidden: false,
+          multiSelect: false,
+          required: false,
+          hasDynamicOptions: false,
+          options: null,
+          defaultValue: null,
+          min: null,
+          max: null,
+        },
+        {
+          id: 'condition',
+          type: 'long-input',
+          title: 'Condition',
+          description: 'Condition expression for while/doWhile loops',
+          mode: null,
+          placeholder: null,
+          hidden: false,
+          multiSelect: false,
+          required: false,
+          hasDynamicOptions: false,
+          options: null,
+          defaultValue: null,
+          min: null,
+          max: null,
+        },
+      ],
+      outputKeys: ['index', 'item', 'items'],
+      longDescription: null,
+    }
+  }
+
+  if (blockType === 'parallel') {
+    return {
+      blockType: 'parallel',
+      blockName: 'Parallel',
+      category: 'blocks',
+      triggerAllowed: false,
+      hasTriggersConfig: false,
+      subBlocks: [
+        {
+          id: 'parallelType',
+          type: 'dropdown',
+          title: 'Parallel Type',
+          description: 'Parallel mode: count or collection',
+          mode: null,
+          placeholder: null,
+          hidden: false,
+          multiSelect: false,
+          required: false,
+          hasDynamicOptions: false,
+          options: ['count', 'collection'],
+          defaultValue: 'count',
+          min: null,
+          max: null,
+        },
+        {
+          id: 'count',
+          type: 'short-input',
+          title: 'Count',
+          description: 'Branch count when parallelType is count',
+          mode: null,
+          placeholder: null,
+          hidden: false,
+          multiSelect: false,
+          required: false,
+          hasDynamicOptions: false,
+          options: null,
+          defaultValue: 1,
+          min: 1,
+          max: null,
+        },
+        {
+          id: 'collection',
+          type: 'long-input',
+          title: 'Collection',
+          description: 'Collection expression when parallelType is collection',
+          mode: null,
+          placeholder: null,
+          hidden: false,
+          multiSelect: false,
+          required: false,
+          hasDynamicOptions: false,
+          options: null,
+          defaultValue: null,
+          min: null,
+          max: null,
+        },
+      ],
+      outputKeys: ['index', 'currentItem', 'items'],
+      longDescription: null,
+    }
+  }
+
   const blockConfig = getBlock(blockType)
   if (!blockConfig) return null

@@ -194,23 +335,146 @@ export function summarizeWorkflowState(workflowState: {
   loops: Record<string, any>
   parallels: Record<string, any>
 }): Record<string, unknown> {
+  const MAX_BLOCK_INVENTORY = 160
+  const MAX_EDGE_INVENTORY = 240
   const blocks = workflowState.blocks || {}
   const edges = workflowState.edges || []
   const blockTypes: Record<string, number> = {}
   const triggerBlocks: Array<{ id: string; name: string; type: string }> = []
+  const blockInventoryRaw: Array<{
+    id: string
+    name: string
+    type: string
+    parentId: string | null
+    triggerMode: boolean
+    enabled: boolean
+  }> = []
+
+  const normalizeReferenceToken = (value: string): string =>
+    value
+      .toLowerCase()
+      .replace(/[^a-z0-9]+/g, '')
+      .trim()
+
+  const dedupeStrings = (values: string[]): string[] => [...new Set(values.filter(Boolean))]
+  const startOutputKeys = ['input', 'files', 'conversationId']
+  const duplicateNameIndex = new Map<string, { name: string; blockIds: string[] }>()

   for (const [blockId, block] of Object.entries(blocks)) {
-    const blockType = String((block as Record<string, unknown>).type || 'unknown')
+    const blockRecord = block as Record<string, unknown>
+    const dataRecord = (blockRecord.data as Record<string, unknown> | undefined) || undefined
+    const blockType = String(blockRecord.type || 'unknown')
+    const blockName = String(blockRecord.name || blockType)
+    const parentId = String(dataRecord?.parentId || '').trim() || null
+    const normalizedName = normalizeReferenceToken(blockName)
+
     blockTypes[blockType] = (blockTypes[blockType] || 0) + 1
-    if ((block as Record<string, unknown>).triggerMode === true) {
+    if (blockRecord.triggerMode === true) {
       triggerBlocks.push({
         id: blockId,
-        name: String((block as Record<string, unknown>).name || blockType),
+        name: blockName,
         type: blockType,
       })
     }
+
+    blockInventoryRaw.push({
+      id: blockId,
+      name: blockName,
+      type: blockType,
+      parentId,
+      triggerMode: blockRecord.triggerMode === true,
+      enabled: blockRecord.enabled !== false,
+    })
+
+    if (normalizedName) {
+      const existing = duplicateNameIndex.get(normalizedName)
+      if (existing) {
+        existing.blockIds.push(blockId)
+      } else {
+        duplicateNameIndex.set(normalizedName, { name: blockName, blockIds: [blockId] })
+      }
+    }
   }

+  const blockInventory = [...blockInventoryRaw]
+    .sort((a, b) => a.name.localeCompare(b.name) || a.id.localeCompare(b.id))
+    .slice(0, MAX_BLOCK_INVENTORY)
+  const blockInventoryTruncated = blockInventoryRaw.length > MAX_BLOCK_INVENTORY
+
+  const blockNameById = new Map(blockInventoryRaw.map((entry) => [entry.id, entry.name]))
+  const edgeInventoryRaw = edges.map((edge: any) => {
+    const source = String(edge.source || '')
+    const target = String(edge.target || '')
+    const sourceHandle = String(edge.sourceHandle || '').trim() || null
+    const targetHandle = String(edge.targetHandle || '').trim() || null
+    return {
+      source,
+      sourceName: blockNameById.get(source) || source,
+      sourceHandle,
+      target,
+      targetName: blockNameById.get(target) || target,
+      targetHandle,
+    }
+  })
+  const edgeInventory = edgeInventoryRaw
+    .sort((a, b) => {
+      const bySource = a.sourceName.localeCompare(b.sourceName)
+      if (bySource !== 0) return bySource
+      const byTarget = a.targetName.localeCompare(b.targetName)
+      if (byTarget !== 0) return byTarget
+      return a.source.localeCompare(b.source)
+    })
+    .slice(0, MAX_EDGE_INVENTORY)
+  const edgeInventoryTruncated = edgeInventoryRaw.length > MAX_EDGE_INVENTORY
+
+  const duplicateBlockNames = [...duplicateNameIndex.values()]
+    .filter((entry) => entry.blockIds.length > 1)
+    .map((entry) => ({
+      name: entry.name,
+      count: entry.blockIds.length,
+      blockIds: entry.blockIds.sort(),
+    }))
+    .sort((a, b) => b.count - a.count || a.name.localeCompare(b.name))
+
+  const subflowChildrenMap = new Map<string, string[]>()
+  for (const block of blockInventoryRaw) {
+    if (!block.parentId) continue
+    const existing = subflowChildrenMap.get(block.parentId) || []
+    existing.push(block.id)
+    subflowChildrenMap.set(block.parentId, existing)
+  }
+  const subflowChildren = [...subflowChildrenMap.entries()]
+    .map(([subflowId, childBlockIds]) => {
+      const subflowBlock = blockInventoryRaw.find((block) => block.id === subflowId)
+      return {
+        subflowId,
+        subflowName: subflowBlock?.name || subflowId,
+        subflowType: subflowBlock?.type || 'unknown',
+        childBlockIds: childBlockIds.sort(),
+      }
+    })
+    .sort((a, b) => a.subflowName.localeCompare(b.subflowName))
+
+  const referenceGuide = blockInventory.map((entry) => {
+    const blockSchema = getBlock(entry.type)
+    const schemaOutputKeys = Object.keys(blockSchema?.outputs || {})
+    const outputKeys =
+      entry.type === 'start'
+        ? dedupeStrings([...schemaOutputKeys, ...startOutputKeys])
+        : dedupeStrings(schemaOutputKeys)
+    const referenceToken =
+      normalizeReferenceToken(entry.name) || normalizeReferenceToken(entry.type) || entry.id
+    return {
+      blockId: entry.id,
+      blockName: entry.name,
+      blockType: entry.type,
+      parentId: entry.parentId,
+      referenceToken,
+      outputKeys,
+      examples: outputKeys.slice(0, 4).map((key) => `<${referenceToken}.${key}>`),
+    }
+  })
+
   return {
     blockCount: Object.keys(blocks).length,
     edgeCount: edges.length,
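
Both inventories are sorted for deterministic output and then capped, with an explicit truncated flag so consumers can tell a cut list from a complete one. The pattern, generically:

    const MAX_BLOCK_INVENTORY = 160

    function capInventory<T extends { name: string; id: string }>(raw: T[]) {
      const sorted = [...raw].sort(
        (a, b) => a.name.localeCompare(b.name) || a.id.localeCompare(b.id)
      )
      return {
        inventory: sorted.slice(0, MAX_BLOCK_INVENTORY),
        truncated: raw.length > MAX_BLOCK_INVENTORY, // consumer knows the list was cut
      }
    }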
@@ -218,9 +482,16 @@ export function summarizeWorkflowState(workflowState: {
     parallelCount: Object.keys(workflowState.parallels || {}).length,
     blockTypes,
     triggerBlocks,
+    blockInventory,
+    blockInventoryTruncated,
+    edgeInventory,
+    edgeInventoryTruncated,
+    duplicateBlockNames,
+    subflowChildren,
+    referenceGuide,
   }
 }

 export function getAllKnownBlockTypes(): string[] {
-  return getAllBlockTypes()
+  return [...new Set([...getAllBlockTypes(), ...CONTAINER_BLOCK_TYPES])]
 }
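
loop and parallel are containers rather than registry blocks, so they have to be unioned in explicitly; the Set dedupes if the registry ever starts reporting them itself. In isolation:

    const CONTAINER_BLOCK_TYPES = ['loop', 'parallel'] as const

    function allKnownTypes(registryTypes: string[]): string[] {
      // Set-union dedupes if a container type also appears in the registry list.
      return [...new Set([...registryTypes, ...CONTAINER_BLOCK_TYPES])]
    }
    // allKnownTypes(['agent', 'loop']) -> ['agent', 'loop', 'parallel']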