mirror of https://github.com/simstudioai/sim.git
synced 2026-01-15 01:47:59 -05:00

Compare commits (5 commits): feat/reord… … fix/copilo…
| Author | SHA1 | Date |
|---|---|---|
| | aca7f43e35 | |
| | 2423255944 | |
| | 3aee702340 | |
| | 7c8ca1c14a | |
| | 3f1dccd6aa | |
@@ -52,6 +52,9 @@ const ChatMessageSchema = z.object({
      'gpt-5.1-high',
      'gpt-5-codex',
      'gpt-5.1-codex',
      'gpt-5.2',
      'gpt-5.2-codex',
      'gpt-5.2-pro',
      'gpt-4o',
      'gpt-4.1',
      'o3',
@@ -15,11 +15,14 @@ const DEFAULT_ENABLED_MODELS: Record<string, boolean> = {
  'gpt-5-medium': false,
  'gpt-5-high': false,
  'gpt-5.1-fast': false,
  'gpt-5.1': true,
  'gpt-5.1-medium': true,
  'gpt-5.1': false,
  'gpt-5.1-medium': false,
  'gpt-5.1-high': false,
  'gpt-5-codex': false,
  'gpt-5.1-codex': true,
  'gpt-5.1-codex': false,
  'gpt-5.2': false,
  'gpt-5.2-codex': true,
  'gpt-5.2-pro': true,
  o3: true,
  'claude-4-sonnet': false,
  'claude-4.5-haiku': true,
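For context, one plausible way a default-enabled map like `DEFAULT_ENABLED_MODELS` is typically consumed — this is not shown in the diff; the override merging and the model keys below are illustrative assumptions, and the diff itself only changes the default values:

```ts
// Hedged sketch: defaults gate which models are enabled, with hypothetical
// per-user overrides winning over the shipped defaults.
const DEFAULT_ENABLED_MODELS: Record<string, boolean> = {
  'gpt-5.1': false,
  'gpt-5.2-codex': true,
  'gpt-5.2-pro': true,
  o3: true,
}

function resolveEnabledModels(
  defaults: Record<string, boolean>,
  userOverrides: Record<string, boolean> = {}
): string[] {
  // User overrides win; otherwise fall back to the shipped default.
  const merged = { ...defaults, ...userOverrides }
  return Object.keys(merged).filter((model) => merged[model])
}

console.log(resolveEnabledModels(DEFAULT_ENABLED_MODELS))
// -> ['gpt-5.2-codex', 'gpt-5.2-pro', 'o3']
```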
@@ -2,29 +2,9 @@ import { memo, useEffect, useRef, useState } from 'react'
import CopilotMarkdownRenderer from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'

/**
 * Minimum delay between characters (fast catch-up mode)
 * Character animation delay in milliseconds
 */
const MIN_DELAY = 1

/**
 * Maximum delay between characters (when waiting for content)
 */
const MAX_DELAY = 12

/**
 * Default delay when streaming normally
 */
const DEFAULT_DELAY = 4

/**
 * How far behind (in characters) before we speed up
 */
const CATCH_UP_THRESHOLD = 20

/**
 * How close to content before we slow down
 */
const SLOW_DOWN_THRESHOLD = 5
const CHARACTER_DELAY = 3

/**
 * StreamingIndicator shows animated dots during message streaming
@@ -54,50 +34,21 @@ interface SmoothStreamingTextProps {
  isStreaming: boolean
}

/**
 * Calculates adaptive delay based on how far behind animation is from actual content
 *
 * @param displayedLength - Current displayed content length
 * @param totalLength - Total available content length
 * @returns Delay in milliseconds
 */
function calculateAdaptiveDelay(displayedLength: number, totalLength: number): number {
  const charsRemaining = totalLength - displayedLength

  if (charsRemaining > CATCH_UP_THRESHOLD) {
    // Far behind - speed up to catch up
    // Scale from MIN_DELAY to DEFAULT_DELAY based on how far behind
    const catchUpFactor = Math.min(1, (charsRemaining - CATCH_UP_THRESHOLD) / 50)
    return MIN_DELAY + (DEFAULT_DELAY - MIN_DELAY) * (1 - catchUpFactor)
  }

  if (charsRemaining <= SLOW_DOWN_THRESHOLD) {
    // Close to content edge - slow down to feel natural
    // The closer we are, the slower we go (up to MAX_DELAY)
    const slowFactor = 1 - charsRemaining / SLOW_DOWN_THRESHOLD
    return DEFAULT_DELAY + (MAX_DELAY - DEFAULT_DELAY) * slowFactor
  }

  // Normal streaming speed
  return DEFAULT_DELAY
}

/**
 * SmoothStreamingText component displays text with character-by-character animation
 * Creates a smooth streaming effect for AI responses with adaptive speed
 *
 * Uses adaptive pacing: speeds up when catching up, slows down near content edge
 * Creates a smooth streaming effect for AI responses
 *
 * @param props - Component props
 * @returns Streaming text with smooth animation
 */
export const SmoothStreamingText = memo(
  ({ content, isStreaming }: SmoothStreamingTextProps) => {
    const [displayedContent, setDisplayedContent] = useState('')
    // Initialize with full content when not streaming to avoid flash on page load
    const [displayedContent, setDisplayedContent] = useState(() => (isStreaming ? '' : content))
    const contentRef = useRef(content)
    const rafRef = useRef<number | null>(null)
    const indexRef = useRef(0)
    const lastFrameTimeRef = useRef<number>(0)
    const timeoutRef = useRef<NodeJS.Timeout | null>(null)
    // Initialize index based on streaming state
    const indexRef = useRef(isStreaming ? 0 : content.length)
    const isAnimatingRef = useRef(false)

    useEffect(() => {
@@ -110,42 +61,33 @@ export const SmoothStreamingText = memo(
      }

      if (isStreaming) {
        if (indexRef.current < content.length && !isAnimatingRef.current) {
          isAnimatingRef.current = true
          lastFrameTimeRef.current = performance.now()

          const animateText = (timestamp: number) => {
        if (indexRef.current < content.length) {
          const animateText = () => {
            const currentContent = contentRef.current
            const currentIndex = indexRef.current
            const elapsed = timestamp - lastFrameTimeRef.current

            // Calculate adaptive delay based on how far behind we are
            const delay = calculateAdaptiveDelay(currentIndex, currentContent.length)

            if (elapsed >= delay) {
              if (currentIndex < currentContent.length) {
                const newDisplayed = currentContent.slice(0, currentIndex + 1)
                setDisplayedContent(newDisplayed)
                indexRef.current = currentIndex + 1
                lastFrameTimeRef.current = timestamp
              }
            }

            if (indexRef.current < currentContent.length) {
              rafRef.current = requestAnimationFrame(animateText)
            if (currentIndex < currentContent.length) {
              const newDisplayed = currentContent.slice(0, currentIndex + 1)
              setDisplayedContent(newDisplayed)
              indexRef.current = currentIndex + 1
              timeoutRef.current = setTimeout(animateText, CHARACTER_DELAY)
            } else {
              isAnimatingRef.current = false
            }
          }

          rafRef.current = requestAnimationFrame(animateText)
        } else if (indexRef.current < content.length && isAnimatingRef.current) {
          // Animation already running, it will pick up new content automatically
          if (!isAnimatingRef.current) {
            if (timeoutRef.current) {
              clearTimeout(timeoutRef.current)
            }
            isAnimatingRef.current = true
            animateText()
          }
        }
      } else {
        // Streaming ended - show full content immediately
        if (rafRef.current) {
          cancelAnimationFrame(rafRef.current)
        if (timeoutRef.current) {
          clearTimeout(timeoutRef.current)
        }
        setDisplayedContent(content)
        indexRef.current = content.length
@@ -153,8 +95,8 @@ export const SmoothStreamingText = memo(
    }

    return () => {
      if (rafRef.current) {
        cancelAnimationFrame(rafRef.current)
      if (timeoutRef.current) {
        clearTimeout(timeoutRef.current)
      }
      isAnimatingRef.current = false
    }
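The net effect of the SmoothStreamingText changes above is to drop the adaptive requestAnimationFrame pacing in favour of a fixed setTimeout cadence at CHARACTER_DELAY. A minimal, framework-free sketch of that fixed-delay typewriter loop, where `onUpdate` is a hypothetical callback standing in for `setDisplayedContent`:

```ts
// Sketch of the fixed-delay pacing the diff switches to (setTimeout instead of rAF).
const CHARACTER_DELAY = 3

function typeOut(content: string, onUpdate: (text: string) => void): () => void {
  let index = 0
  let timer: ReturnType<typeof setTimeout> | null = null

  const step = () => {
    if (index < content.length) {
      index += 1
      onUpdate(content.slice(0, index))
      timer = setTimeout(step, CHARACTER_DELAY)
    }
  }

  step()

  // Cleanup mirrors the component's effect teardown: clear the pending timeout.
  return () => {
    if (timer) clearTimeout(timer)
  }
}

// Usage: const cancel = typeOut('Hello from the copilot', (text) => console.log(text))
```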
@@ -46,12 +46,14 @@ interface SmoothThinkingTextProps {
 */
const SmoothThinkingText = memo(
  ({ content, isStreaming }: SmoothThinkingTextProps) => {
    const [displayedContent, setDisplayedContent] = useState('')
    // Initialize with full content when not streaming to avoid flash on page load
    const [displayedContent, setDisplayedContent] = useState(() => (isStreaming ? '' : content))
    const [showGradient, setShowGradient] = useState(false)
    const contentRef = useRef(content)
    const textRef = useRef<HTMLDivElement>(null)
    const rafRef = useRef<number | null>(null)
    const indexRef = useRef(0)
    // Initialize index based on streaming state
    const indexRef = useRef(isStreaming ? 0 : content.length)
    const lastFrameTimeRef = useRef<number>(0)
    const isAnimatingRef = useRef(false)
@@ -1952,7 +1952,12 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
  }, [params])

  // Skip rendering some internal tools
  if (toolCall.name === 'checkoff_todo' || toolCall.name === 'mark_todo_in_progress') return null
  if (
    toolCall.name === 'checkoff_todo' ||
    toolCall.name === 'mark_todo_in_progress' ||
    toolCall.name === 'tool_search_tool_regex'
  )
    return null

  // Special rendering for subagent tools - show as thinking text with tool calls at top level
  const SUBAGENT_TOOLS = [
@@ -32,13 +32,6 @@ function getModelIconComponent(modelValue: string) {
  return <IconComponent className='h-3.5 w-3.5' />
}

/**
 * Checks if a model should display the MAX badge
 */
function isMaxModel(modelValue: string): boolean {
  return modelValue === 'claude-4.5-sonnet' || modelValue === 'claude-4.5-opus'
}

/**
 * Model selector dropdown for choosing AI model.
 * Displays model icon and label.
@@ -139,11 +132,6 @@ export function ModelSelector({ selectedModel, isNearTop, onModelSelect }: Model
              >
                {getModelIconComponent(option.value)}
                <span>{option.label}</span>
                {isMaxModel(option.value) && (
                  <Badge size='sm' className='ml-auto'>
                    MAX
                  </Badge>
                )}
              </PopoverItem>
            ))}
          </PopoverScrollArea>
@@ -238,8 +238,8 @@ export const MODEL_OPTIONS = [
  { value: 'claude-4.5-opus', label: 'Claude 4.5 Opus' },
  { value: 'claude-4.5-sonnet', label: 'Claude 4.5 Sonnet' },
  { value: 'claude-4.5-haiku', label: 'Claude 4.5 Haiku' },
  { value: 'gpt-5.1-codex', label: 'GPT 5.1 Codex' },
  { value: 'gpt-5.1-medium', label: 'GPT 5.1 Medium' },
  { value: 'gpt-5.2-codex', label: 'GPT 5.2 Codex' },
  { value: 'gpt-5.2-pro', label: 'GPT 5.2 Pro' },
  { value: 'gemini-3-pro', label: 'Gemini 3 Pro' },
] as const
@@ -172,7 +172,7 @@ async function executeWebhookJobInternal(
  const workflowVariables = (wfRows[0]?.variables as Record<string, any>) || {}

  // Merge subblock states (matching workflow-execution pattern)
  const mergedStates = mergeSubblockState(blocks, {})
  const mergedStates = mergeSubblockState(blocks)

  // Create serialized workflow
  const serializer = new Serializer()
@@ -77,6 +77,9 @@ export interface SendMessageRequest {
    | 'gpt-5.1-high'
    | 'gpt-5-codex'
    | 'gpt-5.1-codex'
    | 'gpt-5.2'
    | 'gpt-5.2-codex'
    | 'gpt-5.2-pro'
    | 'gpt-4o'
    | 'gpt-4.1'
    | 'o3'
apps/sim/lib/workflows/subblocks.ts (new file, 80 lines)
@@ -0,0 +1,80 @@
import type { BlockState, SubBlockState } from '@/stores/workflows/workflow/types'

export const DEFAULT_SUBBLOCK_TYPE = 'short-input'

/**
 * Merges subblock values into the provided subblock structures.
 * Falls back to a default subblock shape when a value has no structure.
 * @param subBlocks - Existing subblock definitions from the workflow
 * @param values - Stored subblock values keyed by subblock id
 * @returns Merged subblock structures with updated values
 */
export function mergeSubBlockValues(
  subBlocks: Record<string, unknown> | undefined,
  values: Record<string, unknown> | undefined
): Record<string, unknown> {
  const merged = { ...(subBlocks || {}) } as Record<string, any>

  if (!values) return merged

  Object.entries(values).forEach(([subBlockId, value]) => {
    if (merged[subBlockId] && typeof merged[subBlockId] === 'object') {
      merged[subBlockId] = {
        ...(merged[subBlockId] as Record<string, unknown>),
        value,
      }
      return
    }

    merged[subBlockId] = {
      id: subBlockId,
      type: DEFAULT_SUBBLOCK_TYPE,
      value,
    }
  })

  return merged
}

/**
 * Merges workflow block states with explicit subblock values while maintaining block structure.
 * Values that are null or undefined do not override existing subblock values.
 * @param blocks - Block configurations from workflow state
 * @param subBlockValues - Subblock values keyed by blockId -> subBlockId -> value
 * @param blockId - Optional specific block ID to merge (merges all if not provided)
 * @returns Merged block states with updated subblocks
 */
export function mergeSubblockStateWithValues(
  blocks: Record<string, BlockState>,
  subBlockValues: Record<string, Record<string, unknown>> = {},
  blockId?: string
): Record<string, BlockState> {
  const blocksToProcess = blockId ? { [blockId]: blocks[blockId] } : blocks

  return Object.entries(blocksToProcess).reduce(
    (acc, [id, block]) => {
      if (!block) {
        return acc
      }

      const blockSubBlocks = block.subBlocks || {}
      const blockValues = subBlockValues[id] || {}
      const filteredValues = Object.fromEntries(
        Object.entries(blockValues).filter(([, value]) => value !== null && value !== undefined)
      )

      const mergedSubBlocks = mergeSubBlockValues(blockSubBlocks, filteredValues) as Record<
        string,
        SubBlockState
      >

      acc[id] = {
        ...block,
        subBlocks: mergedSubBlocks,
      }

      return acc
    },
    {} as Record<string, BlockState>
  )
}
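A hedged usage sketch of the new mergeSubBlockValues helper; the subblock ids, types, and values below are hypothetical, chosen only to show the two branches (existing structure updated vs. default shape created):

```ts
import { mergeSubBlockValues } from '@/lib/workflows/subblocks'

const subBlocks = {
  systemPrompt: { id: 'systemPrompt', type: 'long-input', value: null },
}

const values = {
  systemPrompt: 'You are a helpful assistant', // structure exists, so only its value is replaced
  temperature: 0.2, // no structure exists, so a default 'short-input' subblock is created
}

const merged = mergeSubBlockValues(subBlocks, values)
// merged.systemPrompt -> { id: 'systemPrompt', type: 'long-input', value: 'You are a helpful assistant' }
// merged.temperature  -> { id: 'temperature', type: 'short-input', value: 0.2 }
```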
@@ -7,6 +7,7 @@ import postgres from 'postgres'
import { env } from '@/lib/core/config/env'
import { cleanupExternalWebhook } from '@/lib/webhooks/provider-subscriptions'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { mergeSubBlockValues } from '@/lib/workflows/subblocks'
import {
  BLOCK_OPERATIONS,
  BLOCKS_OPERATIONS,
@@ -455,7 +456,7 @@ async function handleBlocksOperationTx(
    }

    case BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS: {
      const { blocks, edges, loops, parallels } = payload
      const { blocks, edges, loops, parallels, subBlockValues } = payload

      logger.info(`Batch adding blocks to workflow ${workflowId}`, {
        blockCount: blocks?.length || 0,
@@ -465,22 +466,30 @@ async function handleBlocksOperationTx(
      })

      if (blocks && blocks.length > 0) {
        const blockValues = blocks.map((block: Record<string, unknown>) => ({
          id: block.id as string,
          workflowId,
          type: block.type as string,
          name: block.name as string,
          positionX: (block.position as { x: number; y: number }).x,
          positionY: (block.position as { x: number; y: number }).y,
          data: (block.data as Record<string, unknown>) || {},
          subBlocks: (block.subBlocks as Record<string, unknown>) || {},
          outputs: (block.outputs as Record<string, unknown>) || {},
          enabled: (block.enabled as boolean) ?? true,
          horizontalHandles: (block.horizontalHandles as boolean) ?? true,
          advancedMode: (block.advancedMode as boolean) ?? false,
          triggerMode: (block.triggerMode as boolean) ?? false,
          height: (block.height as number) || 0,
        }))
        const blockValues = blocks.map((block: Record<string, unknown>) => {
          const blockId = block.id as string
          const mergedSubBlocks = mergeSubBlockValues(
            block.subBlocks as Record<string, unknown>,
            subBlockValues?.[blockId]
          )

          return {
            id: blockId,
            workflowId,
            type: block.type as string,
            name: block.name as string,
            positionX: (block.position as { x: number; y: number }).x,
            positionY: (block.position as { x: number; y: number }).y,
            data: (block.data as Record<string, unknown>) || {},
            subBlocks: mergedSubBlocks,
            outputs: (block.outputs as Record<string, unknown>) || {},
            enabled: (block.enabled as boolean) ?? true,
            horizontalHandles: (block.horizontalHandles as boolean) ?? true,
            advancedMode: (block.advancedMode as boolean) ?? false,
            triggerMode: (block.triggerMode as boolean) ?? false,
            height: (block.height as number) || 0,
          }
        })

        await tx.insert(workflowBlocks).values(blockValues)
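A sketch of what the updated BATCH_ADD_BLOCKS path does per incoming block: values sent alongside the blocks are folded into each block's subBlocks before the row is built. The payload shape below is a simplified assumption, not the full operation schema:

```ts
import { mergeSubBlockValues } from '@/lib/workflows/subblocks'

const payload: {
  blocks: Array<{ id: string; type: string; subBlocks: Record<string, unknown> }>
  subBlockValues?: Record<string, Record<string, unknown>>
} = {
  blocks: [
    { id: 'block-1', type: 'agent', subBlocks: { model: { id: 'model', type: 'dropdown', value: null } } },
  ],
  subBlockValues: { 'block-1': { model: 'gpt-5.2-codex' } },
}

// Each stored row now carries subBlocks with the incoming values merged in.
const rows = payload.blocks.map((block) => ({
  id: block.id,
  subBlocks: mergeSubBlockValues(block.subBlocks, payload.subBlockValues?.[block.id]),
}))
// rows[0].subBlocks.model -> { id: 'model', type: 'dropdown', value: 'gpt-5.2-codex' }
```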
@@ -422,7 +422,8 @@ function abortAllInProgressTools(set: any, get: () => CopilotStore) {
 * Loads messages from DB for UI rendering.
 * Messages are stored exactly as they render, so we just need to:
 * 1. Register client tool instances for any tool calls
 * 2. Return the messages as-is
 * 2. Clear any streaming flags (messages loaded from DB are never actively streaming)
 * 3. Return the messages
 */
function normalizeMessagesForUI(messages: CopilotMessage[]): CopilotMessage[] {
  try {
@@ -438,23 +439,54 @@ function normalizeMessagesForUI(messages: CopilotMessage[]): CopilotMessage[] {
      }
    }

    // Register client tool instances for all tool calls so they can be looked up
    // Register client tool instances and clear streaming flags for all tool calls
    for (const message of messages) {
      if (message.contentBlocks) {
        for (const block of message.contentBlocks as any[]) {
          if (block?.type === 'tool_call' && block.toolCall) {
            registerToolCallInstances(block.toolCall)
            clearStreamingFlags(block.toolCall)
          }
        }
      }
      // Also clear from toolCalls array (legacy format)
      if (message.toolCalls) {
        for (const toolCall of message.toolCalls) {
          clearStreamingFlags(toolCall)
        }
      }
    }
    // Return messages as-is - they're already in the correct format for rendering
    return messages
  } catch {
    return messages
  }
}

/**
 * Recursively clears streaming flags from a tool call and its nested subagent tool calls.
 * This ensures messages loaded from DB don't appear to be streaming.
 */
function clearStreamingFlags(toolCall: any): void {
  if (!toolCall) return

  // Always set subAgentStreaming to false - messages loaded from DB are never streaming
  toolCall.subAgentStreaming = false

  // Clear nested subagent tool calls
  if (Array.isArray(toolCall.subAgentBlocks)) {
    for (const block of toolCall.subAgentBlocks) {
      if (block?.type === 'subagent_tool_call' && block.toolCall) {
        clearStreamingFlags(block.toolCall)
      }
    }
  }
  if (Array.isArray(toolCall.subAgentToolCalls)) {
    for (const subTc of toolCall.subAgentToolCalls) {
      clearStreamingFlags(subTc)
    }
  }
}

/**
 * Recursively registers client tool instances for a tool call and its nested subagent tool calls.
 */
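To illustrate the recursion, a self-contained sketch that restates clearStreamingFlags from the hunk above and runs it on a hypothetical nested tool call (the tool names are made up):

```ts
// Restated from the hunk above; applied to an illustrative nested structure.
function clearStreamingFlags(toolCall: any): void {
  if (!toolCall) return
  toolCall.subAgentStreaming = false
  if (Array.isArray(toolCall.subAgentBlocks)) {
    for (const block of toolCall.subAgentBlocks) {
      if (block?.type === 'subagent_tool_call' && block.toolCall) clearStreamingFlags(block.toolCall)
    }
  }
  if (Array.isArray(toolCall.subAgentToolCalls)) {
    for (const subTc of toolCall.subAgentToolCalls) clearStreamingFlags(subTc)
  }
}

const loadedToolCall = {
  name: 'edit_workflow', // hypothetical tool name
  subAgentStreaming: true,
  subAgentBlocks: [
    { type: 'subagent_tool_call', toolCall: { name: 'get_blocks', subAgentStreaming: true } },
  ],
}

clearStreamingFlags(loadedToolCall)
// loadedToolCall.subAgentStreaming === false
// loadedToolCall.subAgentBlocks[0].toolCall.subAgentStreaming === false
```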
@@ -106,6 +106,9 @@ export interface CopilotState {
    | 'gpt-5.1-high'
    | 'gpt-5-codex'
    | 'gpt-5.1-codex'
    | 'gpt-5.2'
    | 'gpt-5.2-codex'
    | 'gpt-5.2-pro'
    | 'gpt-4o'
    | 'gpt-4.1'
    | 'o3'
@@ -8,7 +8,8 @@
 * or React hooks, making it safe for use in Next.js API routes.
 */

import type { BlockState, SubBlockState } from '@/stores/workflows/workflow/types'
import { mergeSubblockStateWithValues } from '@/lib/workflows/subblocks'
import type { BlockState } from '@/stores/workflows/workflow/types'

/**
 * Server-safe version of mergeSubblockState for API routes
@@ -26,72 +27,7 @@ export function mergeSubblockState(
  subBlockValues: Record<string, Record<string, any>> = {},
  blockId?: string
): Record<string, BlockState> {
  const blocksToProcess = blockId ? { [blockId]: blocks[blockId] } : blocks

  return Object.entries(blocksToProcess).reduce(
    (acc, [id, block]) => {
      // Skip if block is undefined
      if (!block) {
        return acc
      }

      // Initialize subBlocks if not present
      const blockSubBlocks = block.subBlocks || {}

      // Get stored values for this block
      const blockValues = subBlockValues[id] || {}

      // Create a deep copy of the block's subBlocks to maintain structure
      const mergedSubBlocks = Object.entries(blockSubBlocks).reduce(
        (subAcc, [subBlockId, subBlock]) => {
          // Skip if subBlock is undefined
          if (!subBlock) {
            return subAcc
          }

          // Get the stored value for this subblock
          const storedValue = blockValues[subBlockId]

          // Create a new subblock object with the same structure but updated value
          subAcc[subBlockId] = {
            ...subBlock,
            value: storedValue !== undefined && storedValue !== null ? storedValue : subBlock.value,
          }

          return subAcc
        },
        {} as Record<string, SubBlockState>
      )

      // Return the full block state with updated subBlocks
      acc[id] = {
        ...block,
        subBlocks: mergedSubBlocks,
      }

      // Add any values that exist in the provided values but aren't in the block structure
      // This handles cases where block config has been updated but values still exist
      Object.entries(blockValues).forEach(([subBlockId, value]) => {
        if (!mergedSubBlocks[subBlockId] && value !== null && value !== undefined) {
          // Create a minimal subblock structure
          mergedSubBlocks[subBlockId] = {
            id: subBlockId,
            type: 'short-input', // Default type that's safe to use
            value: value,
          }
        }
      })

      // Update the block with the final merged subBlocks (including orphaned values)
      acc[id] = {
        ...block,
        subBlocks: mergedSubBlocks,
      }

      return acc
    },
    {} as Record<string, BlockState>
  )
  return mergeSubblockStateWithValues(blocks, subBlockValues, blockId)
}

/**
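A hedged sketch of the shared helper the server-safe mergeSubblockState now delegates to. The block and value shapes are illustrative; the comments spell out the two behaviours documented in the new module: null/undefined values never clobber an existing value, and orphaned values get a default subblock shape:

```ts
import { mergeSubblockStateWithValues } from '@/lib/workflows/subblocks'
import type { BlockState } from '@/stores/workflows/workflow/types'

const blocks = {
  'block-1': {
    id: 'block-1',
    type: 'agent',
    name: 'Agent 1',
    subBlocks: { prompt: { id: 'prompt', type: 'long-input', value: 'keep me' } },
  } as unknown as BlockState, // cast keeps this minimal sketch compiling
}

const merged = mergeSubblockStateWithValues(blocks, {
  'block-1': { prompt: null, model: 'gpt-5.2-pro' },
})
// merged['block-1'].subBlocks.prompt.value === 'keep me'    (null did not override)
// merged['block-1'].subBlocks.model.value === 'gpt-5.2-pro' (orphaned value gets a default shape)
```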
@@ -1,20 +1,7 @@
import type { Edge } from 'reactflow'
import { v4 as uuidv4 } from 'uuid'

export function filterNewEdges(edgesToAdd: Edge[], currentEdges: Edge[]): Edge[] {
  return edgesToAdd.filter((edge) => {
    if (edge.source === edge.target) return false
    return !currentEdges.some(
      (e) =>
        e.source === edge.source &&
        e.sourceHandle === edge.sourceHandle &&
        e.target === edge.target &&
        e.targetHandle === edge.targetHandle
    )
  })
}

import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { mergeSubBlockValues, mergeSubblockStateWithValues } from '@/lib/workflows/subblocks'
import { getBlock } from '@/blocks'
import { normalizeName } from '@/executor/constants'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
@@ -32,6 +19,19 @@ const WEBHOOK_SUBBLOCK_FIELDS = ['webhookId', 'triggerPath']

export { normalizeName }

export function filterNewEdges(edgesToAdd: Edge[], currentEdges: Edge[]): Edge[] {
  return edgesToAdd.filter((edge) => {
    if (edge.source === edge.target) return false
    return !currentEdges.some(
      (e) =>
        e.source === edge.source &&
        e.sourceHandle === edge.sourceHandle &&
        e.target === edge.target &&
        e.targetHandle === edge.targetHandle
    )
  })
}

export interface RegeneratedState {
  blocks: Record<string, BlockState>
  edges: Edge[]
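filterNewEdges is moved, not changed, by this diff. A self-contained sketch (the function restated verbatim from the hunk above, with hypothetical edges) showing what it filters out:

```ts
import type { Edge } from 'reactflow'

function filterNewEdges(edgesToAdd: Edge[], currentEdges: Edge[]): Edge[] {
  return edgesToAdd.filter((edge) => {
    if (edge.source === edge.target) return false
    return !currentEdges.some(
      (e) =>
        e.source === edge.source &&
        e.sourceHandle === edge.sourceHandle &&
        e.target === edge.target &&
        e.targetHandle === edge.targetHandle
    )
  })
}

const current: Edge[] = [{ id: 'e1', source: 'a', target: 'b' }]
const incoming: Edge[] = [
  { id: 'e2', source: 'a', target: 'b' }, // same source/target/handles as e1 -> dropped
  { id: 'e3', source: 'b', target: 'b' }, // self-loop -> dropped
  { id: 'e4', source: 'b', target: 'c' }, // genuinely new -> kept
]

console.log(filterNewEdges(incoming, current).map((e) => e.id)) // -> ['e4']
```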
@@ -201,27 +201,20 @@ export function prepareDuplicateBlockState(options: PrepareDuplicateBlockStateOp
    Object.entries(subBlockValues).filter(([key]) => !WEBHOOK_SUBBLOCK_FIELDS.includes(key))
  )

  const mergedSubBlocks: Record<string, SubBlockState> = sourceBlock.subBlocks
  const baseSubBlocks: Record<string, SubBlockState> = sourceBlock.subBlocks
    ? JSON.parse(JSON.stringify(sourceBlock.subBlocks))
    : {}

  WEBHOOK_SUBBLOCK_FIELDS.forEach((field) => {
    if (field in mergedSubBlocks) {
      delete mergedSubBlocks[field]
    if (field in baseSubBlocks) {
      delete baseSubBlocks[field]
    }
  })

  Object.entries(filteredSubBlockValues).forEach(([subblockId, value]) => {
    if (mergedSubBlocks[subblockId]) {
      mergedSubBlocks[subblockId].value = value as SubBlockState['value']
    } else {
      mergedSubBlocks[subblockId] = {
        id: subblockId,
        type: 'short-input',
        value: value as SubBlockState['value'],
      }
    }
  })
  const mergedSubBlocks = mergeSubBlockValues(baseSubBlocks, filteredSubBlockValues) as Record<
    string,
    SubBlockState
  >

  const block: BlockState = {
    id: newId,
@@ -256,11 +249,16 @@ export function mergeSubblockState(
  workflowId?: string,
  blockId?: string
): Record<string, BlockState> {
  const blocksToProcess = blockId ? { [blockId]: blocks[blockId] } : blocks
  const subBlockStore = useSubBlockStore.getState()

  const workflowSubblockValues = workflowId ? subBlockStore.workflowValues[workflowId] || {} : {}

  if (workflowId) {
    return mergeSubblockStateWithValues(blocks, workflowSubblockValues, blockId)
  }

  const blocksToProcess = blockId ? { [blockId]: blocks[blockId] } : blocks

  return Object.entries(blocksToProcess).reduce(
    (acc, [id, block]) => {
      if (!block) {
@@ -339,9 +337,15 @@ export async function mergeSubblockStateAsync(
  workflowId?: string,
  blockId?: string
): Promise<Record<string, BlockState>> {
  const blocksToProcess = blockId ? { [blockId]: blocks[blockId] } : blocks
  const subBlockStore = useSubBlockStore.getState()

  if (workflowId) {
    const workflowValues = subBlockStore.workflowValues[workflowId] || {}
    return mergeSubblockStateWithValues(blocks, workflowValues, blockId)
  }

  const blocksToProcess = blockId ? { [blockId]: blocks[blockId] } : blocks

  // Process blocks in parallel for better performance
  const processedBlockEntries = await Promise.all(
    Object.entries(blocksToProcess).map(async ([id, block]) => {
@@ -358,16 +362,7 @@ export async function mergeSubblockStateAsync(
        return null
      }

      let storedValue = null

      if (workflowId) {
        const workflowValues = subBlockStore.workflowValues[workflowId]
        if (workflowValues?.[id]) {
          storedValue = workflowValues[id][subBlockId]
        }
      } else {
        storedValue = subBlockStore.getValue(id, subBlockId)
      }
      const storedValue = subBlockStore.getValue(id, subBlockId)

      return [
        subBlockId,
@@ -386,23 +381,6 @@ export async function mergeSubblockStateAsync(
        subBlockEntries.filter((entry): entry is readonly [string, SubBlockState] => entry !== null)
      ) as Record<string, SubBlockState>

      // Add any values that exist in the store but aren't in the block structure
      // This handles cases where block config has been updated but values still exist
      // IMPORTANT: This includes runtime subblock IDs like webhookId, triggerPath, etc.
      if (workflowId) {
        const workflowValues = subBlockStore.workflowValues[workflowId]
        const blockValues = workflowValues?.[id] || {}
        Object.entries(blockValues).forEach(([subBlockId, value]) => {
          if (!mergedSubBlocks[subBlockId] && value !== null && value !== undefined) {
            mergedSubBlocks[subBlockId] = {
              id: subBlockId,
              type: 'short-input',
              value: value as SubBlockState['value'],
            }
          }
        })
      }

      // Return the full block state with updated subBlocks (including orphaned values)
      return [
        id,
@@ -639,7 +639,8 @@ export const useWorkflowStore = create<WorkflowStore>()(

        const newName = getUniqueBlockName(block.name, get().blocks)

        const mergedBlock = mergeSubblockState(get().blocks, id)[id]
        const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
        const mergedBlock = mergeSubblockState(get().blocks, activeWorkflowId || undefined, id)[id]

        const newSubBlocks = Object.entries(mergedBlock.subBlocks).reduce(
          (acc, [subId, subBlock]) => ({
@@ -668,7 +669,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
          parallels: get().generateParallelBlocks(),
        }

        const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
        if (activeWorkflowId) {
          const subBlockValues =
            useSubBlockStore.getState().workflowValues[activeWorkflowId]?.[id] || {}