improvement(sockets): cleanup debounce logic + add flush mechanism to not lose ops (#1152)

* improvement(sockets): cleanup debounce logic + add flush mechanism to not lose ops

* fix optimistic update overwritten race condition

* fix

* fix forever stuck in processing
Authored by Vikhyath Mondreti on 2025-08-27 11:35:20 -07:00, committed by GitHub
parent 923c05239c
commit 89f7d2b943
8 changed files with 352 additions and 158 deletions
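
The core change replaces fire-and-forget debounce timeouts with a map of pending debounced operations that can be flushed on demand (on blur, on room switch, on page unload), so a held edit is never silently dropped. A minimal standalone sketch of that keyed debounce-with-flush pattern, with simplified types (not the store's actual code):

    // Sketch only: the real store keys by block/subblock and variable/field
    // and enqueues into a Zustand store instead of calling enqueue() directly.
    type Operation = { key: string; payload: unknown }

    const pending = new Map<string, { timeout: ReturnType<typeof setTimeout>; op: Operation }>()

    function enqueue(op: Operation): void {
      // Placeholder for the real queue submission.
      console.log('enqueue', op.key, op.payload)
    }

    function debouncedEnqueue(op: Operation, delayMs = 25): void {
      const existing = pending.get(op.key)
      if (existing) clearTimeout(existing.timeout) // keep only the latest value per key
      const timeout = setTimeout(() => {
        pending.delete(op.key)
        enqueue(op)
      }, delayMs)
      pending.set(op.key, { timeout, op })
    }

    function flushAll(): void {
      // Emit every held operation immediately instead of dropping it.
      pending.forEach(({ timeout, op }, key) => {
        clearTimeout(timeout)
        pending.delete(key)
        enqueue(op)
      })
    }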

View File

@@ -163,8 +163,7 @@ export function Code({
// State management - useSubBlockValue with explicit streaming control
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId, false, {
debounceMs: 150,
isStreaming: isAiStreaming, // Use AI streaming state directly
isStreaming: isAiStreaming,
onStreamingEnd: () => {
logger.debug('AI streaming ended, value persisted', { blockId, subBlockId })
},

View File

@@ -13,6 +13,7 @@ import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/c
import { useWand } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-wand'
import type { SubBlockConfig } from '@/blocks/types'
import { useTagSelection } from '@/hooks/use-tag-selection'
import { useOperationQueueStore } from '@/stores/operation-queue/store'
const logger = createLogger('LongInput')
@@ -73,7 +74,6 @@ export function LongInput({
// State management - useSubBlockValue with explicit streaming control
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId, false, {
debounceMs: 150,
isStreaming: wandHook?.isStreaming || false, // Use wand streaming state
onStreamingEnd: () => {
logger.debug('Wand streaming ended, value persisted', { blockId, subBlockId })
@@ -379,6 +379,11 @@ export function LongInput({
onScroll={handleScroll}
onWheel={handleWheel}
onKeyDown={handleKeyDown}
onBlur={() => {
try {
useOperationQueueStore.getState().flushDebouncedForBlock(blockId)
} catch {}
}}
onFocus={() => {
setShowEnvVars(false)
setShowTags(false)
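
The onBlur handler above (and the one in ShortInput below) flushes any debounced subblock operation for the block as soon as the field loses focus, so a quick type-then-blur is not lost inside the 25ms debounce window. A usage sketch for wiring the same flush into any other subblock input (the helper name is illustrative):

    import { useOperationQueueStore } from '@/stores/operation-queue/store'

    // Illustrative helper: push any debounced subblock update for this block
    // into the operation queue, e.g. from an input's onBlur handler.
    export function flushBlockOnBlur(blockId: string): void {
      try {
        useOperationQueueStore.getState().flushDebouncedForBlock(blockId)
      } catch {
        // Flushing is best-effort; never let it break the blur handler.
      }
    }

In JSX this attaches as onBlur={() => flushBlockOnBlur(blockId)}, matching the handlers added in this diff.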

View File

@@ -9,6 +9,7 @@ import { cn } from '@/lib/utils'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/hooks/use-sub-block-value'
import type { SubBlockConfig } from '@/blocks/types'
import { useTagSelection } from '@/hooks/use-tag-selection'
import { useOperationQueueStore } from '@/stores/operation-queue/store'
const logger = createLogger('ShortInput')
@@ -329,6 +330,9 @@ export function ShortInput({
onBlur={() => {
setIsFocused(false)
setShowEnvVars(false)
try {
useOperationQueueStore.getState().flushDebouncedForBlock(blockId)
} catch {}
}}
onDrop={handleDrop}
onDragOver={handleDragOver}

View File

@@ -11,8 +11,7 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'
const logger = createLogger('SubBlockValue')
interface UseSubBlockValueOptions {
debounceMs?: number
isStreaming?: boolean // Explicit streaming state
isStreaming?: boolean
onStreamingEnd?: () => void
}
@@ -130,8 +129,21 @@ export function useSubBlockValue<T = any>(
if (!isEqual(valueRef.current, newValue)) {
valueRef.current = newValue
// Update local store immediately for UI responsiveness
// The collaborative function will also update it, but that's okay for idempotency
// Ensure we're passing the actual value, not a reference that might change
const valueCopy =
newValue === null
? null
: typeof newValue === 'object'
? JSON.parse(JSON.stringify(newValue))
: newValue
// If streaming, hold value locally and do not update global store to avoid render-phase updates
if (isStreaming) {
streamingValueRef.current = valueCopy
return
}
// Update local store immediately for UI responsiveness (non-streaming)
useSubBlockStore.setState((state) => ({
workflowValues: {
...state.workflowValues,
@@ -145,7 +157,7 @@ export function useSubBlockValue<T = any>(
},
}))
// Handle model changes for provider-based blocks - clear API key when provider changes
// Handle model changes for provider-based blocks - clear API key when provider changes (non-streaming)
if (
subBlockId === 'model' &&
isProviderBasedBlock &&
@@ -153,36 +165,18 @@ export function useSubBlockValue<T = any>(
typeof newValue === 'string'
) {
const currentApiKeyValue = useSubBlockStore.getState().getValue(blockId, 'apiKey')
// Only clear if there's currently an API key value
if (currentApiKeyValue && currentApiKeyValue !== '') {
const oldModelValue = storeValue as string
const oldProvider = oldModelValue ? getProviderFromModel(oldModelValue) : null
const newProvider = getProviderFromModel(newValue)
// Clear API key if provider changed
if (oldProvider !== newProvider) {
// Use collaborative function to clear the API key
collaborativeSetSubblockValue(blockId, 'apiKey', '')
}
}
}
// Ensure we're passing the actual value, not a reference that might change
const valueCopy =
newValue === null
? null
: typeof newValue === 'object'
? JSON.parse(JSON.stringify(newValue))
: newValue
// If streaming, just store the value without emitting
if (isStreaming) {
streamingValueRef.current = valueCopy
} else {
// Emit immediately - let the operation queue handle debouncing and deduplication
emitValue(valueCopy)
}
// Emit immediately - let the operation queue handle debouncing and deduplication
emitValue(valueCopy)
if (triggerWorkflowUpdate) {
useWorkflowStore.getState().triggerUpdate()
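
While streaming, the hook now deep-copies the value and holds it in a ref instead of writing the global store, then emits once streaming ends; this avoids render-phase store updates and per-token emits. A minimal sketch of that hold-then-emit pattern as a standalone hook, with simplified types (the real hook also updates the local Zustand store and clears provider API keys on model changes):

    import { useEffect, useRef } from 'react'

    // Sketch of "hold while streaming, emit on end", assuming a simplified emit callback.
    export function useStreamingHold<T>(
      isStreaming: boolean,
      emit: (value: T) => void
    ): (value: T) => void {
      const held = useRef<T | null>(null)

      // When streaming flips off, emit the last held value exactly once.
      useEffect(() => {
        if (!isStreaming && held.current !== null) {
          emit(held.current)
          held.current = null
        }
      }, [isStreaming, emit])

      return (value: T) => {
        // Deep-copy objects so later mutations cannot change what gets emitted.
        const copy =
          value !== null && typeof value === 'object'
            ? (JSON.parse(JSON.stringify(value)) as T)
            : value
        if (isStreaming) {
          held.current = copy
          return
        }
        emit(copy)
      }
    }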

View File

@@ -334,6 +334,18 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
)
if (data.workflowId === urlWorkflowId) {
try {
const { useOperationQueueStore } = require('@/stores/operation-queue/store')
const hasPending = useOperationQueueStore
.getState()
.operations.some(
(op: any) => op.workflowId === data.workflowId && op.status !== 'confirmed'
)
if (hasPending) {
logger.info('Skipping copilot rehydration due to pending operations in queue')
return
}
} catch {}
try {
// Fetch fresh workflow state directly from API
const response = await fetch(`/api/workflows/${data.workflowId}`)
@@ -364,27 +376,38 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
)
})
// Update workflow store with fresh state from database
const newWorkflowState = {
blocks: workflowState.blocks || {},
edges: workflowState.edges || [],
loops: workflowState.loops || {},
parallels: workflowState.parallels || {},
lastSaved: workflowState.lastSaved || Date.now(),
isDeployed: workflowState.isDeployed || false,
deployedAt: workflowState.deployedAt,
deploymentStatuses: workflowState.deploymentStatuses || {},
hasActiveSchedule: workflowState.hasActiveSchedule || false,
hasActiveWebhook: workflowState.hasActiveWebhook || false,
// Merge workflow store with server state (do not drop optimistic local state)
const existing = useWorkflowStore.getState()
const mergedBlocks = {
...(existing.blocks || {}),
...(workflowState.blocks || {}),
}
const edgeById = new Map<string, any>()
;(existing.edges || []).forEach((e: any) => edgeById.set(e.id, e))
;(workflowState.edges || []).forEach((e: any) => edgeById.set(e.id, e))
const mergedEdges = Array.from(edgeById.values())
useWorkflowStore.setState({
blocks: mergedBlocks,
edges: mergedEdges,
loops: workflowState.loops || existing.loops || {},
parallels: workflowState.parallels || existing.parallels || {},
lastSaved: workflowState.lastSaved || existing.lastSaved || Date.now(),
isDeployed: workflowState.isDeployed ?? existing.isDeployed ?? false,
deployedAt: workflowState.deployedAt || existing.deployedAt,
deploymentStatuses:
workflowState.deploymentStatuses || existing.deploymentStatuses || {},
hasActiveWebhook:
workflowState.hasActiveWebhook ?? existing.hasActiveWebhook ?? false,
})
useWorkflowStore.setState(newWorkflowState)
// Update subblock store with fresh values
// Merge subblock store values per workflow
useSubBlockStore.setState((state: any) => ({
workflowValues: {
...state.workflowValues,
[data.workflowId]: subblockValues,
[data.workflowId]: {
...(state.workflowValues?.[data.workflowId] || {}),
...subblockValues,
},
},
}))
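
Rehydration now merges server state into local state instead of replacing it, so optimistic local edits that have not been confirmed yet survive the refresh (the "optimistic update overwritten" race from the commit message). A small sketch of the merge rule used for blocks and edges, with simplified shapes:

    // Server wins per id, but local-only blocks/edges survive the rehydration.
    type Edge = { id: string }

    function mergeBlocks<T>(local: Record<string, T>, server: Record<string, T>): Record<string, T> {
      return { ...local, ...server } // keyed by block id; server entries override
    }

    function mergeEdges(local: Edge[], server: Edge[]): Edge[] {
      const byId = new Map<string, Edge>()
      for (const e of local) byId.set(e.id, e)
      for (const e of server) byId.set(e.id, e) // server version wins on id collision
      return Array.from(byId.values())
    }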
@@ -461,19 +484,31 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
// Update local stores with the fresh workflow state (same logic as YAML editor)
if (workflowData?.state && workflowData.id === urlWorkflowId) {
try {
const { useOperationQueueStore } = require('@/stores/operation-queue/store')
const hasPending = useOperationQueueStore
.getState()
.operations.some(
(op: any) => op.workflowId === workflowData.id && op.status !== 'confirmed'
)
if (hasPending) {
logger.info(
'Skipping workflow-state rehydration due to pending operations in queue'
)
return
}
} catch {}
logger.info('Updating local stores with fresh workflow state from server')
try {
// Import stores dynamically to avoid import issues
Promise.all([
import('@/stores/workflows/workflow/store'),
import('@/stores/workflows/subblock/store'),
import('@/stores/workflows/registry/store'),
])
.then(([{ useWorkflowStore }, { useSubBlockStore }, { useWorkflowRegistry }]) => {
.then(([{ useWorkflowStore }, { useSubBlockStore }]) => {
const workflowState = workflowData.state
// Extract subblock values from blocks before updating workflow store
const subblockValues: Record<string, Record<string, any>> = {}
Object.entries(workflowState.blocks || {}).forEach(([blockId, block]) => {
const blockState = block as any
@@ -483,36 +518,40 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
})
})
// Update workflow store with new state
const newWorkflowState = {
blocks: workflowState.blocks || {},
edges: workflowState.edges || [],
loops: workflowState.loops || {},
parallels: workflowState.parallels || {},
lastSaved: workflowState.lastSaved || Date.now(),
isDeployed: workflowState.isDeployed || false,
deployedAt: workflowState.deployedAt,
deploymentStatuses: workflowState.deploymentStatuses || {},
hasActiveSchedule: workflowState.hasActiveSchedule || false,
hasActiveWebhook: workflowState.hasActiveWebhook || false,
const existing = useWorkflowStore.getState()
const mergedBlocks = {
...(existing.blocks || {}),
...(workflowState.blocks || {}),
}
const edgeById = new Map<string, any>()
;(existing.edges || []).forEach((e: any) => edgeById.set(e.id, e))
;(workflowState.edges || []).forEach((e: any) => edgeById.set(e.id, e))
const mergedEdges = Array.from(edgeById.values())
useWorkflowStore.setState({
blocks: mergedBlocks,
edges: mergedEdges,
loops: workflowState.loops || existing.loops || {},
parallels: workflowState.parallels || existing.parallels || {},
lastSaved: workflowState.lastSaved || existing.lastSaved || Date.now(),
isDeployed: workflowState.isDeployed ?? existing.isDeployed ?? false,
deployedAt: workflowState.deployedAt || existing.deployedAt,
deploymentStatuses:
workflowState.deploymentStatuses || existing.deploymentStatuses || {},
hasActiveWebhook:
workflowState.hasActiveWebhook ?? existing.hasActiveWebhook ?? false,
})
useWorkflowStore.setState(newWorkflowState)
// Update subblock store with fresh values
useSubBlockStore.setState((state: any) => ({
workflowValues: {
...state.workflowValues,
[workflowData.id]: subblockValues,
[workflowData.id]: {
...(state.workflowValues?.[workflowData.id] || {}),
...subblockValues,
},
},
}))
// Note: Auto layout is not triggered here because:
// 1. For copilot edits: positions are already optimized by the backend
// 2. For other syncs: the existing positions should be preserved
// This prevents ID conflicts and unnecessary position updates
logger.info('Successfully updated local stores with fresh workflow state')
logger.info('Merged fresh workflow state with local state')
})
.catch((error) => {
logger.error('Failed to import stores for workflow state update:', error)
@@ -558,6 +597,16 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
`URL workflow changed from ${currentWorkflowId} to ${urlWorkflowId}, switching rooms`
)
try {
const { useOperationQueueStore } = require('@/stores/operation-queue/store')
// Flush debounced updates for the old workflow before switching rooms
if (currentWorkflowId) {
useOperationQueueStore.getState().flushDebouncedForWorkflow(currentWorkflowId)
} else {
useOperationQueueStore.getState().flushAllDebounced()
}
} catch {}
// Leave current workflow first if we're in one
if (currentWorkflowId) {
logger.info(`Leaving current workflow ${currentWorkflowId} before joining ${urlWorkflowId}`)
@@ -615,6 +664,11 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
const leaveWorkflow = useCallback(() => {
if (socket && currentWorkflowId) {
logger.info(`Leaving workflow: ${currentWorkflowId}`)
try {
const { useOperationQueueStore } = require('@/stores/operation-queue/store')
useOperationQueueStore.getState().flushDebouncedForWorkflow(currentWorkflowId)
useOperationQueueStore.getState().cancelOperationsForWorkflow(currentWorkflowId)
} catch {}
socket.emit('leave-workflow')
setCurrentWorkflowId(null)
setPresenceUsers([])
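
leaveWorkflow drains the queue before leaving the room: debounced edits for the workflow are flushed into the queue, and whatever remains queued for that workflow is then cancelled. A usage sketch of that call order, assuming the queue API added in this PR (the helper name is illustrative):

    import { useOperationQueueStore } from '@/stores/operation-queue/store'

    // Mirrors the leaveWorkflow sequence: flush debounced edits for the workflow
    // into the queue, then cancel its queued operations before leaving the room.
    export function drainQueueForWorkflow(workflowId: string): void {
      const queue = useOperationQueueStore.getState()
      try {
        queue.flushDebouncedForWorkflow(workflowId)
        queue.cancelOperationsForWorkflow(workflowId)
      } catch {
        // Best-effort; leaving the room must not be blocked by queue cleanup.
      }
    }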

View File

@@ -492,7 +492,6 @@ export function useCollaborativeWorkflow() {
(operation: string, target: string, payload: any, localAction: () => void) => {
if (isApplyingRemoteChange.current) return
// Skip socket operations when in diff mode
if (isShowingDiff) {
logger.debug('Skipping debounced socket operation in diff mode:', operation)
return
@@ -673,32 +672,6 @@ export function useCollaborativeWorkflow() {
(id: string, name: string) => {
executeQueuedOperation('update-name', 'block', { id, name }, () => {
workflowStore.updateBlockName(id, name)
// Handle pending subblock updates
const globalWindow = window as any
const pendingUpdates = globalWindow.__pendingSubblockUpdates
if (pendingUpdates && Array.isArray(pendingUpdates)) {
// Queue each subblock update individually
for (const update of pendingUpdates) {
const { blockId, subBlockId, newValue } = update
const operationId = crypto.randomUUID()
addToQueue({
id: operationId,
operation: {
operation: 'subblock-update',
target: 'subblock',
payload: { blockId, subblockId: subBlockId, value: newValue },
},
workflowId: activeWorkflowId || '',
userId: session?.user?.id || 'unknown',
})
subBlockStore.setValue(blockId, subBlockId, newValue)
}
// Clear the pending updates
globalWindow.__pendingSubblockUpdates = undefined
}
})
},
[

View File

@@ -93,7 +93,10 @@ function handleBeforeUnload(event: BeforeUnloadEvent): void {
}
}
// Note: Socket.IO handles real-time sync automatically
try {
const { useOperationQueueStore } = require('@/stores/operation-queue/store')
useOperationQueueStore.getState().flushAllDebounced()
} catch {}
// Standard beforeunload pattern
event.preventDefault()
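
The beforeunload handler now flushes every debounced operation so an edit made just before closing the tab is handed to the queue instead of dying with its timeout. A sketch of registering such a flush, assuming the same queue API (the real handler also keeps the app's existing unload logic):

    import { useOperationQueueStore } from '@/stores/operation-queue/store'

    export function registerUnloadFlush(): () => void {
      const handler = (_event: BeforeUnloadEvent) => {
        try {
          // Synchronously move every debounced edit into the queue before the page goes away.
          useOperationQueueStore.getState().flushAllDebounced()
        } catch {
          // Best-effort only.
        }
      }
      window.addEventListener('beforeunload', handler)
      return () => window.removeEventListener('beforeunload', handler)
    }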

View File

@@ -31,14 +31,26 @@ interface OperationQueueState {
cancelOperationsForBlock: (blockId: string) => void
cancelOperationsForVariable: (variableId: string) => void
flushAllDebounced: () => void
flushDebouncedForBlock: (blockId: string) => void
flushDebouncedForVariable: (variableId: string) => void
flushDebouncedForWorkflow: (workflowId: string) => void
cancelOperationsForWorkflow: (workflowId: string) => void
triggerOfflineMode: () => void
clearError: () => void
}
const retryTimeouts = new Map<string, NodeJS.Timeout>()
const operationTimeouts = new Map<string, NodeJS.Timeout>()
const subblockDebounceTimeouts = new Map<string, NodeJS.Timeout>()
const variableDebounceTimeouts = new Map<string, NodeJS.Timeout>()
type PendingDebouncedOperation = {
timeout: NodeJS.Timeout
op: Omit<QueuedOperation, 'timestamp' | 'retryCount' | 'status'>
}
const subblockDebounced = new Map<string, PendingDebouncedOperation>()
const variableDebounced = new Map<string, PendingDebouncedOperation>()
let emitWorkflowOperation:
| ((operation: string, target: string, payload: any, operationId?: string) => void)
@@ -59,8 +71,11 @@ export function registerEmitFunctions(
emitWorkflowOperation = workflowEmit
emitSubblockUpdate = subblockEmit
emitVariableUpdate = variableEmit
currentRegisteredWorkflowId = workflowId
}
let currentRegisteredWorkflowId: string | null = null
export const useOperationQueueStore = create<OperationQueueState>((set, get) => ({
operations: [],
isProcessing: false,
@@ -76,9 +91,9 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
const { blockId, subblockId } = operation.operation.payload
const debounceKey = `${blockId}-${subblockId}`
const existingTimeout = subblockDebounceTimeouts.get(debounceKey)
if (existingTimeout) {
clearTimeout(existingTimeout)
const existing = subblockDebounced.get(debounceKey)
if (existing) {
clearTimeout(existing.timeout)
}
set((state) => ({
@@ -95,23 +110,25 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
}))
const timeoutId = setTimeout(() => {
subblockDebounceTimeouts.delete(debounceKey)
const pending = subblockDebounced.get(debounceKey)
subblockDebounced.delete(debounceKey)
if (pending) {
const queuedOp: QueuedOperation = {
...pending.op,
timestamp: Date.now(),
retryCount: 0,
status: 'pending',
}
const queuedOp: QueuedOperation = {
...operation,
timestamp: Date.now(),
retryCount: 0,
status: 'pending',
set((state) => ({
operations: [...state.operations, queuedOp],
}))
get().processNextOperation()
}
}, 25)
set((state) => ({
operations: [...state.operations, queuedOp],
}))
get().processNextOperation()
}, 25) // 25ms debounce for subblock operations - optimized for collaborative editing
subblockDebounceTimeouts.set(debounceKey, timeoutId)
subblockDebounced.set(debounceKey, { timeout: timeoutId, op: operation })
return
}
@@ -124,9 +141,9 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
const { variableId, field } = operation.operation.payload
const debounceKey = `${variableId}-${field}`
const existingTimeout = variableDebounceTimeouts.get(debounceKey)
if (existingTimeout) {
clearTimeout(existingTimeout)
const existing = variableDebounced.get(debounceKey)
if (existing) {
clearTimeout(existing.timeout)
}
set((state) => ({
@@ -143,23 +160,25 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
}))
const timeoutId = setTimeout(() => {
variableDebounceTimeouts.delete(debounceKey)
const pending = variableDebounced.get(debounceKey)
variableDebounced.delete(debounceKey)
if (pending) {
const queuedOp: QueuedOperation = {
...pending.op,
timestamp: Date.now(),
retryCount: 0,
status: 'pending',
}
const queuedOp: QueuedOperation = {
...operation,
timestamp: Date.now(),
retryCount: 0,
status: 'pending',
set((state) => ({
operations: [...state.operations, queuedOp],
}))
get().processNextOperation()
}
}, 25)
set((state) => ({
operations: [...state.operations, queuedOp],
}))
get().processNextOperation()
}, 25) // 25ms debounce for variable operations - same as subblocks
variableDebounceTimeouts.set(debounceKey, timeoutId)
variableDebounced.set(debounceKey, { timeout: timeoutId, op: operation })
return
}
@@ -249,10 +268,10 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
) {
const { blockId, subblockId } = operation.operation.payload
const debounceKey = `${blockId}-${subblockId}`
const debounceTimeout = subblockDebounceTimeouts.get(debounceKey)
if (debounceTimeout) {
clearTimeout(debounceTimeout)
subblockDebounceTimeouts.delete(debounceKey)
const pending = subblockDebounced.get(debounceKey)
if (pending) {
clearTimeout(pending.timeout)
subblockDebounced.delete(debounceKey)
}
}
@@ -263,10 +282,10 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
) {
const { variableId, field } = operation.operation.payload
const debounceKey = `${variableId}-${field}`
const debounceTimeout = variableDebounceTimeouts.get(debounceKey)
if (debounceTimeout) {
clearTimeout(debounceTimeout)
variableDebounceTimeouts.delete(debounceKey)
const pending = variableDebounced.get(debounceKey)
if (pending) {
clearTimeout(pending.timeout)
variableDebounced.delete(debounceKey)
}
}
@@ -302,10 +321,10 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
) {
const { blockId, subblockId } = operation.operation.payload
const debounceKey = `${blockId}-${subblockId}`
const debounceTimeout = subblockDebounceTimeouts.get(debounceKey)
if (debounceTimeout) {
clearTimeout(debounceTimeout)
subblockDebounceTimeouts.delete(debounceKey)
const pending = subblockDebounced.get(debounceKey)
if (pending) {
clearTimeout(pending.timeout)
subblockDebounced.delete(debounceKey)
}
}
@@ -316,10 +335,10 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
) {
const { variableId, field } = operation.operation.payload
const debounceKey = `${variableId}-${field}`
const debounceTimeout = variableDebounceTimeouts.get(debounceKey)
if (debounceTimeout) {
clearTimeout(debounceTimeout)
variableDebounceTimeouts.delete(debounceKey)
const pending = variableDebounced.get(debounceKey)
if (pending) {
clearTimeout(pending.timeout)
variableDebounced.delete(debounceKey)
}
}
@@ -390,10 +409,17 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
return
}
// Find the first pending operation (FIFO - first in, first out)
const nextOperation = state.operations.find((op) => op.status === 'pending')
const nextOperation = currentRegisteredWorkflowId
? state.operations.find(
(op) => op.status === 'pending' && op.workflowId === currentRegisteredWorkflowId
)
: state.operations.find((op) => op.status === 'pending')
if (!nextOperation) {
return // No pending operations
return
}
if (currentRegisteredWorkflowId && nextOperation.workflowId !== currentRegisteredWorkflowId) {
return
}
// Mark as processing
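
processNextOperation now only picks pending operations belonging to the currently registered workflow, which appears to be the "fix forever stuck in processing" item: an operation for a workflow whose emit functions are no longer registered is simply skipped rather than taken up. A sketch of the selection rule, with simplified types:

    // Workflow-scoped FIFO pick; only operations for the currently registered
    // workflow are eligible when one is registered.
    type QueuedOp = { id: string; workflowId: string; status: 'pending' | 'processing' | 'confirmed' }

    function pickNext(operations: QueuedOp[], registeredWorkflowId: string | null): QueuedOp | undefined {
      if (registeredWorkflowId) {
        return operations.find(
          (op) => op.status === 'pending' && op.workflowId === registeredWorkflowId
        )
      }
      return operations.find((op) => op.status === 'pending')
    }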
@@ -443,13 +469,13 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
// Cancel all debounce timeouts for this block's subblocks
const keysToDelete: string[] = []
for (const [key, timeout] of subblockDebounceTimeouts.entries()) {
for (const [key, pending] of subblockDebounced.entries()) {
if (key.startsWith(`${blockId}-`)) {
clearTimeout(timeout)
clearTimeout(pending.timeout)
keysToDelete.push(key)
}
}
keysToDelete.forEach((key) => subblockDebounceTimeouts.delete(key))
keysToDelete.forEach((key) => subblockDebounced.delete(key))
// Find and cancel operation timeouts for operations related to this block
const state = get()
@@ -503,13 +529,13 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
// Cancel all debounce timeouts for this variable
const keysToDelete: string[] = []
for (const [key, timeout] of variableDebounceTimeouts.entries()) {
for (const [key, pending] of variableDebounced.entries()) {
if (key.startsWith(`${variableId}-`)) {
clearTimeout(timeout)
clearTimeout(pending.timeout)
keysToDelete.push(key)
}
}
keysToDelete.forEach((key) => variableDebounceTimeouts.delete(key))
keysToDelete.forEach((key) => variableDebounced.delete(key))
// Find and cancel operation timeouts for operations related to this variable
const state = get()
@@ -560,6 +586,142 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
get().processNextOperation()
},
flushAllDebounced: () => {
const toEnqueue: Omit<QueuedOperation, 'timestamp' | 'retryCount' | 'status'>[] = []
subblockDebounced.forEach((pending, key) => {
clearTimeout(pending.timeout)
subblockDebounced.delete(key)
toEnqueue.push(pending.op)
})
variableDebounced.forEach((pending, key) => {
clearTimeout(pending.timeout)
variableDebounced.delete(key)
toEnqueue.push(pending.op)
})
if (toEnqueue.length === 0) return
set((state) => ({
operations: [
...state.operations,
...toEnqueue.map((op) => ({
...op,
timestamp: Date.now(),
retryCount: 0,
status: 'pending' as const,
})),
],
}))
get().processNextOperation()
},
flushDebouncedForBlock: (blockId: string) => {
const toEnqueue: Omit<QueuedOperation, 'timestamp' | 'retryCount' | 'status'>[] = []
const keys: string[] = []
subblockDebounced.forEach((pending, key) => {
if (key.startsWith(`${blockId}-`)) {
clearTimeout(pending.timeout)
keys.push(key)
toEnqueue.push(pending.op)
}
})
keys.forEach((k) => subblockDebounced.delete(k))
if (toEnqueue.length === 0) return
set((state) => ({
operations: [
...state.operations,
...toEnqueue.map((op) => ({
...op,
timestamp: Date.now(),
retryCount: 0,
status: 'pending' as const,
})),
],
}))
get().processNextOperation()
},
flushDebouncedForVariable: (variableId: string) => {
const toEnqueue: Omit<QueuedOperation, 'timestamp' | 'retryCount' | 'status'>[] = []
const keys: string[] = []
variableDebounced.forEach((pending, key) => {
if (key.startsWith(`${variableId}-`)) {
clearTimeout(pending.timeout)
keys.push(key)
toEnqueue.push(pending.op)
}
})
keys.forEach((k) => variableDebounced.delete(k))
if (toEnqueue.length === 0) return
set((state) => ({
operations: [
...state.operations,
...toEnqueue.map((op) => ({
...op,
timestamp: Date.now(),
retryCount: 0,
status: 'pending' as const,
})),
],
}))
get().processNextOperation()
},
flushDebouncedForWorkflow: (workflowId: string) => {
const toEnqueue: Omit<QueuedOperation, 'timestamp' | 'retryCount' | 'status'>[] = []
const subblockKeys: string[] = []
subblockDebounced.forEach((pending, key) => {
if (pending.op.workflowId === workflowId) {
clearTimeout(pending.timeout)
subblockKeys.push(key)
toEnqueue.push(pending.op)
}
})
subblockKeys.forEach((k) => subblockDebounced.delete(k))
const variableKeys: string[] = []
variableDebounced.forEach((pending, key) => {
if (pending.op.workflowId === workflowId) {
clearTimeout(pending.timeout)
variableKeys.push(key)
toEnqueue.push(pending.op)
}
})
variableKeys.forEach((k) => variableDebounced.delete(k))
if (toEnqueue.length === 0) return
set((state) => ({
operations: [
...state.operations,
...toEnqueue.map((op) => ({
...op,
timestamp: Date.now(),
retryCount: 0,
status: 'pending' as const,
})),
],
}))
get().processNextOperation()
},
cancelOperationsForWorkflow: (workflowId: string) => {
const state = get()
retryTimeouts.forEach((timeout, opId) => {
const op = state.operations.find((o) => o.id === opId)
if (op && op.workflowId === workflowId) {
clearTimeout(timeout)
retryTimeouts.delete(opId)
}
})
operationTimeouts.forEach((timeout, opId) => {
const op = state.operations.find((o) => o.id === opId)
if (op && op.workflowId === workflowId) {
clearTimeout(timeout)
operationTimeouts.delete(opId)
}
})
set((s) => ({
operations: s.operations.filter((op) => op.workflowId !== workflowId),
isProcessing: false,
}))
},
triggerOfflineMode: () => {
logger.error('Operation failed after retries - triggering offline mode')