Mirror of https://github.com/simstudioai/sim.git (synced 2026-02-01 10:14:56 -05:00)

improvement(ux): optimistic updates for envvars, custom tools, folder operations, and workflow deletions; shared hook for connection tags & tag dropdown; fix for triggers not re-rendering when a trigger is selected (#1861)
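The shared helper behind these changes is withOptimisticUpdate (added to @/lib/utils in this diff). A minimal standalone sketch of the pattern it standardizes, with a hypothetical store and endpoint (not code from this commit):

// Hedged sketch: 'store' and '/api/items' are illustrative, not from this commit.
const store: { items: Record<string, { name: string }> } = { items: {} }

async function renameItem(id: string, newName: string): Promise<void> {
  const original = store.items[id]                 // 1. capture current state for rollback
  store.items[id] = { ...original, name: newName } // 2. update UI state immediately
  try {
    // 3. persist to the server
    const res = await fetch(`/api/items/${id}`, {
      method: 'PUT',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ name: newName }),
    })
    if (!res.ok) throw new Error('Request failed')
  } catch (error) {
    store.items[id] = original                     // 4. roll back on failure
    throw error                                    // rethrow so callers can react
  }
}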
@@ -3,11 +3,14 @@
import { useCallback, useRef, useState } from 'react'
import clsx from 'clsx'
import { ChevronDown, RepeatIcon, SplitIcon } from 'lucide-react'
import { shallow } from 'zustand/shallow'
import { createLogger } from '@/lib/logs/console/logger'
import { extractFieldsFromSchema } from '@/lib/response-format'
import type { ConnectedBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel-new/components/editor/hooks/use-block-connections'
import { useBlockOutputFields } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-block-output-fields'
import { getBlock } from '@/blocks/registry'
import { getTool } from '@/tools/utils'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import { FieldItem, type SchemaField, TREE_SPACING } from './components/field-item/field-item'

const logger = createLogger('ConnectionBlocks')
@@ -22,144 +25,6 @@ const TREE_STYLES = {
LINE_OFFSET: 4,
} as const

const RESERVED_KEYS = new Set(['type', 'description'])

/**
* Checks if a property is an object type
*/
const isObject = (prop: any): boolean => prop && typeof prop === 'object'

/**
* Extracts nested fields from array or object properties
*/
const extractChildFields = (prop: any): SchemaField[] | undefined => {
if (!isObject(prop)) return undefined

if (prop.properties && isObject(prop.properties)) {
return extractNestedFields(prop.properties)
}

if (prop.items?.properties && isObject(prop.items.properties)) {
return extractNestedFields(prop.items.properties)
}

if (!('type' in prop)) {
return extractNestedFields(prop)
}

if (prop.type === 'array') {
const itemDefs = Object.fromEntries(
Object.entries(prop).filter(([key]) => !RESERVED_KEYS.has(key))
)
if (Object.keys(itemDefs).length > 0) {
return extractNestedFields(itemDefs)
}
}

return undefined
}

/**
* Recursively extracts nested fields from output properties
*/
const extractNestedFields = (properties: Record<string, any>): SchemaField[] => {
return Object.entries(properties).map(([name, prop]) => {
const baseType = isObject(prop) && typeof prop.type === 'string' ? prop.type : 'string'
const type = isObject(prop) && !('type' in prop) ? 'object' : baseType

return {
name,
type,
description: isObject(prop) ? prop.description : undefined,
children: extractChildFields(prop),
}
})
}

/**
* Gets tool outputs for a block's operation
*/
const getToolOutputs = (blockConfig: any, connection: ConnectedBlock): Record<string, any> => {
if (!blockConfig?.tools?.config?.tool || !connection.operation) return {}

try {
const toolId = blockConfig.tools.config.tool({ operation: connection.operation })
if (!toolId) return {}

const toolConfig = getTool(toolId)
return toolConfig?.outputs || {}
} catch {
return {}
}
}

/**
* Creates a schema field from an output definition
*/
const createFieldFromOutput = (
name: string,
output: any,
responseFormatFields?: SchemaField[]
): SchemaField => {
const hasExplicitType = isObject(output) && typeof output.type === 'string'
const type = hasExplicitType ? output.type : isObject(output) ? 'object' : 'string'

const field: SchemaField = {
name,
type,
description: isObject(output) && 'description' in output ? output.description : undefined,
}

if (name === 'data' && responseFormatFields && responseFormatFields.length > 0) {
field.children = responseFormatFields
} else {
field.children = extractChildFields(output)
}

return field
}

/**
* Builds complete field list for a connection, combining base outputs and responseFormat
*/
const buildConnectionFields = (connection: ConnectedBlock): SchemaField[] => {
const blockConfig = getBlock(connection.type)

if (!blockConfig && (connection.type === 'loop' || connection.type === 'parallel')) {
return [
{
name: 'results',
type: 'array',
description: 'Array of results from the loop/parallel execution',
},
]
}

const toolOutputs = getToolOutputs(blockConfig, connection)
const baseOutputs =
Object.keys(toolOutputs).length > 0
? toolOutputs
: connection.outputs || blockConfig?.outputs || {}

const responseFormatFields = extractFieldsFromSchema(connection.responseFormat)

if (responseFormatFields.length > 0 && Object.keys(baseOutputs).length === 0) {
return responseFormatFields
}

if (Object.keys(baseOutputs).length === 0) {
return []
}

return Object.entries(baseOutputs).map(([name, output]) =>
createFieldFromOutput(
name,
output,
responseFormatFields.length > 0 ? responseFormatFields : undefined
)
)
}

/**
* Calculates total height of visible nested fields recursively
*/
@@ -192,6 +57,125 @@ const calculateFieldsHeight = (
return totalHeight
}

interface ConnectionItemProps {
connection: ConnectedBlock
isExpanded: boolean
onToggleExpand: (connectionId: string) => void
isFieldExpanded: (connectionId: string, fieldPath: string) => boolean
onConnectionDragStart: (e: React.DragEvent, connection: ConnectedBlock) => void
renderFieldTree: (
fields: SchemaField[],
parentPath: string,
level: number,
connection: ConnectedBlock
) => React.ReactNode
connectionRef: (el: HTMLDivElement | null) => void
mergedSubBlocks: Record<string, any>
sourceBlock: { triggerMode?: boolean } | undefined
}

/**
* Individual connection item component that uses the hook
*/
function ConnectionItem({
connection,
isExpanded,
onToggleExpand,
isFieldExpanded,
onConnectionDragStart,
renderFieldTree,
connectionRef,
mergedSubBlocks,
sourceBlock,
}: ConnectionItemProps) {
const blockConfig = getBlock(connection.type)

const fields = useBlockOutputFields({
blockId: connection.id,
blockType: connection.type,
mergedSubBlocks,
responseFormat: connection.responseFormat,
operation: connection.operation,
triggerMode: sourceBlock?.triggerMode,
})
const hasFields = fields.length > 0

let Icon = blockConfig?.icon
let bgColor = blockConfig?.bgColor || '#6B7280'

if (!blockConfig) {
if (connection.type === 'loop') {
Icon = RepeatIcon as typeof Icon
bgColor = '#2FB3FF'
} else if (connection.type === 'parallel') {
Icon = SplitIcon as typeof Icon
bgColor = '#FEE12B'
}
}

return (
<div className='mb-[2px] last:mb-0' ref={connectionRef}>
<div
draggable
onDragStart={(e) => onConnectionDragStart(e, connection)}
className={clsx(
'group flex h-[25px] cursor-grab items-center gap-[8px] rounded-[8px] px-[5.5px] text-[14px] hover:bg-[#2C2C2C] active:cursor-grabbing dark:hover:bg-[#2C2C2C]',
hasFields && 'cursor-pointer'
)}
onClick={() => hasFields && onToggleExpand(connection.id)}
>
<div
className='relative flex h-[16px] w-[16px] flex-shrink-0 items-center justify-center overflow-hidden rounded-[4px]'
style={{ backgroundColor: bgColor }}
>
{Icon && (
<Icon
className={clsx(
'text-white transition-transform duration-200',
hasFields && 'group-hover:scale-110',
'!h-[10px] !w-[10px]'
)}
/>
)}
</div>
<span
className={clsx(
'truncate font-medium',
'text-[#AEAEAE] group-hover:text-[#E6E6E6] dark:text-[#AEAEAE] dark:group-hover:text-[#E6E6E6]'
)}
>
{connection.name}
</span>
{hasFields && (
<ChevronDown
className={clsx(
'h-3.5 w-3.5 flex-shrink-0 transition-transform',
'text-[#AEAEAE] group-hover:text-[#E6E6E6] dark:text-[#AEAEAE] dark:group-hover:text-[#E6E6E6]',
isExpanded && 'rotate-180'
)}
/>
)}
</div>

{isExpanded && hasFields && (
<div className='relative'>
<div
className='pointer-events-none absolute'
style={{
left: `${TREE_SPACING.VERTICAL_LINE_LEFT_OFFSET}px`,
top: `${TREE_STYLES.LINE_OFFSET}px`,
width: '1px',
height: `${calculateFieldsHeight(fields, '', connection.id, isFieldExpanded) - TREE_STYLES.LINE_OFFSET * 2}px`,
background: TREE_STYLES.LINE_COLOR,
}}
/>
{renderFieldTree(fields, '', 0, connection)}
</div>
)}
</div>
)
}

/**
* Connection blocks component that displays incoming connections with their schemas
*/
@@ -201,6 +185,31 @@ export function ConnectionBlocks({ connections, currentBlockId }: ConnectionBloc
const scrollContainerRef = useRef<HTMLDivElement>(null)
const connectionRefs = useRef<Map<string, HTMLDivElement>>(new Map())

const { blocks } = useWorkflowStore(
(state) => ({
blocks: state.blocks,
}),
shallow
)

const workflowId = useWorkflowRegistry((state) => state.activeWorkflowId)
const workflowSubBlockValues = useSubBlockStore((state) =>
workflowId ? (state.workflowValues[workflowId] ?? {}) : {}
)

const getMergedSubBlocks = useCallback(
(sourceBlockId: string): Record<string, any> => {
const base = blocks[sourceBlockId]?.subBlocks || {}
const live = workflowSubBlockValues?.[sourceBlockId] || {}
const merged: Record<string, any> = { ...base }
for (const [subId, liveVal] of Object.entries(live)) {
merged[subId] = { ...(base[subId] || {}), value: liveVal }
}
return merged
},
[blocks, workflowSubBlockValues]
)

const toggleConnectionExpansion = useCallback((connectionId: string) => {
setExpandedConnections((prev) => {
const newSet = new Set(prev)
@@ -327,94 +336,28 @@ export function ConnectionBlocks({ connections, currentBlockId }: ConnectionBloc
return (
<div ref={scrollContainerRef} className='space-y-[2px]'>
{connections.map((connection) => {
const blockConfig = getBlock(connection.type)
const isExpanded = expandedConnections.has(connection.id)
const fields = buildConnectionFields(connection)
const hasFields = fields.length > 0

let Icon = blockConfig?.icon
let bgColor = blockConfig?.bgColor || '#6B7280'

if (!blockConfig) {
if (connection.type === 'loop') {
Icon = RepeatIcon as typeof Icon
bgColor = '#2FB3FF'
} else if (connection.type === 'parallel') {
Icon = SplitIcon as typeof Icon
bgColor = '#FEE12B'
}
}
const mergedSubBlocks = getMergedSubBlocks(connection.id)
const sourceBlock = blocks[connection.id]

return (
<div
<ConnectionItem
key={connection.id}
className='mb-[2px] last:mb-0'
ref={(el) => {
connection={connection}
isExpanded={expandedConnections.has(connection.id)}
onToggleExpand={toggleConnectionExpansion}
isFieldExpanded={isFieldExpanded}
onConnectionDragStart={handleConnectionDragStart}
renderFieldTree={renderFieldTree}
connectionRef={(el) => {
if (el) {
connectionRefs.current.set(connection.id, el)
} else {
connectionRefs.current.delete(connection.id)
}
}}
>
<div
draggable
onDragStart={(e) => handleConnectionDragStart(e, connection)}
className={clsx(
'group flex h-[25px] cursor-grab items-center gap-[8px] rounded-[8px] px-[5.5px] text-[14px] hover:bg-[#2C2C2C] active:cursor-grabbing dark:hover:bg-[#2C2C2C]',
hasFields && 'cursor-pointer'
)}
onClick={() => hasFields && toggleConnectionExpansion(connection.id)}
>
<div
className='relative flex h-[16px] w-[16px] flex-shrink-0 items-center justify-center overflow-hidden rounded-[4px]'
style={{ backgroundColor: bgColor }}
>
{Icon && (
<Icon
className={clsx(
'text-white transition-transform duration-200',
hasFields && 'group-hover:scale-110',
'!h-[10px] !w-[10px]'
)}
/>
)}
</div>
<span
className={clsx(
'truncate font-medium',
'text-[#AEAEAE] group-hover:text-[#E6E6E6] dark:text-[#AEAEAE] dark:group-hover:text-[#E6E6E6]'
)}
>
{connection.name}
</span>
{hasFields && (
<ChevronDown
className={clsx(
'h-3.5 w-3.5 flex-shrink-0 transition-transform',
'text-[#AEAEAE] group-hover:text-[#E6E6E6] dark:text-[#AEAEAE] dark:group-hover:text-[#E6E6E6]',
isExpanded && 'rotate-180'
)}
/>
)}
</div>

{isExpanded && hasFields && (
<div className='relative'>
<div
className='pointer-events-none absolute'
style={{
left: `${TREE_SPACING.VERTICAL_LINE_LEFT_OFFSET}px`,
top: `${TREE_STYLES.LINE_OFFSET}px`,
width: '1px',
height: `${calculateFieldsHeight(fields, '', connection.id, isFieldExpanded) - TREE_STYLES.LINE_OFFSET * 2}px`,
background: TREE_STYLES.LINE_COLOR,
}}
/>
{renderFieldTree(fields, '', 0, connection)}
</div>
)}
</div>
mergedSubBlocks={mergedSubBlocks}
sourceBlock={sourceBlock}
/>
)
})}
</div>

@@ -493,7 +493,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
(metric: { name: string }) => `${normalizedBlockName}.${metric.name.toLowerCase()}`
)
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs)
const outputPaths = getBlockOutputPaths(sourceBlock.type, mergedSubBlocks)
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
}
} else if (sourceBlock.type === 'variables') {
@@ -515,7 +515,11 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
if (schemaFields.length > 0) {
blockTags = schemaFields.map((field) => `${normalizedBlockName}.${field.name}`)
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs || {})
const outputPaths = getBlockOutputPaths(
sourceBlock.type,
mergedSubBlocks,
sourceBlock.triggerMode
)
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
}
} else if (!blockConfig.outputs || Object.keys(blockConfig.outputs).length === 0) {
@@ -573,21 +577,19 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
if (dynamicOutputs.length > 0) {
blockTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs || {})
const outputPaths = getBlockOutputPaths(sourceBlock.type, mergedSubBlocks, true)
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
}
} else if (sourceBlock.type === 'approval') {
// For approval block, use dynamic outputs based on inputFormat
const dynamicOutputs = getBlockOutputPaths(sourceBlock.type, mergedSubBlocks)

// If it's a self-reference, only show url (available immediately)
const isSelfReference = activeSourceBlockId === blockId

if (dynamicOutputs.length > 0) {
const allTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
blockTags = isSelfReference ? allTags.filter((tag) => tag.endsWith('.url')) : allTags
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs || {})
const outputPaths = getBlockOutputPaths(sourceBlock.type, mergedSubBlocks)
const allTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
blockTags = isSelfReference ? allTags.filter((tag) => tag.endsWith('.url')) : allTags
}
@@ -601,7 +603,11 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
if (toolOutputPaths.length > 0) {
blockTags = toolOutputPaths.map((path) => `${normalizedBlockName}.${path}`)
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs || {})
const outputPaths = getBlockOutputPaths(
sourceBlock.type,
mergedSubBlocks,
sourceBlock.triggerMode
)
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
}
}
@@ -845,7 +851,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
(metric: { name: string }) => `${normalizedBlockName}.${metric.name.toLowerCase()}`
)
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs)
const outputPaths = getBlockOutputPaths(accessibleBlock.type, mergedSubBlocks)
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
}
} else if (accessibleBlock.type === 'variables') {
@@ -867,7 +873,11 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
if (schemaFields.length > 0) {
blockTags = schemaFields.map((field) => `${normalizedBlockName}.${field.name}`)
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs || {})
const outputPaths = getBlockOutputPaths(
accessibleBlock.type,
mergedSubBlocks,
accessibleBlock.triggerMode
)
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
}
} else if (!blockConfig.outputs || Object.keys(blockConfig.outputs).length === 0) {
@@ -879,21 +889,19 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
if (dynamicOutputs.length > 0) {
blockTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs || {})
const outputPaths = getBlockOutputPaths(accessibleBlock.type, mergedSubBlocks, true)
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
}
} else if (accessibleBlock.type === 'approval') {
// For approval block, use dynamic outputs based on inputFormat
const dynamicOutputs = getBlockOutputPaths(accessibleBlock.type, mergedSubBlocks)

// If it's a self-reference, only show url (available immediately)
const isSelfReference = accessibleBlockId === blockId

if (dynamicOutputs.length > 0) {
const allTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
blockTags = isSelfReference ? allTags.filter((tag) => tag.endsWith('.url')) : allTags
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs || {})
const outputPaths = getBlockOutputPaths(accessibleBlock.type, mergedSubBlocks)
const allTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
blockTags = isSelfReference ? allTags.filter((tag) => tag.endsWith('.url')) : allTags
}
@@ -907,7 +915,11 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
if (toolOutputPaths.length > 0) {
blockTags = toolOutputPaths.map((path) => `${normalizedBlockName}.${path}`)
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs || {})
const outputPaths = getBlockOutputPaths(
accessibleBlock.type,
mergedSubBlocks,
accessibleBlock.triggerMode
)
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
}
}

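Every branch above ends by mapping output paths onto block-scoped tags. A small runnable illustration (the block name and paths are hypothetical, not from this codebase):

const outputPaths = ['content', 'tokens.total']  // hypothetical paths from getBlockOutputPaths
const normalizedBlockName = 'agent1'             // hypothetical normalized block name
const blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
// => ['agent1.content', 'agent1.tokens.total']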
@@ -36,7 +36,25 @@ export function useEditorSubblockLayout(
const blocks = useWorkflowStore.getState().blocks || {}
const mergedMap = mergeSubblockState(blocks, activeWorkflowId || undefined, blockId)
const mergedState = mergedMap ? mergedMap[blockId] : undefined
stateToUse = mergedState?.subBlocks || {}
const mergedSubBlocks = mergedState?.subBlocks || {}

stateToUse = Object.keys(mergedSubBlocks).reduce(
(acc, key) => {
const value =
blockSubBlockValues[key] !== undefined
? blockSubBlockValues[key]
: (mergedSubBlocks[key]?.value ?? null)
acc[key] = { value }
return acc
},
{} as Record<string, { value: unknown }>
)

Object.keys(blockSubBlockValues).forEach((key) => {
if (!(key in stateToUse)) {
stateToUse[key] = { value: blockSubBlockValues[key] }
}
})

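// Illustration of the precedence above (values are hypothetical):
// mergedSubBlocks = { model: { value: 'base' } }, blockSubBlockValues = { model: 'live' }
// => stateToUse.model.value === 'live' (live subblock values win over the merged snapshot)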
// Filter visible blocks and those that meet their conditions
const visibleSubBlocks = (config.subBlocks || []).filter((block) => {
@@ -59,12 +77,12 @@ export function useEditorSubblockLayout(
if (block.mode === 'advanced' && !displayAdvancedMode) return false
if (block.mode === 'trigger') {
// Show trigger mode blocks only when in trigger mode
return displayTriggerMode
if (!displayTriggerMode) return false
}
}

// When in trigger mode, hide blocks that don't have mode: 'trigger'
if (displayTriggerMode) {
if (displayTriggerMode && block.mode !== 'trigger') {
return false
}

@@ -0,0 +1,373 @@
'use client'

import { useMemo } from 'react'
import { extractFieldsFromSchema } from '@/lib/response-format'
import { getBlockOutputPaths, getBlockOutputs } from '@/lib/workflows/block-outputs'
import { TRIGGER_TYPES } from '@/lib/workflows/triggers'
import type { SchemaField } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel-new/components/editor/components/connection-blocks/components/field-item/field-item'
import { getBlock } from '@/blocks'
import type { BlockConfig } from '@/blocks/types'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { getTool } from '@/tools/utils'

const RESERVED_KEYS = new Set(['type', 'description'])

/**
* Checks if a property is an object type
*/
const isObject = (prop: any): boolean => prop && typeof prop === 'object'

/**
* Gets a subblock value from the store
*/
const getSubBlockValue = (blockId: string, property: string): any => {
return useSubBlockStore.getState().getValue(blockId, property)
}

/**
* Generates output paths for a tool-based block
*/
const generateToolOutputPaths = (blockConfig: BlockConfig, operation: string): string[] => {
if (!blockConfig?.tools?.config?.tool) return []

try {
const toolId = blockConfig.tools.config.tool({ operation })
if (!toolId) return []

const toolConfig = getTool(toolId)
if (!toolConfig?.outputs) return []

return generateOutputPaths(toolConfig.outputs)
} catch {
return []
}
}

/**
* Recursively generates all output paths from an outputs schema
*/
const generateOutputPaths = (outputs: Record<string, any>, prefix = ''): string[] => {
const paths: string[] = []

for (const [key, value] of Object.entries(outputs)) {
const currentPath = prefix ? `${prefix}.${key}` : key

if (typeof value === 'string') {
paths.push(currentPath)
} else if (typeof value === 'object' && value !== null) {
if ('type' in value && typeof value.type === 'string') {
paths.push(currentPath)
// Handle nested objects and arrays
if (value.type === 'object' && value.properties) {
paths.push(...generateOutputPaths(value.properties, currentPath))
} else if (value.type === 'array' && value.items?.properties) {
paths.push(...generateOutputPaths(value.items.properties, currentPath))
} else if (
value.type === 'array' &&
value.items &&
typeof value.items === 'object' &&
!('type' in value.items)
) {
paths.push(...generateOutputPaths(value.items, currentPath))
}
} else {
const subPaths = generateOutputPaths(value, currentPath)
paths.push(...subPaths)
}
} else {
paths.push(currentPath)
}
}

return paths
}

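// Worked example (hypothetical schema, not from this codebase):
// generateOutputPaths({ data: { type: 'object', properties: { id: { type: 'string' } } } })
// => ['data', 'data.id']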
/**
* Extracts nested fields from array or object properties
*/
const extractChildFields = (prop: any): SchemaField[] | undefined => {
if (!isObject(prop)) return undefined

if (prop.properties && isObject(prop.properties)) {
return extractNestedFields(prop.properties)
}

if (prop.items?.properties && isObject(prop.items.properties)) {
return extractNestedFields(prop.items.properties)
}

if (!('type' in prop)) {
return extractNestedFields(prop)
}

if (prop.type === 'array') {
const itemDefs = Object.fromEntries(
Object.entries(prop).filter(([key]) => !RESERVED_KEYS.has(key))
)
if (Object.keys(itemDefs).length > 0) {
return extractNestedFields(itemDefs)
}
}

return undefined
}

/**
* Recursively extracts nested fields from output properties
*/
const extractNestedFields = (properties: Record<string, any>): SchemaField[] => {
return Object.entries(properties).map(([name, prop]) => {
const baseType = isObject(prop) && typeof prop.type === 'string' ? prop.type : 'string'
const type = isObject(prop) && !('type' in prop) ? 'object' : baseType

return {
name,
type,
description: isObject(prop) ? prop.description : undefined,
children: extractChildFields(prop),
}
})
}

/**
* Creates a schema field from an output definition
*/
const createFieldFromOutput = (
name: string,
output: any,
responseFormatFields?: SchemaField[]
): SchemaField => {
const hasExplicitType = isObject(output) && typeof output.type === 'string'
const type = hasExplicitType ? output.type : isObject(output) ? 'object' : 'string'

const field: SchemaField = {
name,
type,
description: isObject(output) && 'description' in output ? output.description : undefined,
}

if (name === 'data' && responseFormatFields && responseFormatFields.length > 0) {
field.children = responseFormatFields
} else {
field.children = extractChildFields(output)
}

return field
}

/**
* Gets tool outputs for a block's operation
*/
const getToolOutputs = (
blockConfig: BlockConfig | null,
operation?: string
): Record<string, any> => {
if (!blockConfig?.tools?.config?.tool || !operation) return {}

try {
const toolId = blockConfig.tools.config.tool({ operation })
if (!toolId) return {}

const toolConfig = getTool(toolId)
return toolConfig?.outputs || {}
} catch {
return {}
}
}

interface UseBlockOutputFieldsParams {
blockId: string
blockType: string
mergedSubBlocks?: Record<string, any>
responseFormat?: any
operation?: string
triggerMode?: boolean
}

/**
* Hook that generates consistent block output fields using the same logic as tag-dropdown
* Returns SchemaField[] format for use in connection-blocks component
*/
export function useBlockOutputFields({
blockId,
blockType,
mergedSubBlocks,
responseFormat,
operation,
triggerMode,
}: UseBlockOutputFieldsParams): SchemaField[] {
return useMemo(() => {
const blockConfig = getBlock(blockType)

// Handle loop/parallel blocks without config
if (!blockConfig && (blockType === 'loop' || blockType === 'parallel')) {
return [
{
name: 'results',
type: 'array',
description: 'Array of results from the loop/parallel execution',
},
]
}

if (!blockConfig) {
return []
}

// Handle evaluator blocks - use metrics if available
if (blockType === 'evaluator') {
const metricsValue = mergedSubBlocks?.metrics?.value ?? getSubBlockValue(blockId, 'metrics')

if (metricsValue && Array.isArray(metricsValue) && metricsValue.length > 0) {
const validMetrics = metricsValue.filter((metric: { name?: string }) => metric?.name)
return validMetrics.map((metric: { name: string }) => ({
name: metric.name.toLowerCase(),
type: 'number',
description: `Metric: ${metric.name}`,
}))
}
// Fall through to use blockConfig.outputs
}

// Handle variables blocks - use variable assignments if available
if (blockType === 'variables') {
const variablesValue =
mergedSubBlocks?.variables?.value ?? getSubBlockValue(blockId, 'variables')

if (variablesValue && Array.isArray(variablesValue) && variablesValue.length > 0) {
const validAssignments = variablesValue.filter((assignment: { variableName?: string }) =>
assignment?.variableName?.trim()
)
return validAssignments.map((assignment: { variableName: string }) => ({
name: assignment.variableName.trim(),
type: 'any',
description: `Variable: ${assignment.variableName}`,
}))
}
// Fall through to empty or default
return []
}

// Get base outputs using getBlockOutputs (handles triggers, starter, approval, etc.)
let baseOutputs: Record<string, any> = {}

if (blockConfig.category === 'triggers' || blockType === 'starter') {
// Use getBlockOutputPaths to get dynamic outputs, then reconstruct the structure
const outputPaths = getBlockOutputPaths(blockType, mergedSubBlocks, triggerMode)
if (outputPaths.length > 0) {
// Reconstruct outputs structure from paths
// This is a simplified approach - we'll use the paths to build the structure
baseOutputs = getBlockOutputs(blockType, mergedSubBlocks, triggerMode)
} else if (blockType === 'starter') {
const startWorkflowValue = mergedSubBlocks?.startWorkflow?.value
if (startWorkflowValue === 'chat') {
baseOutputs = {
input: { type: 'string', description: 'User message' },
conversationId: { type: 'string', description: 'Conversation ID' },
files: { type: 'files', description: 'Uploaded files' },
}
} else {
const inputFormatValue = mergedSubBlocks?.inputFormat?.value
if (inputFormatValue && Array.isArray(inputFormatValue) && inputFormatValue.length > 0) {
baseOutputs = {}
inputFormatValue.forEach((field: { name?: string; type?: string }) => {
if (field.name && field.name.trim() !== '') {
baseOutputs[field.name] = {
type: field.type || 'string',
description: `Field from input format`,
}
}
})
}
}
} else if (blockType === TRIGGER_TYPES.GENERIC_WEBHOOK) {
// Generic webhook returns the whole payload
baseOutputs = {}
} else {
baseOutputs = {}
}
} else if (triggerMode && blockConfig.triggers?.enabled) {
// Trigger mode enabled
const dynamicOutputs = getBlockOutputPaths(blockType, mergedSubBlocks, true)
if (dynamicOutputs.length > 0) {
baseOutputs = getBlockOutputs(blockType, mergedSubBlocks, true)
} else {
baseOutputs = blockConfig.outputs || {}
}
} else if (blockType === 'approval') {
// Approval block uses dynamic outputs from inputFormat
baseOutputs = getBlockOutputs(blockType, mergedSubBlocks)
} else {
// For tool-based blocks, try to get tool outputs first
const operationValue =
operation ?? mergedSubBlocks?.operation?.value ?? getSubBlockValue(blockId, 'operation')
const toolOutputs = operationValue ? getToolOutputs(blockConfig, operationValue) : {}

if (Object.keys(toolOutputs).length > 0) {
baseOutputs = toolOutputs
} else {
// Use getBlockOutputs which handles inputFormat merging
baseOutputs = getBlockOutputs(blockType, mergedSubBlocks, triggerMode)
}
}

// Handle responseFormat
const responseFormatFields = responseFormat ? extractFieldsFromSchema(responseFormat) : []

// If responseFormat exists and has fields, merge with base outputs
if (responseFormatFields.length > 0) {
// If base outputs is empty, use responseFormat fields directly
if (Object.keys(baseOutputs).length === 0) {
return responseFormatFields.map((field) => ({
name: field.name,
type: field.type,
description: field.description,
children: undefined, // ResponseFormat fields are flat
}))
}

// Otherwise, merge: responseFormat takes precedence for 'data' field
const fields: SchemaField[] = []
const responseFormatFieldNames = new Set(responseFormatFields.map((f) => f.name))

// Add base outputs, replacing 'data' with responseFormat fields if present
for (const [name, output] of Object.entries(baseOutputs)) {
if (name === 'data' && responseFormatFields.length > 0) {
fields.push(
createFieldFromOutput(
name,
output,
responseFormatFields.map((f) => ({
name: f.name,
type: f.type,
description: f.description,
}))
)
)
} else if (!responseFormatFieldNames.has(name)) {
fields.push(createFieldFromOutput(name, output))
}
}

// Add responseFormat fields that aren't in base outputs
for (const field of responseFormatFields) {
if (!baseOutputs[field.name]) {
fields.push({
name: field.name,
type: field.type,
description: field.description,
})
}
}

return fields
}

// No responseFormat, just use base outputs
if (Object.keys(baseOutputs).length === 0) {
return []
}

return Object.entries(baseOutputs).map(([name, output]) => createFieldFromOutput(name, output))
}, [blockId, blockType, mergedSubBlocks, responseFormat, operation, triggerMode])
}
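For reference, a minimal usage sketch of the new hook inside a React component (the id, block type, and resulting field are hypothetical; parameter names come from UseBlockOutputFieldsParams above):

// Must be called from a React component or another hook:
const fields = useBlockOutputFields({
  blockId: 'block-123',   // hypothetical block id
  blockType: 'agent',     // hypothetical block type
  mergedSubBlocks,        // merged base + live subblock state
  responseFormat: undefined,
  operation: undefined,
  triggerMode: false,
})
// fields: SchemaField[], e.g. [{ name: 'content', type: 'string', description: ... }]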
@@ -47,7 +47,6 @@ export function FolderItem({
}: FolderItemProps) {
const { expandedFolders, toggleExpanded, updateFolderAPI, deleteFolder } = useFolderStore()
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
const [isDeleting, setIsDeleting] = useState(false)
const [isDragging, setIsDragging] = useState(false)
const [isEditing, setIsEditing] = useState(false)
const [editValue, setEditValue] = useState(folder.name)
@@ -169,14 +168,12 @@ export function FolderItem({
}

const confirmDelete = async () => {
setIsDeleting(true)
setShowDeleteDialog(false)

try {
await deleteFolder(folder.id, workspaceId)
setShowDeleteDialog(false)
} catch (error) {
logger.error('Failed to delete folder:', { error })
} finally {
setIsDeleting(false)
}
}

@@ -233,15 +230,12 @@ export function FolderItem({
</AlertDialogHeader>

<AlertDialogFooter className='flex'>
<AlertDialogCancel className='h-9 w-full rounded-[8px]' disabled={isDeleting}>
Cancel
</AlertDialogCancel>
<AlertDialogCancel className='h-9 w-full rounded-[8px]'>Cancel</AlertDialogCancel>
<AlertDialogAction
onClick={confirmDelete}
disabled={isDeleting}
className='h-9 w-full rounded-[8px] bg-red-500 text-white transition-all duration-200 hover:bg-red-600 dark:bg-red-500 dark:hover:bg-red-600'
>
{isDeleting ? 'Deleting...' : 'Delete'}
Delete
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
@@ -354,15 +348,12 @@ export function FolderItem({
</AlertDialogHeader>

<AlertDialogFooter className='flex'>
<AlertDialogCancel className='h-9 w-full rounded-[8px]' disabled={isDeleting}>
Cancel
</AlertDialogCancel>
<AlertDialogCancel className='h-9 w-full rounded-[8px]'>Cancel</AlertDialogCancel>
<AlertDialogAction
onClick={confirmDelete}
disabled={isDeleting}
className='h-9 w-full rounded-[8px] bg-red-500 text-white transition-all duration-200 hover:bg-red-600 dark:bg-red-500 dark:hover:bg-red-600'
>
{isDeleting ? 'Deleting...' : 'Delete'}
Delete
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>

@@ -301,15 +301,24 @@ function useDragHandlers(
if (workflowIdsData) {
const workflowIds = JSON.parse(workflowIdsData) as string[]

try {
// Update workflows sequentially to avoid race conditions
for (const workflowId of workflowIds) {
await updateWorkflow(workflowId, { folderId: targetFolderId })
}
logger.info(logMessage || `Moved ${workflowIds.length} workflow(s)`)
} catch (error) {
logger.error('Failed to move workflows:', error)
}
Promise.allSettled(
workflowIds.map((workflowId) => updateWorkflow(workflowId, { folderId: targetFolderId }))
)
.then((results) => {
const failures = results.filter((r) => r.status === 'rejected')

if (failures.length === 0) {
logger.info(logMessage || `Moved ${workflowIds.length} workflow(s)`)
} else if (failures.length === workflowIds.length) {
logger.error('Failed to move all workflows')
} else {
const successCount = results.length - failures.length
logger.warn(`Partially moved workflows: ${successCount}/${workflowIds.length}`)
}
})
.catch((error) => {
logger.error('Unexpected error moving workflows:', error)
})
}

// Handle folder drops

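The move handler above relies on Promise.allSettled never rejecting: each result reports its own outcome, which is standard ES2020 behavior.

// Inside an async context:
const results = await Promise.allSettled([Promise.resolve(1), Promise.reject(new Error('x'))])
const failures = results.filter((r) => r.status === 'rejected')
// results[0] => { status: 'fulfilled', value: 1 }
// results[1] => { status: 'rejected', reason: Error: x }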
@@ -502,3 +502,103 @@ export function generateRequestId(): string {
* No-operation function for use as default callback
*/
export const noop = () => {}

/**
* Options for performing an optimistic update with automatic rollback on error
*/
export interface OptimisticUpdateOptions<T> {
/**
* Function that returns the current state value (for rollback purposes)
*/
getCurrentState: () => T
/**
* Function that performs the optimistic update to the UI state
*/
optimisticUpdate: () => void
/**
* Async function that performs the actual API call
*/
apiCall: () => Promise<void>
/**
* Function that rolls back the state to the original value
* @param originalValue - The value returned by getCurrentState before the update
*/
rollback: (originalValue: T) => void
/**
* Optional error message to log if the operation fails
*/
errorMessage?: string
/**
* Optional callback to execute on error (e.g., show toast notification)
*/
onError?: (error: Error, originalValue: T) => void
/**
* Optional callback that always runs regardless of success or error (e.g., to clear loading states)
*/
onComplete?: () => void
}

/**
* Performs an optimistic update with automatic rollback on error.
* This utility standardizes the pattern of:
* 1. Save current state
* 2. Update UI optimistically
* 3. Make API call
* 4. Rollback on error
*
* @example
* ```typescript
* await withOptimisticUpdate({
*   getCurrentState: () => get().folders[id],
*   optimisticUpdate: () => set(state => ({
*     folders: { ...state.folders, [id]: { ...folder, name: newName } }
*   })),
*   apiCall: async () => {
*     await fetch(`/api/folders/${id}`, {
*       method: 'PUT',
*       body: JSON.stringify({ name: newName })
*     })
*   },
*   rollback: (originalFolder) => set(state => ({
*     folders: { ...state.folders, [id]: originalFolder }
*   })),
*   errorMessage: 'Failed to rename folder',
*   onError: (error) => toast.error('Could not rename folder')
* })
* ```
*/
export async function withOptimisticUpdate<T>(options: OptimisticUpdateOptions<T>): Promise<void> {
const {
getCurrentState,
optimisticUpdate,
apiCall,
rollback,
errorMessage,
onError,
onComplete,
} = options

const originalValue = getCurrentState()

optimisticUpdate()

try {
await apiCall()
} catch (error) {
rollback(originalValue)

if (errorMessage) {
logger.error(errorMessage, { error })
}

if (onError && error instanceof Error) {
onError(error, originalValue)
}

throw error
} finally {
if (onComplete) {
onComplete()
}
}
}

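Note that withOptimisticUpdate rethrows after rolling back, so call sites can still surface the failure. A hedged sketch of a caller:

withOptimisticUpdate({ /* options as in the @example above */ } as OptimisticUpdateOptions<unknown>)
  .catch((error) => {
    // state has already been rolled back here; only user feedback remains
    console.error('Update failed', error)
  })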
@@ -1,6 +1,7 @@
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import { createLogger } from '@/lib/logs/console/logger'
import { withOptimisticUpdate } from '@/lib/utils'
import type { CustomToolsState, CustomToolsStore } from './types'

const logger = createLogger('CustomToolsStore')
@@ -136,84 +137,108 @@ export const useCustomToolsStore = create<CustomToolsStore>()(
},

updateTool: async (workspaceId: string, id: string, updates) => {
set({ isLoading: true, error: null })

try {
const tool = get().tools.find((t) => t.id === id)
if (!tool) {
throw new Error('Tool not found')
}

logger.info(`Updating custom tool: ${id} in workspace ${workspaceId}`)

const response = await fetch(API_ENDPOINT, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
tools: [
{
id,
title: updates.title ?? tool.title,
schema: updates.schema ?? tool.schema,
code: updates.code ?? tool.code,
},
],
workspaceId,
}),
})

const data = await response.json()

if (!response.ok) {
throw new ApiError(data.error || 'Failed to update tool', response.status)
}

if (!data.data || !Array.isArray(data.data)) {
throw new Error('Invalid API response: missing tools data')
}

set({ tools: data.data, isLoading: false })

logger.info(`Updated custom tool: ${id}`)
} catch (error) {
logger.error('Error updating custom tool:', error)
set({ isLoading: false })
throw error
const tool = get().tools.find((t) => t.id === id)
if (!tool) {
throw new Error('Tool not found')
}

await withOptimisticUpdate({
getCurrentState: () => get().tools,
optimisticUpdate: () => {
set((state) => ({
tools: state.tools.map((t) =>
t.id === id
? {
...t,
title: updates.title ?? t.title,
schema: updates.schema ?? t.schema,
code: updates.code ?? t.code,
}
: t
),
isLoading: true,
error: null,
}))
},
apiCall: async () => {
logger.info(`Updating custom tool: ${id} in workspace ${workspaceId}`)

const response = await fetch(API_ENDPOINT, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
tools: [
{
id,
title: updates.title ?? tool.title,
schema: updates.schema ?? tool.schema,
code: updates.code ?? tool.code,
},
],
workspaceId,
}),
})

const data = await response.json()

if (!response.ok) {
throw new ApiError(data.error || 'Failed to update tool', response.status)
}

if (!data.data || !Array.isArray(data.data)) {
throw new Error('Invalid API response: missing tools data')
}

set({ tools: data.data })
logger.info(`Updated custom tool: ${id}`)
},
rollback: (originalTools) => {
set({ tools: originalTools })
},
onComplete: () => {
set({ isLoading: false })
},
errorMessage: 'Error updating custom tool',
})
},

deleteTool: async (workspaceId: string | null, id: string) => {
set({ isLoading: true, error: null })
await withOptimisticUpdate({
getCurrentState: () => get().tools,
optimisticUpdate: () => {
set((state) => ({
tools: state.tools.filter((tool) => tool.id !== id),
isLoading: true,
error: null,
}))
},
apiCall: async () => {
logger.info(`Deleting custom tool: ${id}`)

try {
logger.info(`Deleting custom tool: ${id}`)
const url = workspaceId
? `${API_ENDPOINT}?id=${id}&workspaceId=${workspaceId}`
: `${API_ENDPOINT}?id=${id}`

// Build URL with optional workspaceId (for user-scoped tools)
const url = workspaceId
? `${API_ENDPOINT}?id=${id}&workspaceId=${workspaceId}`
: `${API_ENDPOINT}?id=${id}`
const response = await fetch(url, {
method: 'DELETE',
})

const response = await fetch(url, {
method: 'DELETE',
})
const data = await response.json()

const data = await response.json()
if (!response.ok) {
throw new Error(data.error || 'Failed to delete tool')
}

if (!response.ok) {
throw new Error(data.error || 'Failed to delete tool')
}

set((state) => ({
tools: state.tools.filter((tool) => tool.id !== id),
isLoading: false,
}))

logger.info(`Deleted custom tool: ${id}`)
} catch (error) {
logger.error('Error deleting custom tool:', error)
set({ isLoading: false })
throw error
}
logger.info(`Deleted custom tool: ${id}`)
},
rollback: (originalTools) => {
set({ tools: originalTools })
},
onComplete: () => {
set({ isLoading: false })
},
errorMessage: 'Error deleting custom tool',
})
},

getTool: (id: string) => {

@@ -1,6 +1,7 @@
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import { createLogger } from '@/lib/logs/console/logger'
import { withOptimisticUpdate } from '@/lib/utils'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

const logger = createLogger('FoldersStore')
@@ -282,62 +283,103 @@ export const useFolderStore = create<FolderState>()(
},

updateFolderAPI: async (id, updates) => {
const response = await fetch(`/api/folders/${id}`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(updates),
const originalFolder = get().folders[id]
if (!originalFolder) {
throw new Error('Folder not found')
}

let updatedFolder: WorkflowFolder | null = null

await withOptimisticUpdate({
getCurrentState: () => originalFolder,
optimisticUpdate: () => {
get().updateFolder(id, { ...updates, updatedAt: new Date() })
},
apiCall: async () => {
const response = await fetch(`/api/folders/${id}`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(updates),
})

if (!response.ok) {
const error = await response.json()
throw new Error(error.error || 'Failed to update folder')
}

const { folder } = await response.json()
const processedFolder = {
...folder,
createdAt: new Date(folder.createdAt),
updatedAt: new Date(folder.updatedAt),
}

get().updateFolder(id, processedFolder)
updatedFolder = processedFolder
},
rollback: (original) => {
get().updateFolder(id, original)
},
errorMessage: 'Failed to update folder',
})

if (!response.ok) {
const error = await response.json()
throw new Error(error.error || 'Failed to update folder')
}

const { folder } = await response.json()
const processedFolder = {
...folder,
createdAt: new Date(folder.createdAt),
updatedAt: new Date(folder.updatedAt),
}

get().updateFolder(id, processedFolder)

return processedFolder
return updatedFolder || { ...originalFolder, ...updates }
},

deleteFolder: async (id: string, workspaceId: string) => {
const response = await fetch(`/api/folders/${id}`, { method: 'DELETE' })
const getAllSubfolderIds = (parentId: string): string[] => {
const folders = get().folders
const childIds = Object.keys(folders).filter(
(folderId) => folders[folderId].parentId === parentId
)
const allIds = [...childIds]

if (!response.ok) {
const error = await response.json()
throw new Error(error.error || 'Failed to delete folder')
childIds.forEach((childId) => {
allIds.push(...getAllSubfolderIds(childId))
})

return allIds
}

const responseData = await response.json()
const deletedFolderIds = [id, ...getAllSubfolderIds(id)]

// Remove the folder from local state
get().removeFolder(id)
await withOptimisticUpdate({
getCurrentState: () => ({
folders: { ...get().folders },
expandedFolders: new Set(get().expandedFolders),
}),
optimisticUpdate: () => {
deletedFolderIds.forEach((folderId) => {
get().removeFolder(folderId)
})

// Remove from expanded state
set((state) => {
const newExpanded = new Set(state.expandedFolders)
newExpanded.delete(id)
return { expandedFolders: newExpanded }
set((state) => {
const newExpanded = new Set(state.expandedFolders)
deletedFolderIds.forEach((folderId) => newExpanded.delete(folderId))
return { expandedFolders: newExpanded }
})
},
apiCall: async () => {
const response = await fetch(`/api/folders/${id}`, { method: 'DELETE' })

if (!response.ok) {
const error = await response.json()
throw new Error(error.error || 'Failed to delete folder')
}

const responseData = await response.json()
logger.info(
`Deleted ${responseData.deletedItems.workflows} workflow(s) and ${responseData.deletedItems.folders} folder(s)`
)

const workflowRegistry = useWorkflowRegistry.getState()
await workflowRegistry.loadWorkflows(workspaceId)
},
rollback: (originalState) => {
set({ folders: originalState.folders, expandedFolders: originalState.expandedFolders })
},
errorMessage: 'Failed to delete folder',
})

// Remove subfolders from local state
get().removeSubfoldersRecursively(id)

// The backend has already deleted the workflows, so we just need to refresh
// the workflow registry to sync with the server state
const workflowRegistry = useWorkflowRegistry.getState()
if (workspaceId) {
await workflowRegistry.loadWorkflows(workspaceId)
}

logger.info(
`Deleted ${responseData.deletedItems.workflows} workflow(s) and ${responseData.deletedItems.folders} folder(s)`
)
},

isWorkflowInDeletedSubfolder: (workflow: Workflow, deletedFolderId: string) => {
@@ -372,6 +414,5 @@ export const useFolderStore = create<FolderState>()(
)
)

// Selector hook for checking if a workflow is selected (avoids get() calls)
export const useIsWorkflowSelected = (workflowId: string) =>
useFolderStore((state) => state.selectedWorkflows.has(workflowId))

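deleteFolder's getAllSubfolderIds walks children depth-first. A trace over a hypothetical three-level tree:

// folders = { a: { parentId: null }, b: { parentId: 'a' }, c: { parentId: 'b' } }  (hypothetical)
// getAllSubfolderIds('a') => ['b', 'c']
// so deletedFolderIds for deleteFolder('a') => ['a', 'b', 'c']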
@@ -1,5 +1,6 @@
import { create } from 'zustand'
import { createLogger } from '@/lib/logs/console/logger'
import { withOptimisticUpdate } from '@/lib/utils'
import { API_ENDPOINTS } from '@/stores/constants'
import type {
  CachedWorkspaceEnvData,
@@ -48,55 +49,53 @@ export const useEnvironmentStore = create<EnvironmentStore>()((set, get) => ({
  },

  saveEnvironmentVariables: async (variables: Record<string, string>) => {
    try {
      set({ isLoading: true, error: null })
      const transformedVariables = Object.entries(variables).reduce(
        (acc, [key, value]) => ({
          ...acc,
          [key]: { key, value },
        }),
        {}
      )

    const transformedVariables = Object.entries(variables).reduce(
      (acc, [key, value]) => ({
        ...acc,
        [key]: { key, value },
      }),
      {}
    )
    await withOptimisticUpdate({
      getCurrentState: () => get().variables,
      optimisticUpdate: () => {
        set({ variables: transformedVariables, isLoading: true, error: null })
      },
      apiCall: async () => {
        const response = await fetch(API_ENDPOINTS.ENVIRONMENT, {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
          },
          body: JSON.stringify({
            variables: Object.entries(transformedVariables).reduce(
              (acc, [key, value]) => ({
                ...acc,
                [key]: (value as EnvironmentVariable).value,
              }),
              {}
            ),
          }),
        })

      set({ variables: transformedVariables })
        if (!response.ok) {
          throw new Error(`Failed to save environment variables: ${response.statusText}`)
        }

      const response = await fetch(API_ENDPOINTS.ENVIRONMENT, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          variables: Object.entries(transformedVariables).reduce(
            (acc, [key, value]) => ({
              ...acc,
              [key]: (value as EnvironmentVariable).value,
            }),
            {}
          ),
        }),
      })

      if (!response.ok) {
        throw new Error(`Failed to save environment variables: ${response.statusText}`)
      }

      set({ isLoading: false })

      get().clearWorkspaceEnvCache()
    } catch (error) {
      logger.error('Error saving environment variables:', { error })
      set({
        error: error instanceof Error ? error.message : 'Unknown error',
        isLoading: false,
      })

      get().loadEnvironmentVariables()
    }
        get().clearWorkspaceEnvCache()
      },
      rollback: (originalVariables) => {
        set({ variables: originalVariables })
      },
      onComplete: () => {
        set({ isLoading: false })
      },
      errorMessage: 'Error saving environment variables',
    })
  },
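// Editor's note: a quick trace of the reduce above, using a hypothetical
// variable (the name and value are illustrative, not from the diff). The UI's
// flat map becomes keyed { key, value } records in the store, and apiCall
// flattens them back to plain strings before POSTing:

const uiInput = { API_KEY: 'abc123' }
const storeShape = Object.entries(uiInput).reduce(
  (acc, [key, value]) => ({ ...acc, [key]: { key, value } }),
  {} as Record<string, { key: string; value: string }>
)
// storeShape: { API_KEY: { key: 'API_KEY', value: 'abc123' } }
const requestBody = {
  variables: Object.entries(storeShape).reduce(
    (acc, [key, entry]) => ({ ...acc, [key]: entry.value }),
    {} as Record<string, string>
  ),
}
// requestBody: { variables: { API_KEY: 'abc123' } }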

  loadWorkspaceEnvironment: async (workspaceId: string) => {
    // Check cache first
    const cached = get().workspaceEnvCache.get(workspaceId)
    if (cached) {
      return {
@@ -121,7 +120,6 @@ export const useEnvironmentStore = create<EnvironmentStore>()((set, get) => ({
      conflicts: string[]
    }

    // Cache the result
    const cache = new Map(get().workspaceEnvCache)
    cache.set(workspaceId, {
      ...envData,
@@ -150,7 +148,6 @@ export const useEnvironmentStore = create<EnvironmentStore>()((set, get) => ({
      }
      set({ isLoading: false })

      // Invalidate cache for this workspace
      get().clearWorkspaceEnvCache(workspaceId)
    } catch (error) {
      logger.error('Error updating workspace environment:', { error })
@@ -171,7 +168,6 @@ export const useEnvironmentStore = create<EnvironmentStore>()((set, get) => ({
      }
      set({ isLoading: false })

      // Invalidate cache for this workspace
      get().clearWorkspaceEnvCache(workspaceId)
    } catch (error) {
      logger.error('Error removing workspace environment keys:', { error })
@@ -189,7 +185,6 @@ export const useEnvironmentStore = create<EnvironmentStore>()((set, get) => ({
      cache.delete(workspaceId)
      set({ workspaceEnvCache: cache })
    } else {
      // Clear all caches
      set({ workspaceEnvCache: new Map() })
    }
  },

@@ -2,6 +2,7 @@ import { create } from 'zustand'
import { devtools, persist } from 'zustand/middleware'
import { createLogger } from '@/lib/logs/console/logger'
import { syncThemeToNextThemes } from '@/lib/theme-sync'
import { withOptimisticUpdate } from '@/lib/utils'
import type { General, GeneralStore, UserSettings } from '@/stores/settings/general/types'

const logger = createLogger('GeneralStore')
@@ -41,34 +42,28 @@ export const useGeneralStore = create<GeneralStore>()(
      isSuperUserModeLoading: false,
    }

    // Optimistic update helper
    const updateSettingOptimistic = async <K extends keyof UserSettings>(
      key: K,
      value: UserSettings[K],
      loadingKey: keyof General,
      stateKey: keyof General
    ) => {
      // Prevent multiple simultaneous updates
      if ((get() as any)[loadingKey]) return

      const originalValue = (get() as any)[stateKey]

      // Optimistic update
      set({ [stateKey]: value, [loadingKey]: true } as any)

      try {
        await get().updateSetting(key, value)
        set({ [loadingKey]: false } as any)
      } catch (error) {
        // Rollback on error
        set({ [stateKey]: originalValue, [loadingKey]: false } as any)
        logger.error(`Failed to update ${String(key)}, rolled back:`, error)
      }
      await withOptimisticUpdate({
        getCurrentState: () => (get() as any)[stateKey],
        optimisticUpdate: () => set({ [stateKey]: value, [loadingKey]: true } as any),
        apiCall: async () => {
          await get().updateSetting(key, value)
        },
        rollback: (originalValue) => set({ [stateKey]: originalValue } as any),
        onComplete: () => set({ [loadingKey]: false } as any),
        errorMessage: `Failed to update ${String(key)}, rolled back`,
      })
    }

    return {
      ...store,
      // Basic Actions with optimistic updates
      toggleAutoConnect: async () => {
        if (get().isAutoConnectLoading) return
        const newValue = !get().isAutoConnectEnabled
@@ -138,25 +133,22 @@ export const useGeneralStore = create<GeneralStore>()(
      setTheme: async (theme) => {
        if (get().isThemeLoading) return

        const originalTheme = get().theme

        // Optimistic update
        set({ theme, isThemeLoading: true })

        // Update next-themes immediately for instant feedback
        syncThemeToNextThemes(theme)

        try {
          // Sync to DB for authenticated users
          await get().updateSetting('theme', theme)
          set({ isThemeLoading: false })
        } catch (error) {
          // Rollback on error
          set({ theme: originalTheme, isThemeLoading: false })
          syncThemeToNextThemes(originalTheme)
          logger.error('Failed to sync theme to database:', error)
          throw error
        }
        await withOptimisticUpdate({
          getCurrentState: () => get().theme,
          optimisticUpdate: () => {
            set({ theme, isThemeLoading: true })
            syncThemeToNextThemes(theme)
          },
          apiCall: async () => {
            await get().updateSetting('theme', theme)
          },
          rollback: (originalTheme) => {
            set({ theme: originalTheme })
            syncThemeToNextThemes(originalTheme)
          },
          onComplete: () => set({ isThemeLoading: false }),
          errorMessage: 'Failed to sync theme to database',
        })
      },

      setTelemetryEnabled: async (enabled) => {

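// Editor's note: the hunk above cuts off inside toggleAutoConnect, so its
// refactored body is not visible in this diff. Given the helper's signature
// and the keys shown, a toggle plausibly reduces to a single call like the
// following sketch (the 'autoConnect' settings key is an assumed name):
//
//   toggleAutoConnect: async () => {
//     if (get().isAutoConnectLoading) return
//     const newValue = !get().isAutoConnectEnabled
//     await updateSettingOptimistic(
//       'autoConnect', // UserSettings key (assumption)
//       newValue,
//       'isAutoConnectLoading', // loading flag, cleared in onComplete
//       'isAutoConnectEnabled' // state key set optimistically, rolled back on failure
//     )
//   },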
@@ -2,6 +2,7 @@ import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import { createLogger } from '@/lib/logs/console/logger'
import { generateCreativeWorkflowName } from '@/lib/naming'
import { withOptimisticUpdate } from '@/lib/utils'
import { buildDefaultWorkflowArtifacts } from '@/lib/workflows/defaults'
import { API_ENDPOINTS } from '@/stores/constants'
import { useVariablesStore } from '@/stores/panel/variables/store'
@@ -753,100 +754,120 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
      return id
    },

    // Delete workflow and clean up associated storage
    removeWorkflow: async (id: string) => {
      const { workflows } = get()
      const { workflows, activeWorkflowId } = get()
      const workflowToDelete = workflows[id]

      if (!workflowToDelete) {
        logger.warn(`Attempted to delete non-existent workflow: ${id}`)
        return
      }
      set({ isLoading: true, error: null })

      try {
        // Call DELETE endpoint to remove from database
        const response = await fetch(`/api/workflows/${id}`, {
          method: 'DELETE',
        })
      const isDeletingActiveWorkflow = activeWorkflowId === id

        if (!response.ok) {
          const error = await response.json().catch(() => ({ error: 'Unknown error' }))
          throw new Error(error.error || 'Failed to delete workflow')
        }
      await withOptimisticUpdate({
        getCurrentState: () => ({
          workflows: { ...get().workflows },
          activeWorkflowId: get().activeWorkflowId,
          subBlockValues: { ...useSubBlockStore.getState().workflowValues },
          workflowStoreState: isDeletingActiveWorkflow
            ? {
                blocks: { ...useWorkflowStore.getState().blocks },
                edges: [...useWorkflowStore.getState().edges],
                loops: { ...useWorkflowStore.getState().loops },
                parallels: { ...useWorkflowStore.getState().parallels },
                isDeployed: useWorkflowStore.getState().isDeployed,
                deployedAt: useWorkflowStore.getState().deployedAt,
                lastSaved: useWorkflowStore.getState().lastSaved,
              }
            : null,
        }),
        optimisticUpdate: () => {
          const newWorkflows = { ...get().workflows }
          delete newWorkflows[id]

        logger.info(`Successfully deleted workflow ${id} from database`)
      } catch (error) {
        logger.error(`Failed to delete workflow ${id} from database:`, error)
        set({
          error: `Failed to delete workflow: ${error instanceof Error ? error.message : 'Unknown error'}`,
          isLoading: false,
        })
        return
      }

      // Only update local state after successful deletion from database
      set((state) => {
        const newWorkflows = { ...state.workflows }
        delete newWorkflows[id]

        // Clean up subblock values for this workflow
        useSubBlockStore.setState((subBlockState) => {
          const newWorkflowValues = { ...subBlockState.workflowValues }
          const currentSubBlockValues = useSubBlockStore.getState().workflowValues
          const newWorkflowValues = { ...currentSubBlockValues }
          delete newWorkflowValues[id]
          return { workflowValues: newWorkflowValues }
        })
          useSubBlockStore.setState({ workflowValues: newWorkflowValues })

        // If deleting active workflow, clear active workflow ID immediately
        // Don't automatically switch to another workflow to prevent race conditions
        let newActiveWorkflowId = state.activeWorkflowId
        if (state.activeWorkflowId === id) {
          newActiveWorkflowId = null
          let newActiveWorkflowId = get().activeWorkflowId
          if (isDeletingActiveWorkflow) {
            newActiveWorkflowId = null

            // Clear workflow store state immediately when deleting active workflow
            useWorkflowStore.setState({
              blocks: {},
              edges: [],
              loops: {},
              parallels: {},
              isDeployed: false,
              deployedAt: undefined,
              lastSaved: Date.now(),
            })

            logger.info(
              `Cleared active workflow ${id} - user will need to manually select another workflow`
            )
          }

        // Cancel any schedule for this workflow (async, don't wait)
        fetch(API_ENDPOINTS.SCHEDULE, {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({
            workflowId: id,
            state: {
          useWorkflowStore.setState({
            blocks: {},
            edges: [],
            loops: {},
            },
          }),
        }).catch((error) => {
          logger.error(`Error cancelling schedule for deleted workflow ${id}:`, error)
        })
            parallels: {},
            isDeployed: false,
            deployedAt: undefined,
            lastSaved: Date.now(),
          })

        logger.info(`Removed workflow ${id} from local state`)
          logger.info(
            `Cleared active workflow ${id} - user will need to manually select another workflow`
          )
        }

        return {
          workflows: newWorkflows,
          activeWorkflowId: newActiveWorkflowId,
          error: null,
          isLoading: false, // Clear loading state after successful deletion
        }
          set({
            workflows: newWorkflows,
            activeWorkflowId: newActiveWorkflowId,
            isLoading: true,
            error: null,
          })

          logger.info(`Removed workflow ${id} from local state (optimistic)`)
        },
        apiCall: async () => {
          const response = await fetch(`/api/workflows/${id}`, {
            method: 'DELETE',
          })

          if (!response.ok) {
            const error = await response.json().catch(() => ({ error: 'Unknown error' }))
            throw new Error(error.error || 'Failed to delete workflow')
          }

          logger.info(`Successfully deleted workflow ${id} from database`)

          fetch(API_ENDPOINTS.SCHEDULE, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({
              workflowId: id,
              state: {
                blocks: {},
                edges: [],
                loops: {},
              },
            }),
          }).catch((error) => {
            logger.error(`Error cancelling schedule for deleted workflow ${id}:`, error)
          })
        },
        rollback: (originalState) => {
          set({
            workflows: originalState.workflows,
            activeWorkflowId: originalState.activeWorkflowId,
          })

          useSubBlockStore.setState({ workflowValues: originalState.subBlockValues })

          if (originalState.workflowStoreState) {
            useWorkflowStore.setState(originalState.workflowStoreState)
            logger.info(`Restored workflow store state for workflow ${id}`)
          }

          logger.info(`Rolled back deletion of workflow ${id}`)
        },
        onComplete: () => {
          set({ isLoading: false })
        },
        errorMessage: `Failed to delete workflow ${id}`,
      })
    },
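// Editor's note: the getCurrentState snapshot in removeWorkflow above spans
// three stores so that rollback can restore registry, subblock, and workflow
// state together. No explicit type is declared in this diff; the shape below
// is reconstructed from the object literal, with field types inferred:
//
//   interface RemoveWorkflowSnapshot {
//     workflows: Record<string, WorkflowMetadata>
//     activeWorkflowId: string | null
//     subBlockValues: Record<string, unknown> // useSubBlockStore's workflowValues
//     workflowStoreState: {
//       blocks: Record<string, unknown>
//       edges: unknown[]
//       loops: Record<string, unknown>
//       parallels: Record<string, unknown>
//       isDeployed: boolean
//       deployedAt: Date | undefined
//       lastSaved: number | undefined
//     } | null // null unless the deleted workflow was the active one
//   }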

    // Update workflow metadata
    updateWorkflow: async (id: string, metadata: Partial<WorkflowMetadata>) => {
      const { workflows } = get()
      const workflow = workflows[id]
@@ -855,71 +876,70 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
        return
      }

      // Optimistically update local state first
      set((state) => ({
        workflows: {
          ...state.workflows,
          [id]: {
            ...workflow,
            ...metadata,
            lastModified: new Date(),
            createdAt: workflow.createdAt, // Preserve creation date
          },
        },
        error: null,
      }))

      // Persist to database via API
      try {
        const response = await fetch(`/api/workflows/${id}`, {
          method: 'PUT',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify(metadata),
        })

        if (!response.ok) {
          const error = await response.json()
          throw new Error(error.error || 'Failed to update workflow')
        }

        const { workflow: updatedWorkflow } = await response.json()
        logger.info(`Successfully updated workflow ${id} metadata`, metadata)

        // Update with server response to ensure consistency
        set((state) => ({
          workflows: {
            ...state.workflows,
            [id]: {
              ...state.workflows[id],
              name: updatedWorkflow.name,
              description: updatedWorkflow.description,
              color: updatedWorkflow.color,
              folderId: updatedWorkflow.folderId,
              lastModified: new Date(updatedWorkflow.updatedAt),
              createdAt: updatedWorkflow.createdAt
                ? new Date(updatedWorkflow.createdAt)
                : state.workflows[id].createdAt,
      await withOptimisticUpdate({
        getCurrentState: () => workflow,
        optimisticUpdate: () => {
          set((state) => ({
            workflows: {
              ...state.workflows,
              [id]: {
                ...workflow,
                ...metadata,
                lastModified: new Date(),
                createdAt: workflow.createdAt, // Preserve creation date
              },
            },
          },
          }))
      } catch (error) {
        logger.error(`Failed to update workflow ${id} metadata:`, error)
            error: null,
          }))
        },
        apiCall: async () => {
          const response = await fetch(`/api/workflows/${id}`, {
            method: 'PUT',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify(metadata),
          })

        // Revert optimistic update on error
        set((state) => ({
          workflows: {
            ...state.workflows,
            [id]: workflow, // Revert to original state
          },
          error: `Failed to update workflow: ${error instanceof Error ? error.message : 'Unknown error'}`,
        }))
      }
          if (!response.ok) {
            const error = await response.json()
            throw new Error(error.error || 'Failed to update workflow')
          }

          const { workflow: updatedWorkflow } = await response.json()
          logger.info(`Successfully updated workflow ${id} metadata`, metadata)

          set((state) => ({
            workflows: {
              ...state.workflows,
              [id]: {
                ...state.workflows[id],
                name: updatedWorkflow.name,
                description: updatedWorkflow.description,
                color: updatedWorkflow.color,
                folderId: updatedWorkflow.folderId,
                lastModified: new Date(updatedWorkflow.updatedAt),
                createdAt: updatedWorkflow.createdAt
                  ? new Date(updatedWorkflow.createdAt)
                  : state.workflows[id].createdAt,
              },
            },
          }))
        },
        rollback: (originalWorkflow) => {
          set((state) => ({
            workflows: {
              ...state.workflows,
              [id]: originalWorkflow, // Revert to original state
            },
            error: `Failed to update workflow: ${metadata.name ? 'name' : 'metadata'}`,
          }))
        },
        errorMessage: `Failed to update workflow ${id} metadata`,
      })
    },

    logout: () => {
      logger.info('Logging out - clearing all workflow data')

      // Clear all state
      resetWorkflowStores()

      set({