improvement(tanstack): migrate multiple stores (#1994)

* improvement(tanstack): migrate folders, knowledge to tanstack

* fix types
Vikhyath Mondreti
2025-11-14 15:38:15 -08:00
committed by GitHub
parent 4b4060f63f
commit ec430abca2
38 changed files with 1399 additions and 1990 deletions

View File

@@ -44,8 +44,12 @@ import {
SearchInput,
} from '@/app/workspace/[workspaceId]/knowledge/components'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useKnowledgeBase, useKnowledgeBaseDocuments } from '@/hooks/use-knowledge'
import { type DocumentData, useKnowledgeStore } from '@/stores/knowledge/store'
import {
useKnowledgeBase,
useKnowledgeBaseDocuments,
useKnowledgeBasesList,
} from '@/hooks/use-knowledge'
import type { DocumentData } from '@/stores/knowledge/store'
const logger = createLogger('KnowledgeBase')
@@ -125,10 +129,10 @@ export function KnowledgeBase({
id,
knowledgeBaseName: passedKnowledgeBaseName,
}: KnowledgeBaseProps) {
const { removeKnowledgeBase } = useKnowledgeStore()
const userPermissions = useUserPermissionsContext()
const params = useParams()
const workspaceId = params.workspaceId as string
const { removeKnowledgeBase } = useKnowledgeBasesList(workspaceId, { enabled: false })
const userPermissions = useUserPermissionsContext()
const [searchQuery, setSearchQuery] = useState('')

View File

@@ -1,4 +1,4 @@
import { useEffect, useMemo, useState } from 'react'
import { useMemo, useState } from 'react'
import { Check, ChevronDown } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Button } from '@/components/emcn'
@@ -22,7 +22,8 @@ import {
filterButtonClass,
folderDropdownListStyle,
} from '@/app/workspace/[workspaceId]/logs/components/filters/components/shared'
import { useFolderStore } from '@/stores/folders/store'
import { useFolders } from '@/hooks/queries/folders'
import { type FolderTreeNode, useFolderStore } from '@/stores/folders/store'
import { useFilterStore } from '@/stores/logs/filters/store'
const logger = createLogger('LogsFolderFilter')
@@ -36,56 +37,37 @@ interface FolderOption {
export default function FolderFilter() {
const { folderIds, toggleFolderId, setFolderIds } = useFilterStore()
const { getFolderTree, fetchFolders } = useFolderStore()
const { getFolderTree } = useFolderStore()
const params = useParams()
const workspaceId = params.workspaceId as string
const [folders, setFolders] = useState<FolderOption[]>([])
const [loading, setLoading] = useState(true)
const [search, setSearch] = useState('')
const { isLoading: foldersLoading } = useFolders(workspaceId)
// Fetch all available folders from the API
useEffect(() => {
const fetchFoldersData = async () => {
try {
setLoading(true)
if (workspaceId) {
await fetchFolders(workspaceId)
const folderTree = getFolderTree(workspaceId)
const folderTree = workspaceId ? getFolderTree(workspaceId) : []
// Flatten the folder tree and create options with full paths
const flattenFolders = (nodes: any[], parentPath = ''): FolderOption[] => {
const result: FolderOption[] = []
const folders: FolderOption[] = useMemo(() => {
const flattenFolders = (nodes: FolderTreeNode[], parentPath = ''): FolderOption[] => {
const result: FolderOption[] = []
for (const node of nodes) {
const currentPath = parentPath ? `${parentPath} / ${node.name}` : node.name
result.push({
id: node.id,
name: node.name,
color: node.color || '#6B7280',
path: currentPath,
})
for (const node of nodes) {
const currentPath = parentPath ? `${parentPath} / ${node.name}` : node.name
result.push({
id: node.id,
name: node.name,
color: node.color || '#6B7280',
path: currentPath,
})
// Add children recursively
if (node.children && node.children.length > 0) {
result.push(...flattenFolders(node.children, currentPath))
}
}
return result
}
const folderOptions = flattenFolders(folderTree)
setFolders(folderOptions)
if (node.children && node.children.length > 0) {
result.push(...flattenFolders(node.children, currentPath))
}
} catch (error) {
logger.error('Failed to fetch folders', { error })
} finally {
setLoading(false)
}
return result
}
fetchFoldersData()
}, [workspaceId, fetchFolders, getFolderTree])
return flattenFolders(folderTree)
}, [folderTree])
// Get display text for the dropdown button
const getSelectedFoldersText = () => {
@@ -111,7 +93,7 @@ export default function FolderFilter() {
<DropdownMenu>
<DropdownMenuTrigger asChild>
<Button variant='outline' className={filterButtonClass}>
{loading ? 'Loading folders...' : getSelectedFoldersText()}
{foldersLoading ? 'Loading folders...' : getSelectedFoldersText()}
<ChevronDown className='ml-2 h-4 w-4 text-muted-foreground' />
</Button>
</DropdownMenuTrigger>
@@ -125,7 +107,9 @@ export default function FolderFilter() {
<Command>
<CommandInput placeholder='Search folders...' onValueChange={(v) => setSearch(v)} />
<CommandList className={commandListClass} style={folderDropdownListStyle}>
<CommandEmpty>{loading ? 'Loading folders...' : 'No folders found.'}</CommandEmpty>
<CommandEmpty>
{foldersLoading ? 'Loading folders...' : 'No folders found.'}
</CommandEmpty>
<CommandGroup>
<CommandItem
value='all-folders'

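Taken together, the FolderFilter changes replace the fetch-on-mount effect with a cached query plus a derived memo. A minimal sketch of the resulting data flow, assuming `useFolders` and `getFolderTree` behave as shown in the hunks above:

// Sketch only: the query hook populates the folder store, the memo derives flat options.
import { useMemo } from 'react'
import { useFolders } from '@/hooks/queries/folders'
import { type FolderTreeNode, useFolderStore } from '@/stores/folders/store'

interface FolderOption {
  id: string
  name: string
  color: string
  path: string
}

export function useFolderOptions(workspaceId: string | undefined) {
  // React Query owns fetching/caching; the Zustand store only holds the synced tree.
  const { isLoading } = useFolders(workspaceId)
  const { getFolderTree } = useFolderStore()

  const options = useMemo<FolderOption[]>(() => {
    const tree = workspaceId ? getFolderTree(workspaceId) : []
    const flatten = (nodes: FolderTreeNode[], parentPath = ''): FolderOption[] =>
      nodes.flatMap((node) => {
        const path = parentPath ? `${parentPath} / ${node.name}` : node.name
        return [
          { id: node.id, name: node.name, color: node.color || '#6B7280', path },
          ...flatten(node.children ?? [], path),
        ]
      })
    return flatten(tree)
  }, [workspaceId, getFolderTree])

  return { options, isLoading }
}
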
View File

@@ -12,6 +12,7 @@ import { AutocompleteSearch } from '@/app/workspace/[workspaceId]/logs/component
import { Sidebar } from '@/app/workspace/[workspaceId]/logs/components/sidebar/sidebar'
import Dashboard from '@/app/workspace/[workspaceId]/logs/dashboard'
import { formatDate } from '@/app/workspace/[workspaceId]/logs/utils'
import { useFolders } from '@/hooks/queries/folders'
import { useDebounce } from '@/hooks/use-debounce'
import { useFolderStore } from '@/stores/folders/store'
import { useFilterStore } from '@/stores/logs/filters/store'
@@ -120,7 +121,8 @@ export default function Logs() {
setSearchQuery(storeSearchQuery)
}, [storeSearchQuery])
const { fetchFolders, getFolderTree } = useFolderStore()
const foldersQuery = useFolders(workspaceId)
const { getFolderTree } = useFolderStore()
useEffect(() => {
let cancelled = false
@@ -138,7 +140,6 @@ export default function Logs() {
if (!cancelled) setAvailableWorkflows([])
}
await fetchFolders(workspaceId)
const tree = getFolderTree(workspaceId)
const flatten = (nodes: any[], parentPath = ''): string[] => {
@@ -168,7 +169,7 @@ export default function Logs() {
return () => {
cancelled = true
}
}, [workspaceId, fetchFolders, getFolderTree])
}, [workspaceId, getFolderTree, foldersQuery.data])
useEffect(() => {
if (isInitialized.current && debouncedSearchQuery !== storeSearchQuery) {

View File

@@ -0,0 +1,49 @@
'use client'
import { useEffect } from 'react'
import { createLogger } from '@/lib/logs/console/logger'
import { useProviderModels } from '@/hooks/queries/providers'
import { updateOllamaProviderModels, updateOpenRouterProviderModels } from '@/providers/utils'
import { useProvidersStore } from '@/stores/providers/store'
import type { ProviderName } from '@/stores/providers/types'
const logger = createLogger('ProviderModelsLoader')
function useSyncProvider(provider: ProviderName) {
const setProviderModels = useProvidersStore((state) => state.setProviderModels)
const setProviderLoading = useProvidersStore((state) => state.setProviderLoading)
const { data, isLoading, isFetching, error } = useProviderModels(provider)
useEffect(() => {
setProviderLoading(provider, isLoading || isFetching)
}, [provider, isLoading, isFetching, setProviderLoading])
useEffect(() => {
if (!data) return
try {
if (provider === 'ollama') {
updateOllamaProviderModels(data)
} else if (provider === 'openrouter') {
void updateOpenRouterProviderModels(data)
}
} catch (syncError) {
logger.warn(`Failed to sync provider definitions for ${provider}`, syncError as Error)
}
setProviderModels(provider, data)
}, [provider, data, setProviderModels])
useEffect(() => {
if (error) {
logger.error(`Failed to load ${provider} models`, error)
}
}, [provider, error])
}
export function ProviderModelsLoader() {
useSyncProvider('base')
useSyncProvider('ollama')
useSyncProvider('openrouter')
return null
}

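The loader relies on a `useProviderModels(provider)` query hook from '@/hooks/queries/providers', which is not part of this diff. A plausible sketch of such a hook; the endpoint path and the `string[]` payload shape are placeholders for illustration, not the project's actual route or types:

// Hypothetical sketch: the real hook lives in '@/hooks/queries/providers' and is not shown in this commit.
import { useQuery } from '@tanstack/react-query'
import type { ProviderName } from '@/stores/providers/types'

async function fetchProviderModels(provider: ProviderName): Promise<string[]> {
  // '/api/providers/:provider/models' is an assumed endpoint used only for this example.
  const response = await fetch(`/api/providers/${provider}/models`)
  if (!response.ok) {
    throw new Error(`Failed to fetch ${provider} models: ${response.statusText}`)
  }
  const { models } = await response.json()
  return Array.isArray(models) ? models : []
}

export function useProviderModels(provider: ProviderName) {
  return useQuery({
    queryKey: ['providers', provider, 'models'],
    queryFn: () => fetchProviderModels(provider),
    staleTime: 60 * 1000,
  })
}
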
View File

@@ -4,6 +4,7 @@ import React from 'react'
import { Tooltip } from '@/components/emcn'
import { GlobalCommandsProvider } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'
import { WorkspacePermissionsProvider } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { ProviderModelsLoader } from './provider-models-loader'
import { SettingsLoader } from './settings-loader'
interface ProvidersProps {
@@ -14,6 +15,7 @@ const Providers = React.memo<ProvidersProps>(({ children }) => {
return (
<>
<SettingsLoader />
<ProviderModelsLoader />
<GlobalCommandsProvider>
<Tooltip.Provider delayDuration={600} skipDelayDuration={0}>
<WorkspacePermissionsProvider>{children}</WorkspacePermissionsProvider>

View File

@@ -1,6 +1,6 @@
'use client'
import { useCallback, useEffect, useMemo, useState } from 'react'
import { useCallback, useEffect, useState } from 'react'
import { Check, ChevronDown, FileText, RefreshCw } from 'lucide-react'
import { Button } from '@/components/ui/button'
import {
@@ -15,8 +15,9 @@ import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel-new/components/editor/components/sub-block/hooks/use-depends-on-gate'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel-new/components/editor/components/sub-block/hooks/use-sub-block-value'
import type { SubBlockConfig } from '@/blocks/types'
import { useKnowledgeBaseDocuments } from '@/hooks/use-knowledge'
import { useDisplayNamesStore } from '@/stores/display-names/store'
import { type DocumentData, useKnowledgeStore } from '@/stores/knowledge/store'
import type { DocumentData } from '@/stores/knowledge/store'
interface DocumentSelectorProps {
blockId: string
@@ -45,68 +46,29 @@ export function DocumentSelector({
? knowledgeBaseId
: null
const documentsCache = useKnowledgeStore(
useCallback(
(state) =>
normalizedKnowledgeBaseId ? state.documents[normalizedKnowledgeBaseId] : undefined,
[normalizedKnowledgeBaseId]
)
)
const isDocumentsLoading = useKnowledgeStore(
useCallback(
(state) =>
normalizedKnowledgeBaseId ? state.isDocumentsLoading(normalizedKnowledgeBaseId) : false,
[normalizedKnowledgeBaseId]
)
)
const getDocuments = useKnowledgeStore((state) => state.getDocuments)
const value = isPreview ? previewValue : storeValue
const { finalDisabled } = useDependsOnGate(blockId, subBlock, { disabled, isPreview })
const isDisabled = finalDisabled
const documents = useMemo<DocumentData[]>(() => {
if (!documentsCache) return []
return documentsCache.documents ?? []
}, [documentsCache])
const loadDocuments = useCallback(async () => {
if (!normalizedKnowledgeBaseId) {
setError('No knowledge base selected')
return
}
setError(null)
try {
const fetchedDocuments = await getDocuments(normalizedKnowledgeBaseId)
if (fetchedDocuments.length > 0) {
const documentMap = fetchedDocuments.reduce<Record<string, string>>((acc, doc) => {
acc[doc.id] = doc.filename
return acc
}, {})
useDisplayNamesStore
.getState()
.setDisplayNames('documents', normalizedKnowledgeBaseId, documentMap)
}
} catch (err) {
if (err instanceof Error && err.name === 'AbortError') return
setError(err instanceof Error ? err.message : 'Failed to fetch documents')
}
}, [normalizedKnowledgeBaseId, getDocuments])
const {
documents,
isLoading: documentsLoading,
error: documentsError,
refreshDocuments,
} = useKnowledgeBaseDocuments(normalizedKnowledgeBaseId ?? '', {
limit: 500,
offset: 0,
enabled: open && Boolean(normalizedKnowledgeBaseId),
})
const handleOpenChange = (isOpen: boolean) => {
if (isPreview || isDisabled) return
setOpen(isOpen)
if (isOpen && (!documentsCache || !documentsCache.documents.length)) {
void loadDocuments()
if (isOpen && normalizedKnowledgeBaseId) {
void refreshDocuments()
}
}
@@ -119,9 +81,15 @@ export function DocumentSelector({
}
useEffect(() => {
setError(null)
if (!normalizedKnowledgeBaseId) {
setError(null)
}
}, [normalizedKnowledgeBaseId])
useEffect(() => {
setError(documentsError)
}, [documentsError])
useEffect(() => {
if (!normalizedKnowledgeBaseId || documents.length === 0) return
@@ -152,7 +120,7 @@ export function DocumentSelector({
}
const label = subBlock.placeholder || 'Select document'
const isLoading = isDocumentsLoading && !error
const isLoading = documentsLoading && !error
// Always use cached display name
const displayName = useDisplayNamesStore(

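The selector now defers entirely to `useKnowledgeBaseDocuments`, enabling the query only while the popover is open. A minimal consumption sketch based on the hook shape visible in this hunk (the hook's internals live in '@/hooks/use-knowledge' and are not shown here):

// Sketch of the query-driven pattern above; options and fields mirror the diff, internals are assumed.
import { useState } from 'react'
import { useKnowledgeBaseDocuments } from '@/hooks/use-knowledge'

export function useDocumentOptions(knowledgeBaseId: string | null) {
  const [open, setOpen] = useState(false)

  const { documents, isLoading, error, refreshDocuments } = useKnowledgeBaseDocuments(
    knowledgeBaseId ?? '',
    {
      limit: 500,
      offset: 0,
      // Fetch lazily: only while the dropdown is open and a knowledge base is selected.
      enabled: open && Boolean(knowledgeBaseId),
    }
  )

  const handleOpenChange = (isOpen: boolean) => {
    setOpen(isOpen)
    // Revalidate on open so the list reflects recent uploads or deletions.
    if (isOpen && knowledgeBaseId) void refreshDocuments()
  }

  return { documents, isLoading, error, open, handleOpenChange }
}
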
View File

@@ -1,13 +1,16 @@
'use client'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useCallback, useMemo } from 'react'
import { useQueries } from '@tanstack/react-query'
import { X } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Combobox, type ComboboxOption } from '@/components/emcn/components/combobox/combobox'
import { PackageSearchIcon } from '@/components/icons'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel-new/components/editor/components/sub-block/hooks/use-sub-block-value'
import type { SubBlockConfig } from '@/blocks/types'
import { type KnowledgeBaseData, useKnowledgeStore } from '@/stores/knowledge/store'
import { fetchKnowledgeBase, knowledgeKeys } from '@/hooks/queries/knowledge'
import { useKnowledgeBasesList } from '@/hooks/use-knowledge'
import type { KnowledgeBaseData } from '@/stores/knowledge/store'
interface KnowledgeBaseSelectorProps {
blockId: string
@@ -29,14 +32,11 @@ export function KnowledgeBaseSelector({
const params = useParams()
const workspaceId = params.workspaceId as string
const knowledgeBasesList = useKnowledgeStore((state) => state.knowledgeBasesList)
const knowledgeBasesMap = useKnowledgeStore((state) => state.knowledgeBases)
const loadingKnowledgeBasesList = useKnowledgeStore((state) => state.loadingKnowledgeBasesList)
const getKnowledgeBasesList = useKnowledgeStore((state) => state.getKnowledgeBasesList)
const getKnowledgeBase = useKnowledgeStore((state) => state.getKnowledgeBase)
const [error, setError] = useState<string | null>(null)
const hasRequestedListRef = useRef(false)
const {
knowledgeBases,
isLoading: isKnowledgeBasesLoading,
error,
} = useKnowledgeBasesList(workspaceId)
// Use the proper hook to get the current value and setter - this prevents infinite loops
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)
@@ -46,28 +46,6 @@ export function KnowledgeBaseSelector({
const isMultiSelect = subBlock.multiSelect === true
/**
* Convert knowledge bases to combobox options format
*/
const combinedKnowledgeBases = useMemo<KnowledgeBaseData[]>(() => {
const merged = new Map<string, KnowledgeBaseData>()
knowledgeBasesList.forEach((kb) => {
merged.set(kb.id, kb)
})
Object.values(knowledgeBasesMap).forEach((kb) => {
merged.set(kb.id, kb)
})
return Array.from(merged.values())
}, [knowledgeBasesList, knowledgeBasesMap])
const options = useMemo<ComboboxOption[]>(() => {
return combinedKnowledgeBases.map((kb) => ({
label: kb.name,
value: kb.id,
icon: PackageSearchIcon,
}))
}, [combinedKnowledgeBases])
/**
* Parse value into array of selected IDs
*/
@@ -84,6 +62,39 @@ export function KnowledgeBaseSelector({
return []
}, [value])
/**
* Convert knowledge bases to combobox options format
*/
const selectedKnowledgeBaseQueries = useQueries({
queries: selectedIds.map((selectedId) => ({
queryKey: knowledgeKeys.detail(selectedId),
queryFn: () => fetchKnowledgeBase(selectedId),
enabled: Boolean(selectedId),
staleTime: 60 * 1000,
})),
})
const combinedKnowledgeBases = useMemo<KnowledgeBaseData[]>(() => {
const merged = new Map<string, KnowledgeBaseData>()
knowledgeBases.forEach((kb) => merged.set(kb.id, kb))
selectedKnowledgeBaseQueries.forEach((query) => {
if (query.data) {
merged.set(query.data.id, query.data)
}
})
return Array.from(merged.values())
}, [knowledgeBases, selectedKnowledgeBaseQueries])
const options = useMemo<ComboboxOption[]>(() => {
return combinedKnowledgeBases.map((kb) => ({
label: kb.name,
value: kb.id,
icon: PackageSearchIcon,
}))
}, [combinedKnowledgeBases])
/**
* Compute selected knowledge bases for tag display
*/
@@ -144,44 +155,6 @@ export function KnowledgeBaseSelector({
[isPreview, selectedIds, setStoreValue, onKnowledgeBaseSelect]
)
/**
* Fetch knowledge bases on initial mount
*/
useEffect(() => {
if (hasRequestedListRef.current) return
let cancelled = false
hasRequestedListRef.current = true
setError(null)
getKnowledgeBasesList(workspaceId).catch((err) => {
if (cancelled) return
setError(err instanceof Error ? err.message : 'Failed to load knowledge bases')
})
return () => {
cancelled = true
}
}, [workspaceId, getKnowledgeBasesList])
/**
* Ensure selected knowledge bases are cached
*/
useEffect(() => {
if (selectedIds.length === 0) return
selectedIds.forEach((id) => {
const isKnown =
Boolean(knowledgeBasesMap[id]) ||
knowledgeBasesList.some((knowledgeBase) => knowledgeBase.id === id)
if (!isKnown) {
void getKnowledgeBase(id).catch(() => {
// Ignore fetch errors here; they will surface via display hooks if needed
})
}
})
}, [selectedIds, knowledgeBasesList, knowledgeBasesMap, getKnowledgeBase])
const label =
subBlock.placeholder || (isMultiSelect ? 'Select knowledge bases' : 'Select knowledge base')
@@ -221,7 +194,7 @@ export function KnowledgeBaseSelector({
onMultiSelectChange={handleMultiSelectChange}
placeholder={label}
disabled={disabled || isPreview}
isLoading={loadingKnowledgeBasesList}
isLoading={isKnowledgeBasesLoading}
error={error}
/>
</div>

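The selector merges the workspace list query with per-id detail queries so that knowledge bases referenced by the block but absent from the list still render with a name. A condensed sketch of that merge, using `useQueries`, `knowledgeKeys`, and `fetchKnowledgeBase` as imported in the hunk above:

// Sketch of the list + per-selection backfill pattern shown in the diff.
import { useMemo } from 'react'
import { useQueries } from '@tanstack/react-query'
import { fetchKnowledgeBase, knowledgeKeys } from '@/hooks/queries/knowledge'
import { useKnowledgeBasesList } from '@/hooks/use-knowledge'
import type { KnowledgeBaseData } from '@/stores/knowledge/store'

export function useCombinedKnowledgeBases(workspaceId: string, selectedIds: string[]) {
  const { knowledgeBases, isLoading, error } = useKnowledgeBasesList(workspaceId)

  // One detail query per selected id; already-cached entries resolve without a refetch.
  const selectedQueries = useQueries({
    queries: selectedIds.map((id) => ({
      queryKey: knowledgeKeys.detail(id),
      queryFn: () => fetchKnowledgeBase(id),
      enabled: Boolean(id),
      staleTime: 60 * 1000,
    })),
  })

  const combined = useMemo<KnowledgeBaseData[]>(() => {
    const merged = new Map<string, KnowledgeBaseData>()
    knowledgeBases.forEach((kb) => merged.set(kb.id, kb))
    selectedQueries.forEach((query) => {
      if (query.data) merged.set(query.data.id, query.data)
    })
    return Array.from(merged.values())
  }, [knowledgeBases, selectedQueries])

  return { combined, isLoading, error }
}
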
View File

@@ -85,7 +85,7 @@ export function useWorkflowExecution() {
const currentWorkflow = useCurrentWorkflow()
const { activeWorkflowId, workflows } = useWorkflowRegistry()
const { toggleConsole, addConsole } = useTerminalConsoleStore()
const { getAllVariables, loadWorkspaceEnvironment } = useEnvironmentStore()
const { getAllVariables } = useEnvironmentStore()
const { getVariablesByWorkflowId, variables } = useVariablesStore()
const {
isExecuting,
@@ -650,7 +650,6 @@ export function useWorkflowExecution() {
currentWorkflow,
toggleConsole,
getAllVariables,
loadWorkspaceEnvironment,
getVariablesByWorkflowId,
setIsExecuting,
setIsDebugging,

View File

@@ -34,6 +34,7 @@ import {
getSubscriptionPermissions,
getVisiblePlans,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/components-new/settings-modal/components/subscription/subscription-permissions'
import { useUpdateGeneralSetting } from '@/hooks/queries/general-settings'
import { useOrganizationBilling, useOrganizations } from '@/hooks/queries/organization'
import { useSubscriptionData, useUsageLimitData } from '@/hooks/queries/subscription'
import { useUpdateWorkspaceSettings, useWorkspaceSettings } from '@/hooks/queries/workspace'
@@ -626,9 +627,9 @@ export function Subscription({ onOpenChange }: SubscriptionProps) {
}
function BillingUsageNotificationsToggle() {
const isLoading = useGeneralStore((s) => s.isBillingUsageNotificationsLoading)
const enabled = useGeneralStore((s) => s.isBillingUsageNotificationsEnabled)
const setEnabled = useGeneralStore((s) => s.setBillingUsageNotificationsEnabled)
const updateSetting = useUpdateGeneralSetting()
const isLoading = updateSetting.isPending
// Settings are automatically loaded by SettingsLoader provider
// No need to load here - Zustand is synced from React Query
@@ -643,7 +644,9 @@ function BillingUsageNotificationsToggle() {
checked={!!enabled}
disabled={isLoading}
onCheckedChange={(v: boolean) => {
void setEnabled(v)
if (v !== enabled) {
updateSetting.mutate({ key: 'billingUsageNotificationsEnabled', value: v })
}
}}
/>
</div>

View File

@@ -13,7 +13,8 @@ import {
useItemRename,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useDeleteFolder, useDuplicateFolder } from '@/app/workspace/[workspaceId]/w/hooks'
import { type FolderTreeNode, useFolderStore } from '@/stores/folders/store'
import { useUpdateFolder } from '@/hooks/queries/folders'
import type { FolderTreeNode } from '@/stores/folders/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
interface FolderItemProps {
@@ -37,7 +38,7 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
const params = useParams()
const router = useRouter()
const workspaceId = params.workspaceId as string
const { updateFolderAPI } = useFolderStore()
const updateFolderMutation = useUpdateFolder()
const { createWorkflow } = useWorkflowRegistry()
// Delete modal state
@@ -125,7 +126,11 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
} = useItemRename({
initialName: folder.name,
onSave: async (newName) => {
await updateFolderAPI(folder.id, { name: newName })
await updateFolderMutation.mutateAsync({
workspaceId,
id: folder.id,
updates: { name: newName },
})
},
itemType: 'folder',
itemId: folder.id,

View File

@@ -10,6 +10,7 @@ import {
useWorkflowSelection,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useImportWorkflow } from '@/app/workspace/[workspaceId]/w/hooks/use-import-workflow'
import { useFolders } from '@/hooks/queries/folders'
import { type FolderTreeNode, useFolderStore } from '@/stores/folders/store'
import type { WorkflowMetadata } from '@/stores/workflows/registry/types'
@@ -56,14 +57,9 @@ export function WorkflowList({
const workspaceId = params.workspaceId as string
const workflowId = params.workflowId as string
const {
getFolderTree,
expandedFolders,
fetchFolders,
isLoading: foldersLoading,
getFolderPath,
setExpanded,
} = useFolderStore()
const { isLoading: foldersLoading } = useFolders(workspaceId)
const { getFolderTree, expandedFolders, getFolderPath, setExpanded } = useFolderStore()
const {
dropTargetId,
@@ -169,15 +165,6 @@ export function WorkflowList({
}
}, [workflowId, activeWorkflowFolderId, isLoading, foldersLoading, getFolderPath, setExpanded])
/**
* Fetch folders when workspace changes
*/
useEffect(() => {
if (workspaceId) {
fetchFolders(workspaceId)
}
}, [workspaceId, fetchFolders])
const renderWorkflowItem = useCallback(
(workflow: WorkflowMetadata, level: number, parentFolderId: string | null = null) => (
<div key={workflow.id} className='relative' {...createItemDragHandlers(parentFolderId)}>

View File

@@ -14,7 +14,7 @@ import {
extractWorkflowsFromZip,
} from '@/lib/workflows/import-export'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useFolderStore } from '@/stores/folders/store'
import { useCreateFolder } from '@/hooks/queries/folders'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { parseWorkflowJson } from '@/stores/workflows/json/importer'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -43,7 +43,7 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
const params = useParams()
const router = useRouter()
const workspaceId = params.workspaceId as string
const { createFolder } = useFolderStore()
const createFolderMutation = useCreateFolder()
const { createWorkflow } = useWorkflowRegistry()
const userPermissions = useUserPermissionsContext()
const fileInputRef = useRef<HTMLInputElement>(null)
@@ -110,14 +110,14 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
try {
setIsCreating(true)
const folderName = await generateFolderName(workspaceId)
await createFolder({ name: folderName, workspaceId })
await createFolderMutation.mutateAsync({ name: folderName, workspaceId })
logger.info(`Created folder: ${folderName}`)
} catch (error) {
logger.error('Failed to create folder:', { error })
} finally {
setIsCreating(false)
}
}, [createFolder, workspaceId, isCreating])
}, [createFolderMutation, workspaceId, isCreating])
const handleImportWorkflow = useCallback(() => {
setIsOpen(false)
@@ -143,9 +143,8 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
const { workflows: extractedWorkflows, metadata } = await extractWorkflowsFromZip(zipFile)
importedWorkflows = extractedWorkflows
const { createFolder } = useFolderStore.getState()
const folderName = metadata?.workspaceName || zipFile.name.replace(/\.zip$/i, '')
const importFolder = await createFolder({
const importFolder = await createFolderMutation.mutateAsync({
name: folderName,
workspaceId,
})
@@ -175,7 +174,7 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
const pathSegment = workflow.folderPath.slice(0, i + 1).join('/')
if (!folderMap.has(pathSegment)) {
const subFolder = await createFolder({
const subFolder = await createFolderMutation.mutateAsync({
name: workflow.folderPath[i],
workspaceId,
parentId,
@@ -302,9 +301,6 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
const { loadWorkflows } = useWorkflowRegistry.getState()
await loadWorkflows(workspaceId)
const { fetchFolders } = useFolderStore.getState()
await fetchFolders(workspaceId)
} catch (error) {
logger.error('Failed to import workflows:', error)
} finally {
@@ -314,7 +310,7 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
}
}
},
[workspaceId, createWorkflow]
[workspaceId, createWorkflow, createFolderMutation]
)
// Button event handlers

View File

@@ -18,6 +18,7 @@ import {
import { Button } from '@/components/ui/button'
import { createLogger } from '@/lib/logs/console/logger'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useDeleteFolderMutation, useUpdateFolder } from '@/hooks/queries/folders'
import { type FolderTreeNode, useFolderStore } from '@/stores/folders/store'
const logger = createLogger('FolderItem')
@@ -45,7 +46,9 @@ export function FolderItem({
isFirstItem = false,
level,
}: FolderItemProps) {
const { expandedFolders, toggleExpanded, updateFolderAPI, deleteFolder } = useFolderStore()
const { expandedFolders, toggleExpanded } = useFolderStore()
const updateFolderMutation = useUpdateFolder()
const deleteFolderMutation = useDeleteFolderMutation()
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
const [isDragging, setIsDragging] = useState(false)
const [isEditing, setIsEditing] = useState(false)
@@ -127,7 +130,11 @@ export function FolderItem({
setIsRenaming(true)
try {
await updateFolderAPI(folder.id, { name: editValue.trim() })
await updateFolderMutation.mutateAsync({
workspaceId,
id: folder.id,
updates: { name: editValue.trim() },
})
logger.info(`Successfully renamed folder from "${folder.name}" to "${editValue.trim()}"`)
setIsEditing(false)
} catch (error) {
@@ -171,7 +178,7 @@ export function FolderItem({
setShowDeleteDialog(false)
try {
await deleteFolder(folder.id, workspaceId)
await deleteFolderMutation.mutateAsync({ id: folder.id, workspaceId })
} catch (error) {
logger.error('Failed to delete folder:', { error })
}

View File

@@ -7,7 +7,8 @@ import { Skeleton } from '@/components/ui/skeleton'
import { createLogger } from '@/lib/logs/console/logger'
import { FolderItem } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/folder-tree/components/folder-item'
import { WorkflowItem } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/folder-tree/components/workflow-item'
import { type FolderTreeNode, useFolderStore } from '@/stores/folders/store'
import { useFolders, useUpdateFolder } from '@/hooks/queries/folders'
import { type FolderTreeNode, useFolderStore, type WorkflowFolder } from '@/stores/folders/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import type { WorkflowMetadata } from '@/stores/workflows/registry/types'
@@ -390,24 +391,18 @@ export function FolderTree({
const params = useParams()
const workspaceId = params.workspaceId as string
const workflowId = params.workflowId as string
const {
getFolderTree,
expandedFolders,
fetchFolders,
isLoading: foldersLoading,
clearSelection,
updateFolderAPI,
getFolderPath,
setExpanded,
} = useFolderStore()
const foldersQuery = useFolders(workspaceId)
const updateFolder = useUpdateFolder()
const { getFolderTree, expandedFolders, clearSelection, getFolderPath, setExpanded } =
useFolderStore()
const { updateWorkflow } = useWorkflowRegistry()
// Memoize the active workflow's folder ID to avoid unnecessary re-runs
const activeWorkflowFolderId = useMemo(() => {
if (!workflowId || isLoading || foldersLoading) return null
if (!workflowId || isLoading || foldersQuery.isLoading) return null
const activeWorkflow = regularWorkflows.find((workflow) => workflow.id === workflowId)
return activeWorkflow?.folderId || null
}, [workflowId, regularWorkflows, isLoading, foldersLoading])
}, [workflowId, regularWorkflows, isLoading, foldersQuery.isLoading])
// Auto-expand folders when a workflow is active
useEffect(() => {
@@ -426,7 +421,7 @@ export function FolderTree({
// Clean up any existing folders with 3+ levels of nesting
const cleanupDeepNesting = useCallback(async () => {
const { getFolderTree, updateFolderAPI } = useFolderStore.getState()
const { getFolderTree } = useFolderStore.getState()
const folderTree = getFolderTree(workspaceId)
const findDeepFolders = (nodes: FolderTreeNode[], currentLevel = 0): FolderTreeNode[] => {
@@ -452,23 +447,24 @@ export function FolderTree({
// Move deeply nested folders to root level
for (const folder of deepFolders) {
try {
await updateFolderAPI(folder.id, { parentId: null })
await updateFolder.mutateAsync({
workspaceId,
id: folder.id,
updates: { parentId: null },
})
logger.info(`Moved deeply nested folder "${folder.name}" to root level`)
} catch (error) {
logger.error(`Failed to move folder "${folder.name}":`, error)
}
}
}, [workspaceId])
}, [workspaceId, updateFolder])
// Fetch folders when workspace changes
useEffect(() => {
if (workspaceId) {
fetchFolders(workspaceId).then(() => {
// Clean up any existing deep nesting after folders are loaded
cleanupDeepNesting()
})
if (workspaceId && foldersQuery.data) {
cleanupDeepNesting()
}
}, [workspaceId, fetchFolders, cleanupDeepNesting])
}, [workspaceId, foldersQuery.data, cleanupDeepNesting])
useEffect(() => {
clearSelection()
@@ -487,13 +483,19 @@ export function FolderTree({
{} as Record<string, WorkflowMetadata[]>
)
const updateFolderFn = useCallback(
(id: string, updates: Partial<WorkflowFolder>) =>
updateFolder.mutateAsync({ workspaceId, id, updates }),
[updateFolder, workspaceId]
)
const {
isDragOver: rootDragOver,
isInvalidDrop: rootInvalidDrop,
handleDragOver: handleRootDragOver,
handleDragLeave: handleRootDragLeave,
handleDrop: handleRootDrop,
} = useDragHandlers(updateWorkflow, updateFolderAPI, null, 'Moved workflow(s) to root')
} = useDragHandlers(updateWorkflow, updateFolderFn, null, 'Moved workflow(s) to root')
const renderFolderTree = (
nodes: FolderTreeNode[],
@@ -510,7 +512,7 @@ export function FolderTree({
expandedFolders={expandedFolders}
pathname={pathname}
updateWorkflow={updateWorkflow}
updateFolder={updateFolderAPI}
updateFolder={updateFolderFn}
renderFolderTree={renderFolderTree}
parentDragOver={parentDragOver}
isFirstItem={level === 0 && index === 0}
@@ -518,7 +520,7 @@ export function FolderTree({
))
}
const showLoading = isLoading || foldersLoading
const showLoading = isLoading || foldersQuery.isLoading
const rootWorkflows = workflowsByFolder.root || []
// Render skeleton loading state

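Because `useDragHandlers` and the child items still expect an `(id, updates)` updater callback, the tree wraps the mutation in a small adapter instead of threading the mutation object through. A minimal sketch of that adapter, assuming the callback shape implied by the diff (the actual `useDragHandlers` signature is not shown in this commit):

// Sketch: adapt a TanStack mutation to the legacy (id, updates) callback shape.
import { useCallback } from 'react'
import { useUpdateFolder } from '@/hooks/queries/folders'
import type { WorkflowFolder } from '@/stores/folders/store'

export function useUpdateFolderCallback(workspaceId: string) {
  const updateFolder = useUpdateFolder()

  return useCallback(
    (id: string, updates: Partial<WorkflowFolder>) =>
      updateFolder.mutateAsync({ workspaceId, id, updates }),
    [updateFolder, workspaceId]
  )
}
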
View File

@@ -1,5 +1,7 @@
import { useCallback, useEffect, useRef, useState } from 'react'
import { useParams } from 'next/navigation'
import { createLogger } from '@/lib/logs/console/logger'
import { useUpdateFolder } from '@/hooks/queries/folders'
import { useFolderStore } from '@/stores/folders/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -31,7 +33,10 @@ export function useDragDrop() {
const hoverExpandTimerRef = useRef<number | null>(null)
const lastDragYRef = useRef<number>(0)
const { updateFolderAPI, getFolderPath, setExpanded, expandedFolders } = useFolderStore()
const params = useParams()
const workspaceId = params.workspaceId as string | undefined
const updateFolderMutation = useUpdateFolder()
const { setExpanded, expandedFolders } = useFolderStore()
const { updateWorkflow } = useWorkflowRegistry()
/**
@@ -192,13 +197,21 @@ export function useDragDrop() {
return
}
await updateFolderAPI(draggedFolderId, { parentId: targetFolderId })
if (!workspaceId) {
logger.warn('No workspaceId available for folder move')
return
}
await updateFolderMutation.mutateAsync({
workspaceId,
id: draggedFolderId,
updates: { parentId: targetFolderId },
})
logger.info(`Moved folder to ${targetFolderId ? `folder ${targetFolderId}` : 'root'}`)
} catch (error) {
logger.error('Failed to move folder:', error)
}
},
[updateFolderAPI]
[updateFolderMutation, workspaceId]
)
/**

View File

@@ -1,7 +1,7 @@
import { useCallback, useState } from 'react'
import { createLogger } from '@/lib/logs/console/logger'
import { generateFolderName } from '@/lib/naming'
import { useFolderStore } from '@/stores/folders/store'
import { useCreateFolder } from '@/hooks/queries/folders'
const logger = createLogger('useFolderOperations')
@@ -17,7 +17,7 @@ interface UseFolderOperationsProps {
* @returns Folder operations state and handlers
*/
export function useFolderOperations({ workspaceId }: UseFolderOperationsProps) {
const { createFolder } = useFolderStore()
const createFolderMutation = useCreateFolder()
const [isCreatingFolder, setIsCreatingFolder] = useState(false)
/**
@@ -32,7 +32,7 @@ export function useFolderOperations({ workspaceId }: UseFolderOperationsProps) {
try {
setIsCreatingFolder(true)
const folderName = await generateFolderName(workspaceId)
const folder = await createFolder({ name: folderName, workspaceId })
const folder = await createFolderMutation.mutateAsync({ name: folderName, workspaceId })
logger.info(`Created folder: ${folderName}`)
return folder.id
} catch (error) {
@@ -41,7 +41,7 @@ export function useFolderOperations({ workspaceId }: UseFolderOperationsProps) {
} finally {
setIsCreatingFolder(false)
}
}, [createFolder, workspaceId, isCreatingFolder])
}, [createFolderMutation, workspaceId, isCreatingFolder])
return {
// State

View File

@@ -1,5 +1,6 @@
import { useCallback, useState } from 'react'
import { createLogger } from '@/lib/logs/console/logger'
import { useDeleteFolderMutation } from '@/hooks/queries/folders'
import { useFolderStore } from '@/stores/folders/store'
const logger = createLogger('useDeleteFolder')
@@ -34,7 +35,7 @@ interface UseDeleteFolderProps {
* @returns Delete folder handlers and state
*/
export function useDeleteFolder({ workspaceId, getFolderIds, onSuccess }: UseDeleteFolderProps) {
const { deleteFolder } = useFolderStore()
const deleteFolderMutation = useDeleteFolderMutation()
const [isDeleting, setIsDeleting] = useState(false)
/**
@@ -58,7 +59,7 @@ export function useDeleteFolder({ workspaceId, getFolderIds, onSuccess }: UseDel
// Delete each folder sequentially
for (const folderId of folderIdsToDelete) {
await deleteFolder(folderId, workspaceId)
await deleteFolderMutation.mutateAsync({ id: folderId, workspaceId })
}
// Clear selection after successful deletion
@@ -73,7 +74,7 @@ export function useDeleteFolder({ workspaceId, getFolderIds, onSuccess }: UseDel
} finally {
setIsDeleting(false)
}
}, [getFolderIds, isDeleting, deleteFolder, workspaceId, onSuccess])
}, [getFolderIds, isDeleting, deleteFolderMutation, workspaceId, onSuccess])
return {
isDeleting,

View File

@@ -1,5 +1,6 @@
import { useCallback, useState } from 'react'
import { createLogger } from '@/lib/logs/console/logger'
import { useDuplicateFolderMutation } from '@/hooks/queries/folders'
import { useFolderStore } from '@/stores/folders/store'
const logger = createLogger('useDuplicateFolder')
@@ -38,7 +39,7 @@ export function useDuplicateFolder({
getFolderIds,
onSuccess,
}: UseDuplicateFolderProps) {
const { duplicateFolder } = useFolderStore()
const duplicateFolderMutation = useDuplicateFolderMutation()
const [isDuplicating, setIsDuplicating] = useState(false)
/**
@@ -64,7 +65,8 @@ export function useDuplicateFolder({
// Duplicate each folder sequentially
for (const folderId of folderIdsToDuplicate) {
const newFolderId = await duplicateFolder(folderId)
const result = await duplicateFolderMutation.mutateAsync({ id: folderId, workspaceId })
const newFolderId = result?.id
if (newFolderId) {
duplicatedIds.push(newFolderId)
}
@@ -86,7 +88,7 @@ export function useDuplicateFolder({
} finally {
setIsDuplicating(false)
}
}, [getFolderIds, isDuplicating, duplicateFolder, onSuccess])
}, [getFolderIds, isDuplicating, duplicateFolderMutation, workspaceId, onSuccess])
return {
isDuplicating,

View File

@@ -1,4 +1,5 @@
import { useCallback, useState } from 'react'
import { useQueryClient } from '@tanstack/react-query'
import { useRouter } from 'next/navigation'
import { createLogger } from '@/lib/logs/console/logger'
import {
@@ -6,7 +7,7 @@ import {
extractWorkflowsFromFiles,
extractWorkflowsFromZip,
} from '@/lib/workflows/import-export'
import { useFolderStore } from '@/stores/folders/store'
import { folderKeys, useCreateFolder } from '@/hooks/queries/folders'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { parseWorkflowJson } from '@/stores/workflows/json/importer'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -30,6 +31,8 @@ interface UseImportWorkflowProps {
export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
const router = useRouter()
const { createWorkflow, loadWorkflows } = useWorkflowRegistry()
const queryClient = useQueryClient()
const createFolderMutation = useCreateFolder()
const [isImporting, setIsImporting] = useState(false)
/**
@@ -119,9 +122,11 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
const zipFile = fileArray[0]
const { workflows: extractedWorkflows, metadata } = await extractWorkflowsFromZip(zipFile)
const { createFolder } = useFolderStore.getState()
const folderName = metadata?.workspaceName || zipFile.name.replace(/\.zip$/i, '')
const importFolder = await createFolder({ name: folderName, workspaceId })
const importFolder = await createFolderMutation.mutateAsync({
name: folderName,
workspaceId,
})
const folderMap = new Map<string, string>()
for (const workflow of extractedWorkflows) {
@@ -139,7 +144,7 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
const pathSegment = workflow.folderPath.slice(0, i + 1).join('/')
if (!folderMap.has(pathSegment)) {
const subFolder = await createFolder({
const subFolder = await createFolderMutation.mutateAsync({
name: workflow.folderPath[i],
workspaceId,
parentId,
@@ -181,7 +186,7 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
// Reload workflows to show newly imported ones
await loadWorkflows(workspaceId)
await useFolderStore.getState().fetchFolders(workspaceId)
await queryClient.invalidateQueries({ queryKey: folderKeys.list(workspaceId) })
logger.info(`Import complete. Imported ${importedWorkflowIds.length} workflow(s)`)
@@ -200,7 +205,7 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
}
}
},
[importSingleWorkflow, workspaceId, loadWorkflows, router]
[importSingleWorkflow, workspaceId, loadWorkflows, router, createFolderMutation, queryClient]
)
return {

View File

@@ -2,7 +2,7 @@ import { useCallback, useState } from 'react'
import { useRouter } from 'next/navigation'
import { createLogger } from '@/lib/logs/console/logger'
import { extractWorkflowName, extractWorkflowsFromZip } from '@/lib/workflows/import-export'
import { useFolderStore } from '@/stores/folders/store'
import { useCreateFolder } from '@/hooks/queries/folders'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { parseWorkflowJson } from '@/stores/workflows/json/importer'
@@ -33,6 +33,7 @@ interface UseImportWorkspaceProps {
export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {}) {
const router = useRouter()
const [isImporting, setIsImporting] = useState(false)
const createFolderMutation = useCreateFolder()
/**
* Handle workspace import from ZIP file
@@ -75,7 +76,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
const { workspace: newWorkspace } = await createResponse.json()
logger.info('Created new workspace:', newWorkspace)
const { createFolder } = useFolderStore.getState()
const folderMap = new Map<string, string>()
// Import workflows
@@ -100,7 +100,7 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
const pathSegment = workflow.folderPath.slice(0, i + 1).join('/')
if (!folderMap.has(pathSegment)) {
const subFolder = await createFolder({
const subFolder = await createFolderMutation.mutateAsync({
name: workflow.folderPath[i],
workspaceId: newWorkspace.id,
parentId: parentId || undefined,
@@ -192,7 +192,7 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
setIsImporting(false)
}
},
[isImporting, router, onSuccess]
[isImporting, router, onSuccess, createFolderMutation]
)
return {

View File

@@ -1,5 +1,7 @@
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { createLogger } from '@/lib/logs/console/logger'
import { useCustomToolsStore } from '@/stores/custom-tools/store'
import type { CustomToolDefinition, CustomToolSchema } from '@/stores/custom-tools/types'
const logger = createLogger('CustomToolsQueries')
const API_ENDPOINT = '/api/tools/custom'
@@ -14,32 +16,62 @@ export const customToolsKeys = {
detail: (toolId: string) => [...customToolsKeys.all, 'detail', toolId] as const,
}
/**
* Custom Tool Types
*/
export interface CustomToolSchema {
function?: {
name?: string
description?: string
parameters?: any
export type CustomTool = CustomToolDefinition
type ApiCustomTool = Partial<CustomToolDefinition> & {
id: string
title: string
schema: Partial<CustomToolSchema> & {
function?: Partial<CustomToolSchema['function']> & {
parameters?: Partial<CustomToolSchema['function']['parameters']>
}
}
code?: string
}
function normalizeCustomTool(tool: ApiCustomTool, workspaceId: string): CustomToolDefinition {
const fallbackName = tool.schema.function?.name || tool.id
const parameters = tool.schema.function?.parameters ?? {
type: 'object',
properties: {},
}
return {
id: tool.id,
title: tool.title,
code: typeof tool.code === 'string' ? tool.code : '',
workspaceId: tool.workspaceId ?? workspaceId ?? null,
userId: tool.userId ?? null,
createdAt:
typeof tool.createdAt === 'string'
? tool.createdAt
: tool.updatedAt && typeof tool.updatedAt === 'string'
? tool.updatedAt
: new Date().toISOString(),
updatedAt: typeof tool.updatedAt === 'string' ? tool.updatedAt : undefined,
schema: {
type: tool.schema.type ?? 'function',
function: {
name: fallbackName,
description: tool.schema.function?.description,
parameters: {
type: parameters.type ?? 'object',
properties: parameters.properties ?? {},
required: parameters.required,
},
},
},
}
}
export interface CustomTool {
id: string
title: string
schema?: CustomToolSchema
code: string
workspaceId?: string
userId?: string
createdAt?: string
updatedAt?: string
function syncCustomToolsToStore(tools: CustomToolDefinition[]) {
useCustomToolsStore.getState().setTools(tools)
}
/**
* Fetch custom tools for a workspace
*/
async function fetchCustomTools(workspaceId: string): Promise<CustomTool[]> {
async function fetchCustomTools(workspaceId: string): Promise<CustomToolDefinition[]> {
const response = await fetch(`${API_ENDPOINT}?workspaceId=${workspaceId}`)
if (!response.ok) {
@@ -53,45 +85,68 @@ async function fetchCustomTools(workspaceId: string): Promise<CustomTool[]> {
throw new Error('Invalid response format')
}
// Filter and validate tools
const validTools = data.filter((tool, index) => {
const normalizedTools: CustomToolDefinition[] = []
data.forEach((tool, index) => {
if (!tool || typeof tool !== 'object') {
logger.warn(`Skipping invalid tool at index ${index}: not an object`)
return false
return
}
if (!tool.id || typeof tool.id !== 'string') {
logger.warn(`Skipping invalid tool at index ${index}: missing or invalid id`)
return false
return
}
if (!tool.title || typeof tool.title !== 'string') {
logger.warn(`Skipping invalid tool at index ${index}: missing or invalid title`)
return false
return
}
if (!tool.schema || typeof tool.schema !== 'object') {
logger.warn(`Skipping invalid tool at index ${index}: missing or invalid schema`)
return false
return
}
if (!tool.code || typeof tool.code !== 'string') {
logger.warn(`Tool at index ${index} missing code field, defaulting to empty string`)
tool.code = ''
if (!tool.schema.function || typeof tool.schema.function !== 'object') {
logger.warn(`Skipping invalid tool at index ${index}: missing function schema`)
return
}
const apiTool: ApiCustomTool = {
id: tool.id,
title: tool.title,
schema: tool.schema,
code: typeof tool.code === 'string' ? tool.code : '',
workspaceId: tool.workspaceId ?? null,
userId: tool.userId ?? null,
createdAt: tool.createdAt ?? undefined,
updatedAt: tool.updatedAt ?? undefined,
}
try {
normalizedTools.push(normalizeCustomTool(apiTool, workspaceId))
} catch (error) {
logger.warn(`Failed to normalize custom tool at index ${index}`, { error })
}
return true
})
return validTools
return normalizedTools
}
/**
* Hook to fetch custom tools
*/
export function useCustomTools(workspaceId: string) {
return useQuery({
const query = useQuery<CustomToolDefinition[]>({
queryKey: customToolsKeys.list(workspaceId),
queryFn: () => fetchCustomTools(workspaceId),
enabled: !!workspaceId,
staleTime: 60 * 1000, // 1 minute - tools don't change frequently
placeholderData: keepPreviousData,
})
if (query.data) {
syncCustomToolsToStore(query.data)
}
return query
}
/**
@@ -169,7 +224,9 @@ export function useUpdateCustomTool() {
logger.info(`Updating custom tool: ${toolId} in workspace ${workspaceId}`)
// Get the current tool to merge with updates
const currentTools = queryClient.getQueryData<CustomTool[]>(customToolsKeys.list(workspaceId))
const currentTools = queryClient.getQueryData<CustomToolDefinition[]>(
customToolsKeys.list(workspaceId)
)
const currentTool = currentTools?.find((t) => t.id === toolId)
if (!currentTool) {
@@ -210,13 +267,13 @@ export function useUpdateCustomTool() {
await queryClient.cancelQueries({ queryKey: customToolsKeys.list(workspaceId) })
// Snapshot the previous value
const previousTools = queryClient.getQueryData<CustomTool[]>(
const previousTools = queryClient.getQueryData<CustomToolDefinition[]>(
customToolsKeys.list(workspaceId)
)
// Optimistically update to the new value
if (previousTools) {
queryClient.setQueryData<CustomTool[]>(
queryClient.setQueryData<CustomToolDefinition[]>(
customToolsKeys.list(workspaceId),
previousTools.map((tool) =>
tool.id === toolId
@@ -285,13 +342,13 @@ export function useDeleteCustomTool() {
await queryClient.cancelQueries({ queryKey: customToolsKeys.list(workspaceId) })
// Snapshot the previous value
const previousTools = queryClient.getQueryData<CustomTool[]>(
const previousTools = queryClient.getQueryData<CustomToolDefinition[]>(
customToolsKeys.list(workspaceId)
)
// Optimistically update to the new value
if (previousTools) {
queryClient.setQueryData<CustomTool[]>(
queryClient.setQueryData<CustomToolDefinition[]>(
customToolsKeys.list(workspaceId),
previousTools.filter((tool) => tool.id !== toolId)
)

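`normalizeCustomTool` converts the loosely typed API payload into the `CustomToolDefinition` shape the Zustand store expects, backfilling the function name, timestamps, and an empty parameter schema. A small worked example in the scope of the hooks file above; the field values are invented for illustration:

// Worked example (illustrative values only), assuming module scope of the file above.
const normalized = normalizeCustomTool(
  {
    id: 'tool_123',
    title: 'Lookup Order',
    schema: { function: { description: 'Finds an order by id' } },
  },
  'workspace_abc'
)
// normalized.schema.function.name       -> 'tool_123' (falls back to the tool id)
// normalized.schema.function.parameters -> empty object schema ({ type: 'object', properties: {} })
// normalized.workspaceId                -> 'workspace_abc' (caller's workspace as fallback)
// normalized.createdAt                  -> ISO timestamp generated at normalization time
// normalized.code                       -> '' (missing code defaults to an empty string)
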
View File

@@ -1,6 +1,14 @@
import { useEffect } from 'react'
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import type { WorkspaceEnvironmentData } from '@/lib/environment/api'
import { fetchPersonalEnvironment, fetchWorkspaceEnvironment } from '@/lib/environment/api'
import { createLogger } from '@/lib/logs/console/logger'
import { API_ENDPOINTS } from '@/stores/constants'
import { useEnvironmentStore } from '@/stores/settings/environment/store'
import type { EnvironmentVariable } from '@/stores/settings/environment/types'
export type { WorkspaceEnvironmentData } from '@/lib/environment/api'
export type { EnvironmentVariable } from '@/stores/settings/environment/types'
const logger = createLogger('EnvironmentQueries')
@@ -16,65 +24,26 @@ export const environmentKeys = {
/**
* Environment Variable Types
*/
export interface EnvironmentVariable {
key: string
value: string
}
export interface WorkspaceEnvironmentData {
workspace: Record<string, string>
personal: Record<string, string>
conflicts: string[]
}
/**
* Fetch personal environment variables
*/
async function fetchPersonalEnvironment(): Promise<Record<string, EnvironmentVariable>> {
const response = await fetch(API_ENDPOINTS.ENVIRONMENT)
if (!response.ok) {
throw new Error(`Failed to load environment variables: ${response.statusText}`)
}
const { data } = await response.json()
if (data && typeof data === 'object') {
return data
}
return {}
}
/**
* Hook to fetch personal environment variables
*/
export function usePersonalEnvironment() {
return useQuery({
const setVariables = useEnvironmentStore((state) => state.setVariables)
const query = useQuery({
queryKey: environmentKeys.personal(),
queryFn: fetchPersonalEnvironment,
staleTime: 60 * 1000, // 1 minute
placeholderData: keepPreviousData,
})
}
/**
* Fetch workspace environment variables
*/
async function fetchWorkspaceEnvironment(workspaceId: string): Promise<WorkspaceEnvironmentData> {
const response = await fetch(API_ENDPOINTS.WORKSPACE_ENVIRONMENT(workspaceId))
useEffect(() => {
if (query.data) {
setVariables(query.data)
}
}, [query.data, setVariables])
if (!response.ok) {
throw new Error(`Failed to load workspace environment: ${response.statusText}`)
}
const { data } = await response.json()
return {
workspace: data.workspace || {},
personal: data.personal || {},
conflicts: data.conflicts || [],
}
return query
}
/**

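With the fetchers moved to '@/lib/environment/api', the query hook's remaining job is mirroring results into the Zustand environment store for legacy readers. A minimal consumer sketch based on the hook shown above; the component name and import path are illustrative, and the data shape follows `fetchPersonalEnvironment` as previously defined in this file:

// Illustrative consumer: React Query drives fetching; the hook syncs data into the Zustand store.
import { usePersonalEnvironment } from '@/hooks/queries/environment' // import path assumed

export function EnvironmentStatus() {
  const { data, isLoading, error } = usePersonalEnvironment()

  if (isLoading) return <span>Loading environment…</span>
  if (error) return <span>Failed to load environment variables</span>
  // data is the Record<string, EnvironmentVariable> returned by fetchPersonalEnvironment.
  return <span>{Object.keys(data ?? {}).length} variables loaded</span>
}
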
View File

@@ -0,0 +1,186 @@
import { useEffect } from 'react'
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { createLogger } from '@/lib/logs/console/logger'
import { useFolderStore, type WorkflowFolder } from '@/stores/folders/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
const logger = createLogger('FolderQueries')
export const folderKeys = {
all: ['folders'] as const,
lists: () => [...folderKeys.all, 'list'] as const,
list: (workspaceId: string | undefined) => [...folderKeys.lists(), workspaceId ?? ''] as const,
}
function mapFolder(folder: any): WorkflowFolder {
return {
id: folder.id,
name: folder.name,
userId: folder.userId,
workspaceId: folder.workspaceId,
parentId: folder.parentId,
color: folder.color,
isExpanded: folder.isExpanded,
sortOrder: folder.sortOrder,
createdAt: new Date(folder.createdAt),
updatedAt: new Date(folder.updatedAt),
}
}
async function fetchFolders(workspaceId: string): Promise<WorkflowFolder[]> {
const response = await fetch(`/api/folders?workspaceId=${workspaceId}`)
if (!response.ok) {
throw new Error('Failed to fetch folders')
}
const { folders }: { folders: any[] } = await response.json()
return folders.map(mapFolder)
}
export function useFolders(workspaceId?: string) {
const setFolders = useFolderStore((state) => state.setFolders)
const query = useQuery({
queryKey: folderKeys.list(workspaceId),
queryFn: () => fetchFolders(workspaceId as string),
enabled: Boolean(workspaceId),
placeholderData: keepPreviousData,
staleTime: 60 * 1000,
})
useEffect(() => {
if (query.data) {
setFolders(query.data)
}
}, [query.data, setFolders])
return query
}
interface CreateFolderVariables {
workspaceId: string
name: string
parentId?: string
color?: string
}
interface UpdateFolderVariables {
workspaceId: string
id: string
updates: Partial<Pick<WorkflowFolder, 'name' | 'parentId' | 'color' | 'sortOrder'>>
}
interface DeleteFolderVariables {
workspaceId: string
id: string
}
interface DuplicateFolderVariables {
workspaceId: string
id: string
}
export function useCreateFolder() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async ({ workspaceId, ...payload }: CreateFolderVariables) => {
const response = await fetch('/api/folders', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ ...payload, workspaceId }),
})
if (!response.ok) {
const error = await response.json().catch(() => ({}))
throw new Error(error.error || 'Failed to create folder')
}
const { folder } = await response.json()
return mapFolder(folder)
},
onSuccess: (_data, variables) => {
queryClient.invalidateQueries({ queryKey: folderKeys.list(variables.workspaceId) })
},
})
}
export function useUpdateFolder() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async ({ workspaceId, id, updates }: UpdateFolderVariables) => {
const response = await fetch(`/api/folders/${id}`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(updates),
})
if (!response.ok) {
const error = await response.json().catch(() => ({}))
throw new Error(error.error || 'Failed to update folder')
}
const { folder } = await response.json()
return mapFolder(folder)
},
onSuccess: (_data, variables) => {
queryClient.invalidateQueries({ queryKey: folderKeys.list(variables.workspaceId) })
},
})
}
export function useDeleteFolderMutation() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async ({ workspaceId: _workspaceId, id }: DeleteFolderVariables) => {
const response = await fetch(`/api/folders/${id}`, { method: 'DELETE' })
if (!response.ok) {
const error = await response.json().catch(() => ({}))
throw new Error(error.error || 'Failed to delete folder')
}
return response.json()
},
onSuccess: async (_data, variables) => {
queryClient.invalidateQueries({ queryKey: folderKeys.list(variables.workspaceId) })
try {
await useWorkflowRegistry.getState().loadWorkflows(variables.workspaceId)
} catch (error) {
logger.error('Failed to reload workflows after folder delete', { error })
}
},
})
}
export function useDuplicateFolderMutation() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async ({ id, workspaceId }: DuplicateFolderVariables) => {
const response = await fetch(`/api/folders/${id}/duplicate`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ workspaceId }),
})
if (!response.ok) {
const error = await response.json().catch(() => ({}))
throw new Error(error.error || 'Failed to duplicate folder')
}
return response.json()
},
onSuccess: async (_data, variables) => {
queryClient.invalidateQueries({ queryKey: folderKeys.list(variables.workspaceId) })
try {
await useWorkflowRegistry.getState().loadWorkflows(variables.workspaceId)
} catch (error) {
logger.error('Failed to reload workflows after folder duplicate', { error })
}
},
})
}

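Since the hooks file above is added in full, the call sites elsewhere in this commit reduce to one pattern. A compact sketch of that usage (the wrapper hook is invented for illustration; the mutation signatures match the file above):

// Illustrative call-site pattern for the folder mutations defined above.
import { useCreateFolder, useDeleteFolderMutation, useUpdateFolder } from '@/hooks/queries/folders'

export function useFolderActions(workspaceId: string) {
  const createFolder = useCreateFolder()
  const updateFolder = useUpdateFolder()
  const deleteFolder = useDeleteFolderMutation()

  return {
    // Each mutateAsync resolves after the POST/PUT/DELETE and invalidates the folder list onSuccess.
    create: (name: string, parentId?: string) =>
      createFolder.mutateAsync({ workspaceId, name, parentId }),
    rename: (id: string, name: string) =>
      updateFolder.mutateAsync({ workspaceId, id, updates: { name } }),
    remove: (id: string) => deleteFolder.mutateAsync({ workspaceId, id }),
    isBusy: createFolder.isPending || updateFolder.isPending || deleteFolder.isPending,
  }
}
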
View File

@@ -57,11 +57,9 @@ async function fetchGeneralSettings(): Promise<GeneralSettings> {
* This ensures the rest of the app (which uses Zustand) stays in sync
*/
function syncSettingsToZustand(settings: GeneralSettings) {
const store = useGeneralStore.getState()
const { setSettings } = useGeneralStore.getState()
// Update Zustand store to match React Query cache
// This allows the rest of the app to continue using Zustand for reading values
useGeneralStore.setState({
setSettings({
isAutoConnectEnabled: settings.autoConnect,
isAutoPanEnabled: settings.autoPan,
isConsoleExpandedByDefault: settings.consoleExpandedByDefault,

View File

@@ -0,0 +1,297 @@
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { createLogger } from '@/lib/logs/console/logger'
import type {
ChunkData,
ChunksPagination,
DocumentData,
DocumentsPagination,
KnowledgeBaseData,
} from '@/stores/knowledge/store'
const logger = createLogger('KnowledgeQueries')
export const knowledgeKeys = {
all: ['knowledge'] as const,
list: (workspaceId?: string) => [...knowledgeKeys.all, 'list', workspaceId ?? 'all'] as const,
detail: (knowledgeBaseId?: string) =>
[...knowledgeKeys.all, 'detail', knowledgeBaseId ?? ''] as const,
documents: (knowledgeBaseId: string, paramsKey: string) =>
[...knowledgeKeys.detail(knowledgeBaseId), 'documents', paramsKey] as const,
chunks: (knowledgeBaseId: string, documentId: string, paramsKey: string) =>
[
...knowledgeKeys.detail(knowledgeBaseId),
'document',
documentId,
'chunks',
paramsKey,
] as const,
}
export async function fetchKnowledgeBases(workspaceId?: string): Promise<KnowledgeBaseData[]> {
const url = workspaceId ? `/api/knowledge?workspaceId=${workspaceId}` : '/api/knowledge'
const response = await fetch(url)
if (!response.ok) {
throw new Error(`Failed to fetch knowledge bases: ${response.status} ${response.statusText}`)
}
const result = await response.json()
if (result?.success === false) {
throw new Error(result.error || 'Failed to fetch knowledge bases')
}
return Array.isArray(result?.data) ? result.data : []
}
export async function fetchKnowledgeBase(knowledgeBaseId: string): Promise<KnowledgeBaseData> {
const response = await fetch(`/api/knowledge/${knowledgeBaseId}`)
if (!response.ok) {
throw new Error(`Failed to fetch knowledge base: ${response.status} ${response.statusText}`)
}
const result = await response.json()
if (!result?.success || !result?.data) {
throw new Error(result?.error || 'Failed to fetch knowledge base')
}
return result.data
}
export interface KnowledgeDocumentsParams {
knowledgeBaseId: string
search?: string
limit?: number
offset?: number
sortBy?: string
sortOrder?: string
}
export interface KnowledgeDocumentsResponse {
documents: DocumentData[]
pagination: DocumentsPagination
}
export async function fetchKnowledgeDocuments({
knowledgeBaseId,
search,
limit = 50,
offset = 0,
sortBy,
sortOrder,
}: KnowledgeDocumentsParams): Promise<KnowledgeDocumentsResponse> {
const params = new URLSearchParams()
if (search) params.set('search', search)
if (sortBy) params.set('sortBy', sortBy)
if (sortOrder) params.set('sortOrder', sortOrder)
params.set('limit', limit.toString())
params.set('offset', offset.toString())
const url = `/api/knowledge/${knowledgeBaseId}/documents${params.toString() ? `?${params.toString()}` : ''}`
const response = await fetch(url)
if (!response.ok) {
throw new Error(`Failed to fetch documents: ${response.status} ${response.statusText}`)
}
const result = await response.json()
if (!result?.success) {
throw new Error(result?.error || 'Failed to fetch documents')
}
const documents: DocumentData[] = result.data?.documents ?? result.data ?? []
const pagination: DocumentsPagination = result.data?.pagination ??
result.pagination ?? {
total: documents.length,
limit,
offset,
hasMore: false,
}
return {
documents,
pagination: {
total: pagination.total ?? documents.length,
limit: pagination.limit ?? limit,
offset: pagination.offset ?? offset,
hasMore: Boolean(pagination.hasMore),
},
}
}
export interface KnowledgeChunksParams {
knowledgeBaseId: string
documentId: string
search?: string
limit?: number
offset?: number
}
export interface KnowledgeChunksResponse {
chunks: ChunkData[]
pagination: ChunksPagination
}
export async function fetchKnowledgeChunks({
knowledgeBaseId,
documentId,
search,
limit = 50,
offset = 0,
}: KnowledgeChunksParams): Promise<KnowledgeChunksResponse> {
const params = new URLSearchParams()
if (search) params.set('search', search)
if (limit) params.set('limit', limit.toString())
if (offset) params.set('offset', offset.toString())
const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks${params.toString() ? `?${params.toString()}` : ''}`
)
if (!response.ok) {
throw new Error(`Failed to fetch chunks: ${response.status} ${response.statusText}`)
}
const result = await response.json()
if (!result?.success) {
throw new Error(result?.error || 'Failed to fetch chunks')
}
const chunks: ChunkData[] = result.data ?? []
const pagination: ChunksPagination = {
total: result.pagination?.total ?? chunks.length,
limit: result.pagination?.limit ?? limit,
offset: result.pagination?.offset ?? offset,
hasMore: Boolean(result.pagination?.hasMore),
}
return { chunks, pagination }
}
export function useKnowledgeBasesQuery(
workspaceId?: string,
options?: {
enabled?: boolean
}
) {
return useQuery({
queryKey: knowledgeKeys.list(workspaceId),
queryFn: () => fetchKnowledgeBases(workspaceId),
enabled: options?.enabled ?? true,
staleTime: 60 * 1000,
placeholderData: keepPreviousData,
})
}
export function useKnowledgeBaseQuery(knowledgeBaseId?: string) {
return useQuery({
queryKey: knowledgeKeys.detail(knowledgeBaseId),
queryFn: () => fetchKnowledgeBase(knowledgeBaseId as string),
enabled: Boolean(knowledgeBaseId),
staleTime: 60 * 1000,
})
}
export const serializeDocumentParams = (params: KnowledgeDocumentsParams) =>
JSON.stringify({
search: params.search ?? '',
limit: params.limit ?? 50,
offset: params.offset ?? 0,
sortBy: params.sortBy ?? '',
sortOrder: params.sortOrder ?? '',
})
export function useKnowledgeDocumentsQuery(
params: KnowledgeDocumentsParams,
options?: {
enabled?: boolean
}
) {
const paramsKey = serializeDocumentParams(params)
return useQuery({
queryKey: knowledgeKeys.documents(params.knowledgeBaseId, paramsKey),
queryFn: () => fetchKnowledgeDocuments(params),
enabled: (options?.enabled ?? true) && Boolean(params.knowledgeBaseId),
placeholderData: keepPreviousData,
})
}
export const serializeChunkParams = (params: KnowledgeChunksParams) =>
JSON.stringify({
search: params.search ?? '',
limit: params.limit ?? 50,
offset: params.offset ?? 0,
})
export function useKnowledgeChunksQuery(
params: KnowledgeChunksParams,
options?: {
enabled?: boolean
}
) {
const paramsKey = serializeChunkParams(params)
return useQuery({
queryKey: knowledgeKeys.chunks(params.knowledgeBaseId, params.documentId, paramsKey),
queryFn: () => fetchKnowledgeChunks(params),
enabled: (options?.enabled ?? true) && Boolean(params.knowledgeBaseId && params.documentId),
placeholderData: keepPreviousData,
})
}
interface UpdateDocumentPayload {
knowledgeBaseId: string
documentId: string
updates: Partial<DocumentData>
}
export function useMutateKnowledgeDocument() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async ({ knowledgeBaseId, documentId, updates }: UpdateDocumentPayload) => {
const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(updates),
})
if (!response.ok) {
const errorData = await response.json().catch(() => ({}))
throw new Error(errorData.error || 'Failed to update document')
}
const result = await response.json()
if (!result?.success) {
throw new Error(result?.error || 'Failed to update document')
}
return result
},
onMutate: async ({ knowledgeBaseId, documentId, updates }) => {
await queryClient.cancelQueries({ queryKey: knowledgeKeys.detail(knowledgeBaseId) })
const documentQueries = queryClient
.getQueriesData<KnowledgeDocumentsResponse>({
queryKey: knowledgeKeys.detail(knowledgeBaseId),
})
.filter(([key]) => Array.isArray(key) && key.includes('documents'))
documentQueries.forEach(([key, data]) => {
if (!data) return
queryClient.setQueryData(key, {
...data,
documents: data.documents.map((doc) =>
doc.id === documentId ? { ...doc, ...updates } : doc
),
})
})
},
onError: (error) => {
logger.error('Failed to mutate document', error)
},
onSettled: (_data, _error, variables) => {
queryClient.invalidateQueries({ queryKey: knowledgeKeys.detail(variables.knowledgeBaseId) })
},
})
}
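
Usage sketch (not part of this diff): a documents view might pair the documents query with the update mutation as below. The import path, the component, and the `enabled` field in `updates` are assumptions.

import { useKnowledgeDocumentsQuery, useMutateKnowledgeDocument } from '@/hooks/queries/knowledge' // path assumed

export function DocumentList({ knowledgeBaseId }: { knowledgeBaseId: string }) {
  // Paginated document list backed by React Query; keepPreviousData avoids
  // flashing an empty list while a new page loads.
  const { data, isLoading } = useKnowledgeDocumentsQuery({ knowledgeBaseId, limit: 50, offset: 0 })
  const updateDocument = useMutateKnowledgeDocument()

  if (isLoading) return <p>Loading…</p>

  return (
    <ul>
      {data?.documents.map((doc) => (
        <li key={doc.id}>
          {doc.id}
          <button
            onClick={() =>
              updateDocument.mutate({
                knowledgeBaseId,
                documentId: doc.id,
                updates: { enabled: true }, // field name assumed
              })
            }
          >
            Enable
          </button>
        </li>
      ))}
    </ul>
  )
}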

View File

@@ -0,0 +1,36 @@
import { useQuery } from '@tanstack/react-query'
import { createLogger } from '@/lib/logs/console/logger'
import type { ProviderName } from '@/stores/providers/types'
const logger = createLogger('ProviderModelsQuery')
const providerEndpoints: Record<ProviderName, string> = {
base: '/api/providers/base/models',
ollama: '/api/providers/ollama/models',
openrouter: '/api/providers/openrouter/models',
}
async function fetchProviderModels(provider: ProviderName): Promise<string[]> {
const response = await fetch(providerEndpoints[provider])
if (!response.ok) {
logger.warn(`Failed to fetch ${provider} models`, {
status: response.status,
statusText: response.statusText,
})
throw new Error(`Failed to fetch ${provider} models`)
}
const data = await response.json()
const models: string[] = Array.isArray(data.models) ? data.models : []
return provider === 'openrouter' ? Array.from(new Set(models)) : models
}
export function useProviderModels(provider: ProviderName) {
return useQuery({
queryKey: ['provider-models', provider],
queryFn: () => fetchProviderModels(provider),
staleTime: 5 * 60 * 1000,
})
}
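
Usage sketch (not part of this diff): a model picker can read from this query; the component and import path are hypothetical.

import { useProviderModels } from '@/hooks/queries/provider-models' // path assumed

export function OllamaModelSelect() {
  // Models are cached for five minutes (staleTime above), so remounts do not
  // refetch /api/providers/ollama/models.
  const { data: models = [], isLoading, isError } = useProviderModels('ollama')

  if (isLoading) return <span>Loading models…</span>
  if (isError) return <span>Ollama unavailable</span>

  return (
    <select>
      {models.map((model) => (
        <option key={model} value={model}>
          {model}
        </option>
      ))}
    </select>
  )
}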

File diff suppressed because it is too large

View File

@@ -0,0 +1,42 @@
import { API_ENDPOINTS } from '@/stores/constants'
import type { EnvironmentVariable } from '@/stores/settings/environment/types'
export interface WorkspaceEnvironmentData {
workspace: Record<string, string>
personal: Record<string, string>
conflicts: string[]
}
export async function fetchPersonalEnvironment(): Promise<Record<string, EnvironmentVariable>> {
const response = await fetch(API_ENDPOINTS.ENVIRONMENT)
if (!response.ok) {
throw new Error(`Failed to load environment variables: ${response.statusText}`)
}
const { data } = await response.json()
if (data && typeof data === 'object') {
return data
}
return {}
}
export async function fetchWorkspaceEnvironment(
workspaceId: string
): Promise<WorkspaceEnvironmentData> {
const response = await fetch(API_ENDPOINTS.WORKSPACE_ENVIRONMENT(workspaceId))
if (!response.ok) {
throw new Error(`Failed to load workspace environment: ${response.statusText}`)
}
const { data } = await response.json()
return {
workspace: data.workspace || {},
personal: data.personal || {},
conflicts: data.conflicts || [],
}
}
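
Usage sketch (not part of this diff): these fetchers plug straight into React Query; the hook and its query key below are assumptions.

import { useQuery } from '@tanstack/react-query'
import { fetchWorkspaceEnvironment } from '@/lib/environment/api'

// Hypothetical hook: exposes workspace/personal env vars plus conflicting keys.
export function useWorkspaceEnvironmentQuery(workspaceId: string) {
  return useQuery({
    queryKey: ['workspace-environment', workspaceId], // key assumed
    queryFn: () => fetchWorkspaceEnvironment(workspaceId),
    enabled: Boolean(workspaceId),
  })
}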

View File

@@ -78,13 +78,13 @@ export const ollamaProvider: ProviderConfig = {
try {
const response = await fetch(`${OLLAMA_HOST}/api/tags`)
if (!response.ok) {
useProvidersStore.getState().setModels('ollama', [])
useProvidersStore.getState().setProviderModels('ollama', [])
logger.warn('Ollama service is not available. The provider will be disabled.')
return
}
const data = (await response.json()) as ModelsObject
this.models = data.models.map((model) => model.name)
useProvidersStore.getState().setModels('ollama', this.models)
useProvidersStore.getState().setProviderModels('ollama', this.models)
} catch (error) {
logger.warn('Ollama model instantiation failed. The provider will be disabled.', {
error: error instanceof Error ? error.message : 'Unknown error',

View File

@@ -1,25 +1,12 @@
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import { createLogger } from '@/lib/logs/console/logger'
import { withOptimisticUpdate } from '@/lib/utils'
import type { CustomToolsState, CustomToolsStore } from './types'
const logger = createLogger('CustomToolsStore')
const API_ENDPOINT = '/api/tools/custom'
class ApiError extends Error {
status: number
constructor(message: string, status: number) {
super(message)
this.status = status
this.name = 'ApiError'
}
}
const initialState: CustomToolsState = {
tools: [],
isLoading: false,
error: null,
}
export const useCustomToolsStore = create<CustomToolsStore>()(
@@ -27,218 +14,9 @@ export const useCustomToolsStore = create<CustomToolsStore>()(
(set, get) => ({
...initialState,
fetchTools: async (workspaceId: string) => {
set({ isLoading: true, error: null })
try {
logger.info(`Fetching custom tools for workspace ${workspaceId}`)
const response = await fetch(`${API_ENDPOINT}?workspaceId=${workspaceId}`)
if (!response.ok) {
const errorData = await response.json().catch(() => ({}))
throw new Error(
errorData.error || `Failed to fetch custom tools: ${response.statusText}`
)
}
const { data } = await response.json()
if (!Array.isArray(data)) {
throw new Error('Invalid response format')
}
// Filter and validate tools
const validTools = data.filter((tool, index) => {
if (!tool || typeof tool !== 'object') {
logger.warn(`Skipping invalid tool at index ${index}: not an object`)
return false
}
if (!tool.id || typeof tool.id !== 'string') {
logger.warn(`Skipping invalid tool at index ${index}: missing or invalid id`)
return false
}
if (!tool.title || typeof tool.title !== 'string') {
logger.warn(`Skipping invalid tool at index ${index}: missing or invalid title`)
return false
}
if (!tool.schema || typeof tool.schema !== 'object') {
logger.warn(`Skipping invalid tool at index ${index}: missing or invalid schema`)
return false
}
if (!tool.code || typeof tool.code !== 'string') {
logger.warn(`Tool at index ${index} missing code field, defaulting to empty string`)
tool.code = ''
}
return true
})
set({
tools: validTools,
isLoading: false,
})
logger.info(`Fetched ${validTools.length} custom tools for workspace ${workspaceId}`)
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Failed to fetch tools'
logger.error('Error fetching custom tools:', error)
set({
error: errorMessage,
isLoading: false,
})
}
},
createTool: async (workspaceId: string, tool) => {
set({ isLoading: true, error: null })
try {
logger.info(`Creating custom tool: ${tool.title} in workspace ${workspaceId}`)
const response = await fetch(API_ENDPOINT, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
tools: [
{
title: tool.title,
schema: tool.schema,
code: tool.code,
},
],
workspaceId,
}),
})
const data = await response.json()
if (!response.ok) {
throw new ApiError(data.error || 'Failed to create tool', response.status)
}
if (!data.data || !Array.isArray(data.data)) {
throw new Error('Invalid API response: missing tools data')
}
set({ tools: data.data, isLoading: false })
const createdTool = get().tools.find((t) => t.title === tool.title)
if (!createdTool) {
throw new Error('Failed to retrieve created tool')
}
logger.info(`Created custom tool: ${createdTool.id}`)
return createdTool
} catch (error) {
logger.error('Error creating custom tool:', error)
set({ isLoading: false })
throw error
}
},
updateTool: async (workspaceId: string, id: string, updates) => {
const tool = get().tools.find((t) => t.id === id)
if (!tool) {
throw new Error('Tool not found')
}
await withOptimisticUpdate({
getCurrentState: () => get().tools,
optimisticUpdate: () => {
set((state) => ({
tools: state.tools.map((t) =>
t.id === id
? {
...t,
title: updates.title ?? t.title,
schema: updates.schema ?? t.schema,
code: updates.code ?? t.code,
}
: t
),
isLoading: true,
error: null,
}))
},
apiCall: async () => {
logger.info(`Updating custom tool: ${id} in workspace ${workspaceId}`)
const response = await fetch(API_ENDPOINT, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
tools: [
{
id,
title: updates.title ?? tool.title,
schema: updates.schema ?? tool.schema,
code: updates.code ?? tool.code,
},
],
workspaceId,
}),
})
const data = await response.json()
if (!response.ok) {
throw new ApiError(data.error || 'Failed to update tool', response.status)
}
if (!data.data || !Array.isArray(data.data)) {
throw new Error('Invalid API response: missing tools data')
}
set({ tools: data.data })
logger.info(`Updated custom tool: ${id}`)
},
rollback: (originalTools) => {
set({ tools: originalTools })
},
onComplete: () => {
set({ isLoading: false })
},
errorMessage: 'Error updating custom tool',
})
},
deleteTool: async (workspaceId: string | null, id: string) => {
await withOptimisticUpdate({
getCurrentState: () => get().tools,
optimisticUpdate: () => {
set((state) => ({
tools: state.tools.filter((tool) => tool.id !== id),
isLoading: true,
error: null,
}))
},
apiCall: async () => {
logger.info(`Deleting custom tool: ${id}`)
const url = workspaceId
? `${API_ENDPOINT}?id=${id}&workspaceId=${workspaceId}`
: `${API_ENDPOINT}?id=${id}`
const response = await fetch(url, {
method: 'DELETE',
})
const data = await response.json()
if (!response.ok) {
throw new Error(data.error || 'Failed to delete tool')
}
logger.info(`Deleted custom tool: ${id}`)
},
rollback: (originalTools) => {
set({ tools: originalTools })
},
onComplete: () => {
set({ isLoading: false })
},
errorMessage: 'Error deleting custom tool',
})
setTools: (tools) => {
logger.info(`Synced ${tools.length} custom tools`)
set({ tools })
},
getTool: (id: string) => {
@@ -249,8 +27,6 @@ export const useCustomToolsStore = create<CustomToolsStore>()(
return get().tools
},
clearError: () => set({ error: null }),
reset: () => set(initialState),
}),
{

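Sketch (assumption, not from this diff): with the fetch/create/update/delete actions removed, the slimmed store is presumably hydrated from a query layer through setTools, for example:

import type { CustomToolDefinition } from '@/stores/custom-tools/types' // path assumed
import { useCustomToolsStore } from '@/stores/custom-tools/store' // path assumed

// Hypothetical sync: push tools fetched elsewhere (e.g. a React Query hook)
// into the store so existing readers of getTool()/getAllTools() keep working.
export function syncCustomTools(tools: CustomToolDefinition[]) {
  useCustomToolsStore.getState().setTools(tools)
}
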
View File

@@ -24,27 +24,12 @@ export interface CustomToolDefinition {
export interface CustomToolsState {
tools: CustomToolDefinition[]
isLoading: boolean
error: string | null
}
export interface CustomToolsActions {
fetchTools: (workspaceId: string) => Promise<void>
createTool: (
workspaceId: string,
tool: Omit<CustomToolDefinition, 'id' | 'workspaceId' | 'userId' | 'createdAt' | 'updatedAt'>
) => Promise<CustomToolDefinition>
updateTool: (
workspaceId: string,
id: string,
updates: Partial<
Omit<CustomToolDefinition, 'id' | 'workspaceId' | 'userId' | 'createdAt' | 'updatedAt'>
>
) => Promise<void>
deleteTool: (workspaceId: string | null, id: string) => Promise<void>
setTools: (tools: CustomToolDefinition[]) => void
getTool: (id: string) => CustomToolDefinition | undefined
getAllTools: () => CustomToolDefinition[]
clearError: () => void
reset: () => void
}

View File

@@ -1,8 +1,6 @@
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import { createLogger } from '@/lib/logs/console/logger'
import { withOptimisticUpdate } from '@/lib/utils'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
const logger = createLogger('FoldersStore')
@@ -36,16 +34,10 @@ export interface FolderTreeNode extends WorkflowFolder {
interface FolderState {
folders: Record<string, WorkflowFolder>
isLoading: boolean
expandedFolders: Set<string>
selectedWorkflows: Set<string>
// Actions
setFolders: (folders: WorkflowFolder[]) => void
addFolder: (folder: WorkflowFolder) => void
updateFolder: (id: string, updates: Partial<WorkflowFolder>) => void
removeFolder: (id: string) => void
setLoading: (loading: boolean) => void
toggleExpanded: (folderId: string) => void
setExpanded: (folderId: string, expanded: boolean) => void
@@ -63,29 +55,12 @@ interface FolderState {
getFolderById: (id: string) => WorkflowFolder | undefined
getChildFolders: (parentId: string | null) => WorkflowFolder[]
getFolderPath: (folderId: string) => WorkflowFolder[]
// API actions
fetchFolders: (workspaceId: string) => Promise<void>
createFolder: (data: {
name: string
workspaceId: string
parentId?: string
color?: string
}) => Promise<WorkflowFolder>
updateFolderAPI: (id: string, updates: Partial<WorkflowFolder>) => Promise<WorkflowFolder>
deleteFolder: (id: string, workspaceId: string) => Promise<void>
duplicateFolder: (id: string) => Promise<string | null>
// Helper functions
isWorkflowInDeletedSubfolder: (workflow: Workflow, deletedFolderId: string) => boolean
removeSubfoldersRecursively: (parentFolderId: string) => void
}
export const useFolderStore = create<FolderState>()(
devtools(
(set, get) => ({
folders: {},
isLoading: false,
expandedFolders: new Set(),
selectedWorkflows: new Set(),
@@ -100,28 +75,6 @@ export const useFolderStore = create<FolderState>()(
),
})),
addFolder: (folder) =>
set((state) => ({
folders: { ...state.folders, [folder.id]: folder },
})),
updateFolder: (id, updates) =>
set((state) => ({
folders: {
...state.folders,
[id]: state.folders[id] ? { ...state.folders[id], ...updates } : state.folders[id],
},
})),
removeFolder: (id) =>
set((state) => {
const newFolders = { ...state.folders }
delete newFolders[id]
return { folders: newFolders }
}),
setLoading: (loading) => set({ isLoading: loading }),
toggleExpanded: (folderId) =>
set((state) => {
const newExpanded = new Set(state.expandedFolders)
@@ -225,235 +178,6 @@ export const useFolderStore = create<FolderState>()(
return path
},
fetchFolders: async (workspaceId) => {
set({ isLoading: true })
try {
const response = await fetch(`/api/folders?workspaceId=${workspaceId}`)
if (!response.ok) {
throw new Error('Failed to fetch folders')
}
const { folders }: { folders: any[] } = await response.json()
// Convert date strings to Date objects
const processedFolders: WorkflowFolder[] = folders.map((folder: any) => ({
id: folder.id,
name: folder.name,
userId: folder.userId,
workspaceId: folder.workspaceId,
parentId: folder.parentId,
color: folder.color,
isExpanded: folder.isExpanded,
sortOrder: folder.sortOrder,
createdAt: new Date(folder.createdAt),
updatedAt: new Date(folder.updatedAt),
}))
get().setFolders(processedFolders)
// Start with all folders collapsed - only active workflow path will be expanded by the UI
set({ expandedFolders: new Set() })
} catch (error) {
logger.error('Error fetching folders:', error)
} finally {
set({ isLoading: false })
}
},
createFolder: async (data) => {
const response = await fetch('/api/folders', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(data),
})
if (!response.ok) {
const error = await response.json()
throw new Error(error.error || 'Failed to create folder')
}
const { folder } = await response.json()
const processedFolder = {
...folder,
createdAt: new Date(folder.createdAt),
updatedAt: new Date(folder.updatedAt),
}
get().addFolder(processedFolder)
return processedFolder
},
updateFolderAPI: async (id, updates) => {
const originalFolder = get().folders[id]
if (!originalFolder) {
throw new Error('Folder not found')
}
let updatedFolder: WorkflowFolder | null = null
await withOptimisticUpdate({
getCurrentState: () => originalFolder,
optimisticUpdate: () => {
get().updateFolder(id, { ...updates, updatedAt: new Date() })
},
apiCall: async () => {
const response = await fetch(`/api/folders/${id}`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(updates),
})
if (!response.ok) {
const error = await response.json()
throw new Error(error.error || 'Failed to update folder')
}
const { folder } = await response.json()
const processedFolder = {
...folder,
createdAt: new Date(folder.createdAt),
updatedAt: new Date(folder.updatedAt),
}
get().updateFolder(id, processedFolder)
updatedFolder = processedFolder
},
rollback: (original) => {
get().updateFolder(id, original)
},
errorMessage: 'Failed to update folder',
})
return updatedFolder || { ...originalFolder, ...updates }
},
deleteFolder: async (id: string, workspaceId: string) => {
const getAllSubfolderIds = (parentId: string): string[] => {
const folders = get().folders
const childIds = Object.keys(folders).filter(
(folderId) => folders[folderId].parentId === parentId
)
const allIds = [...childIds]
childIds.forEach((childId) => {
allIds.push(...getAllSubfolderIds(childId))
})
return allIds
}
const deletedFolderIds = [id, ...getAllSubfolderIds(id)]
await withOptimisticUpdate({
getCurrentState: () => ({
folders: { ...get().folders },
expandedFolders: new Set(get().expandedFolders),
}),
optimisticUpdate: () => {
deletedFolderIds.forEach((folderId) => {
get().removeFolder(folderId)
})
set((state) => {
const newExpanded = new Set(state.expandedFolders)
deletedFolderIds.forEach((folderId) => newExpanded.delete(folderId))
return { expandedFolders: newExpanded }
})
},
apiCall: async () => {
const response = await fetch(`/api/folders/${id}`, { method: 'DELETE' })
if (!response.ok) {
const error = await response.json()
throw new Error(error.error || 'Failed to delete folder')
}
const responseData = await response.json()
logger.info(
`Deleted ${responseData.deletedItems.workflows} workflow(s) and ${responseData.deletedItems.folders} folder(s)`
)
const workflowRegistry = useWorkflowRegistry.getState()
await workflowRegistry.loadWorkflows(workspaceId)
},
rollback: (originalState) => {
set({ folders: originalState.folders, expandedFolders: originalState.expandedFolders })
},
errorMessage: 'Failed to delete folder',
})
},
duplicateFolder: async (id: string) => {
const sourceFolder = get().folders[id]
if (!sourceFolder) {
logger.error(`Folder ${id} not found`)
return null
}
try {
const response = await fetch(`/api/folders/${id}/duplicate`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
name: `${sourceFolder.name} (Copy)`,
workspaceId: sourceFolder.workspaceId,
parentId: sourceFolder.parentId,
color: sourceFolder.color,
}),
})
if (!response.ok) {
const error = await response.json()
throw new Error(error.error || 'Failed to duplicate folder')
}
const result = await response.json()
logger.info(
`Successfully duplicated folder ${id} to ${result.id} with ${result.foldersCount} folder(s) and ${result.workflowsCount} workflow(s)`
)
// Reload folders and workflows to reflect the duplication
const workflowRegistry = useWorkflowRegistry.getState()
await Promise.all([
get().fetchFolders(sourceFolder.workspaceId),
workflowRegistry.loadWorkflows(sourceFolder.workspaceId),
])
return result.id
} catch (error) {
logger.error(`Failed to duplicate folder ${id}:`, error)
throw error
}
},
isWorkflowInDeletedSubfolder: (workflow: Workflow, deletedFolderId: string) => {
if (!workflow.folderId) return false
const folders = get().folders
let currentFolderId: string | null = workflow.folderId
while (currentFolderId && folders[currentFolderId]) {
if (currentFolderId === deletedFolderId) {
return true
}
currentFolderId = folders[currentFolderId].parentId
}
return false
},
removeSubfoldersRecursively: (parentFolderId: string) => {
const folders = get().folders
const childFolderIds = Object.keys(folders).filter(
(id) => folders[id].parentId === parentFolderId
)
childFolderIds.forEach((childId) => {
get().removeSubfoldersRecursively(childId)
get().removeFolder(childId)
})
},
}),
{ name: 'folder-store' }
)

View File

@@ -1,50 +1,9 @@
import { create } from 'zustand'
import { createLogger } from '@/lib/logs/console/logger'
import { updateOllamaProviderModels, updateOpenRouterProviderModels } from '@/providers/utils'
import type { ProviderConfig, ProviderName, ProvidersStore } from './types'
import type { ProvidersStore } from './types'
const logger = createLogger('ProvidersStore')
const PROVIDER_CONFIGS: Record<ProviderName, ProviderConfig> = {
base: {
apiEndpoint: '/api/providers/base/models',
dedupeModels: true,
updateFunction: () => {},
},
ollama: {
apiEndpoint: '/api/providers/ollama/models',
updateFunction: updateOllamaProviderModels,
},
openrouter: {
apiEndpoint: '/api/providers/openrouter/models',
dedupeModels: true,
updateFunction: updateOpenRouterProviderModels,
},
}
const fetchProviderModels = async (provider: ProviderName): Promise<string[]> => {
try {
const config = PROVIDER_CONFIGS[provider]
const response = await fetch(config.apiEndpoint)
if (!response.ok) {
logger.warn(`Failed to fetch ${provider} models from API`, {
status: response.status,
statusText: response.statusText,
})
return []
}
const data = await response.json()
return data.models || []
} catch (error) {
logger.error(`Error fetching ${provider} models`, {
error: error instanceof Error ? error.message : 'Unknown error',
})
return []
}
}
export const useProvidersStore = create<ProvidersStore>((set, get) => ({
providers: {
base: { models: [], isLoading: false },
@@ -52,86 +11,32 @@ export const useProvidersStore = create<ProvidersStore>((set, get) => ({
openrouter: { models: [], isLoading: false },
},
setModels: (provider, models) => {
const config = PROVIDER_CONFIGS[provider]
const processedModels = config.dedupeModels ? Array.from(new Set(models)) : models
setProviderModels: (provider, models) => {
logger.info(`Updated ${provider} models`, { count: models.length })
set((state) => ({
providers: {
...state.providers,
[provider]: {
...state.providers[provider],
models: processedModels,
models,
},
},
}))
config.updateFunction(models)
},
fetchModels: async (provider) => {
if (typeof window === 'undefined') {
logger.info(`Skipping client-side ${provider} model fetch on server`)
return
}
const currentState = get().providers[provider]
if (currentState.isLoading) {
logger.info(`${provider} model fetch already in progress`)
return
}
if (currentState.models.length > 0) {
logger.info(`Skipping ${provider} model fetch - models already loaded`)
return
}
logger.info(`Fetching ${provider} models from API`)
setProviderLoading: (provider, isLoading) => {
set((state) => ({
providers: {
...state.providers,
[provider]: {
...state.providers[provider],
isLoading: true,
isLoading,
},
},
}))
try {
const models = await fetchProviderModels(provider)
logger.info(`Successfully fetched ${provider} models`, {
count: models.length,
...(provider === 'ollama' ? { models } : {}),
})
get().setModels(provider, models)
} catch (error) {
logger.error(`Failed to fetch ${provider} models`, {
error: error instanceof Error ? error.message : 'Unknown error',
})
} finally {
set((state) => ({
providers: {
...state.providers,
[provider]: {
...state.providers[provider],
isLoading: false,
},
},
}))
}
},
getProvider: (provider) => {
return get().providers[provider]
},
}))
if (typeof window !== 'undefined') {
setTimeout(() => {
const store = useProvidersStore.getState()
store.fetchModels('base')
store.fetchModels('ollama')
store.fetchModels('openrouter')
}, 1000)
}

View File

@@ -7,13 +7,7 @@ export interface ProviderState {
export interface ProvidersStore {
providers: Record<ProviderName, ProviderState>
setModels: (provider: ProviderName, models: string[]) => void
fetchModels: (provider: ProviderName) => Promise<void>
setProviderModels: (provider: ProviderName, models: string[]) => void
setProviderLoading: (provider: ProviderName, isLoading: boolean) => void
getProvider: (provider: ProviderName) => ProviderState
}
export interface ProviderConfig {
apiEndpoint: string
dedupeModels?: boolean
updateFunction: (models: string[]) => void | Promise<void>
}
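
Usage sketch (not part of this diff): callers now set provider models and loading state explicitly, e.g. after fetching from the existing endpoints; the store import path and this helper are hypothetical.

import { useProvidersStore } from '@/stores/providers/store' // path assumed

// Hypothetical fetch-and-store flow replacing the old store-internal fetchModels.
export async function loadOpenRouterModels() {
  const { setProviderLoading, setProviderModels } = useProvidersStore.getState()
  setProviderLoading('openrouter', true)
  try {
    const response = await fetch('/api/providers/openrouter/models')
    const data = await response.json()
    setProviderModels('openrouter', Array.isArray(data.models) ? data.models : [])
  } finally {
    setProviderLoading('openrouter', false)
  }
}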

View File

@@ -1,12 +1,7 @@
import { create } from 'zustand'
import { fetchPersonalEnvironment } from '@/lib/environment/api'
import { createLogger } from '@/lib/logs/console/logger'
import { withOptimisticUpdate } from '@/lib/utils'
import { API_ENDPOINTS } from '@/stores/constants'
import type {
CachedWorkspaceEnvData,
EnvironmentStore,
EnvironmentVariable,
} from '@/stores/settings/environment/types'
import type { EnvironmentStore, EnvironmentVariable } from '@/stores/settings/environment/types'
const logger = createLogger('EnvironmentStore')
@@ -14,187 +9,35 @@ export const useEnvironmentStore = create<EnvironmentStore>()((set, get) => ({
variables: {},
isLoading: false,
error: null,
workspaceEnvCache: new Map<string, CachedWorkspaceEnvData>(),
loadEnvironmentVariables: async () => {
try {
set({ isLoading: true, error: null })
const response = await fetch(API_ENDPOINTS.ENVIRONMENT)
if (!response.ok) {
throw new Error(`Failed to load environment variables: ${response.statusText}`)
}
const { data } = await response.json()
if (data && typeof data === 'object') {
set({
variables: data,
isLoading: false,
})
} else {
set({
variables: {},
isLoading: false,
})
}
const data = await fetchPersonalEnvironment()
set({ variables: data, isLoading: false })
} catch (error) {
logger.error('Error loading environment variables:', { error })
set({
error: error instanceof Error ? error.message : 'Unknown error',
isLoading: false,
})
throw error
}
},
saveEnvironmentVariables: async (variables: Record<string, string>) => {
const transformedVariables = Object.entries(variables).reduce(
(acc, [key, value]) => ({
...acc,
[key]: { key, value },
}),
{}
)
await withOptimisticUpdate({
getCurrentState: () => get().variables,
optimisticUpdate: () => {
set({ variables: transformedVariables, isLoading: true, error: null })
},
apiCall: async () => {
const response = await fetch(API_ENDPOINTS.ENVIRONMENT, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
variables: Object.entries(transformedVariables).reduce(
(acc, [key, value]) => ({
...acc,
[key]: (value as EnvironmentVariable).value,
}),
{}
),
}),
})
if (!response.ok) {
throw new Error(`Failed to save environment variables: ${response.statusText}`)
}
get().clearWorkspaceEnvCache()
},
rollback: (originalVariables) => {
set({ variables: originalVariables })
},
onComplete: () => {
set({ isLoading: false })
},
errorMessage: 'Error saving environment variables',
})
setVariables: (variables: Record<string, EnvironmentVariable>) => {
set({ variables })
},
loadWorkspaceEnvironment: async (workspaceId: string) => {
const cached = get().workspaceEnvCache.get(workspaceId)
if (cached) {
return {
workspace: cached.workspace,
personal: cached.personal,
conflicts: cached.conflicts,
}
}
try {
set({ isLoading: true, error: null })
const response = await fetch(API_ENDPOINTS.WORKSPACE_ENVIRONMENT(workspaceId))
if (!response.ok) {
throw new Error(`Failed to load workspace environment: ${response.statusText}`)
}
const { data } = await response.json()
const envData = data as {
workspace: Record<string, string>
personal: Record<string, string>
conflicts: string[]
}
const cache = new Map(get().workspaceEnvCache)
cache.set(workspaceId, {
...envData,
cachedAt: Date.now(),
})
set({ workspaceEnvCache: cache, isLoading: false })
return envData
} catch (error) {
logger.error('Error loading workspace environment:', { error })
set({ error: error instanceof Error ? error.message : 'Unknown error', isLoading: false })
return { workspace: {}, personal: {}, conflicts: [] }
}
},
upsertWorkspaceEnvironment: async (workspaceId: string, variables: Record<string, string>) => {
try {
set({ isLoading: true, error: null })
const response = await fetch(API_ENDPOINTS.WORKSPACE_ENVIRONMENT(workspaceId), {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ variables }),
})
if (!response.ok) {
throw new Error(`Failed to update workspace environment: ${response.statusText}`)
}
set({ isLoading: false })
get().clearWorkspaceEnvCache(workspaceId)
} catch (error) {
logger.error('Error updating workspace environment:', { error })
set({ error: error instanceof Error ? error.message : 'Unknown error', isLoading: false })
}
},
removeWorkspaceEnvironmentKeys: async (workspaceId: string, keys: string[]) => {
try {
set({ isLoading: true, error: null })
const response = await fetch(API_ENDPOINTS.WORKSPACE_ENVIRONMENT(workspaceId), {
method: 'DELETE',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ keys }),
})
if (!response.ok) {
throw new Error(`Failed to remove workspace environment keys: ${response.statusText}`)
}
set({ isLoading: false })
get().clearWorkspaceEnvCache(workspaceId)
} catch (error) {
logger.error('Error removing workspace environment keys:', { error })
set({ error: error instanceof Error ? error.message : 'Unknown error', isLoading: false })
}
},
getAllVariables: (): Record<string, EnvironmentVariable> => {
getAllVariables: () => {
return get().variables
},
clearWorkspaceEnvCache: (workspaceId?: string) => {
const cache = new Map(get().workspaceEnvCache)
if (workspaceId) {
cache.delete(workspaceId)
set({ workspaceEnvCache: cache })
} else {
set({ workspaceEnvCache: new Map() })
}
},
reset: () => {
set({
variables: {},
isLoading: false,
error: null,
workspaceEnvCache: new Map(),
})
},
}))
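
Usage sketch (not part of this diff): the trimmed store now covers only personal variables, loaded on demand; the call site below is hypothetical.

import { useEnvironmentStore } from '@/stores/settings/environment/store' // path assumed

// Hypothetical call site: load personal env vars once, then read them synchronously.
export async function ensurePersonalEnvLoaded() {
  await useEnvironmentStore.getState().loadEnvironmentVariables()
  return useEnvironmentStore.getState().getAllVariables()
}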

View File

@@ -14,25 +14,11 @@ export interface EnvironmentState {
variables: Record<string, EnvironmentVariable>
isLoading: boolean
error: string | null
workspaceEnvCache: Map<string, CachedWorkspaceEnvData>
}
export interface EnvironmentStore extends EnvironmentState {
loadEnvironmentVariables: () => Promise<void>
saveEnvironmentVariables: (variables: Record<string, string>) => Promise<void>
loadWorkspaceEnvironment: (workspaceId: string) => Promise<{
workspace: Record<string, string>
personal: Record<string, string>
conflicts: string[]
}>
upsertWorkspaceEnvironment: (
workspaceId: string,
variables: Record<string, string>
) => Promise<void>
removeWorkspaceEnvironmentKeys: (workspaceId: string, keys: string[]) => Promise<void>
setVariables: (variables: Record<string, EnvironmentVariable>) => void
getAllVariables: () => Record<string, EnvironmentVariable>
clearWorkspaceEnvCache: (workspaceId?: string) => void
reset: () => void
}

View File

@@ -1,294 +1,37 @@
import { create } from 'zustand'
import { devtools, persist } from 'zustand/middleware'
import { devtools } from 'zustand/middleware'
import { createLogger } from '@/lib/logs/console/logger'
// COMMENTED OUT: Theme switching disabled - dark mode is forced for workspace
// import { syncThemeToNextThemes } from '@/lib/theme-sync'
import { withOptimisticUpdate } from '@/lib/utils'
import type { General, GeneralStore, UserSettings } from '@/stores/settings/general/types'
import type { General, GeneralStore } from '@/stores/settings/general/types'
const logger = createLogger('GeneralStore')
const CACHE_TIMEOUT = 3600000 // 1 hour - settings rarely change
const MAX_ERROR_RETRIES = 2
const initialState: General = {
isAutoConnectEnabled: true,
isAutoPanEnabled: true,
isConsoleExpandedByDefault: true,
showFloatingControls: true,
showTrainingControls: false,
superUserModeEnabled: true,
theme: 'system',
telemetryEnabled: true,
isBillingUsageNotificationsEnabled: true,
}
export const useGeneralStore = create<GeneralStore>()(
devtools(
persist(
(set, get) => {
let lastLoadTime = 0
let errorRetryCount = 0
let hasLoadedFromDb = false // Track if we've loaded from DB in this session
const store: General = {
isAutoConnectEnabled: true,
isAutoPanEnabled: true,
isConsoleExpandedByDefault: true,
showFloatingControls: true,
showTrainingControls: false,
superUserModeEnabled: true,
theme: 'system' as const, // Keep for compatibility but not used
telemetryEnabled: true,
isLoading: false,
error: null,
// Individual loading states
isAutoConnectLoading: false,
isAutoPanLoading: false,
isConsoleExpandedByDefaultLoading: false,
isThemeLoading: false, // Keep for compatibility but not used
isTelemetryLoading: false,
isBillingUsageNotificationsLoading: false,
isBillingUsageNotificationsEnabled: true,
isFloatingControlsLoading: false,
isTrainingControlsLoading: false,
isSuperUserModeLoading: false,
}
const updateSettingOptimistic = async <K extends keyof UserSettings>(
key: K,
value: UserSettings[K],
loadingKey: keyof General,
stateKey: keyof General
) => {
if ((get() as any)[loadingKey]) return
await withOptimisticUpdate({
getCurrentState: () => (get() as any)[stateKey],
optimisticUpdate: () => set({ [stateKey]: value, [loadingKey]: true } as any),
apiCall: async () => {
await get().updateSetting(key, value)
},
rollback: (originalValue) => set({ [stateKey]: originalValue } as any),
onComplete: () => set({ [loadingKey]: false } as any),
errorMessage: `Failed to update ${String(key)}, rolled back`,
})
}
return {
...store,
toggleAutoConnect: async () => {
if (get().isAutoConnectLoading) return
const newValue = !get().isAutoConnectEnabled
await updateSettingOptimistic(
'autoConnect',
newValue,
'isAutoConnectLoading',
'isAutoConnectEnabled'
)
},
toggleAutoPan: async () => {
if (get().isAutoPanLoading) return
const newValue = !get().isAutoPanEnabled
await updateSettingOptimistic(
'autoPan',
newValue,
'isAutoPanLoading',
'isAutoPanEnabled'
)
},
toggleConsoleExpandedByDefault: async () => {
if (get().isConsoleExpandedByDefaultLoading) return
const newValue = !get().isConsoleExpandedByDefault
await updateSettingOptimistic(
'consoleExpandedByDefault',
newValue,
'isConsoleExpandedByDefaultLoading',
'isConsoleExpandedByDefault'
)
},
toggleFloatingControls: async () => {
if (get().isFloatingControlsLoading) return
const newValue = !get().showFloatingControls
await updateSettingOptimistic(
'showFloatingControls',
newValue,
'isFloatingControlsLoading',
'showFloatingControls'
)
},
toggleTrainingControls: async () => {
if (get().isTrainingControlsLoading) return
const newValue = !get().showTrainingControls
await updateSettingOptimistic(
'showTrainingControls',
newValue,
'isTrainingControlsLoading',
'showTrainingControls'
)
},
toggleSuperUserMode: async () => {
if (get().isSuperUserModeLoading) return
const newValue = !get().superUserModeEnabled
await updateSettingOptimistic(
'superUserModeEnabled',
newValue,
'isSuperUserModeLoading',
'superUserModeEnabled'
)
},
// COMMENTED OUT: Theme switching disabled - dark mode is forced for workspace
setTheme: async (theme) => {
if (get().isThemeLoading) return
// COMMENTED OUT: Dark mode is forced for workspace pages
// await withOptimisticUpdate({
// getCurrentState: () => get().theme,
// optimisticUpdate: () => {
// set({ theme, isThemeLoading: true })
// syncThemeToNextThemes(theme)
// },
// apiCall: async () => {
// await get().updateSetting('theme', theme)
// },
// rollback: (originalTheme) => {
// set({ theme: originalTheme })
// syncThemeToNextThemes(originalTheme)
// },
// onComplete: () => set({ isThemeLoading: false }),
// errorMessage: 'Failed to sync theme to database',
// })
},
setTelemetryEnabled: async (enabled) => {
if (get().isTelemetryLoading) return
await updateSettingOptimistic(
'telemetryEnabled',
enabled,
'isTelemetryLoading',
'telemetryEnabled'
)
},
setBillingUsageNotificationsEnabled: async (enabled: boolean) => {
if (get().isBillingUsageNotificationsLoading) return
await updateSettingOptimistic(
'isBillingUsageNotificationsEnabled',
enabled,
'isBillingUsageNotificationsLoading',
'isBillingUsageNotificationsEnabled'
)
},
// API Actions
loadSettings: async (force = false) => {
// Skip if we've already loaded from DB and not forcing
if (hasLoadedFromDb && !force) {
logger.debug('Already loaded settings from DB, using cached data')
return
}
// If we have persisted state and not forcing, check if we need to load
const persistedState = localStorage.getItem('general-settings')
if (persistedState && !force) {
try {
const parsed = JSON.parse(persistedState)
// If we have valid theme data, skip DB load unless forced
if (parsed.state?.theme) {
logger.debug('Using cached settings from localStorage')
hasLoadedFromDb = true // Mark as loaded to prevent future API calls
return
}
} catch (e) {
// If parsing fails, continue to load from DB
}
}
// Skip loading if on a chat path
if (typeof window !== 'undefined' && window.location.pathname.startsWith('/chat/')) {
logger.debug('Skipping settings load - on chat page')
return
}
// Skip loading if settings were recently loaded (within 5 seconds)
const now = Date.now()
if (!force && now - lastLoadTime < CACHE_TIMEOUT) {
logger.debug('Skipping settings load - recently loaded')
return
}
try {
set({ isLoading: true, error: null })
const response = await fetch('/api/users/me/settings')
if (!response.ok) {
throw new Error('Failed to fetch settings')
}
const { data } = await response.json()
set({
isAutoConnectEnabled: data.autoConnect,
isAutoPanEnabled: data.autoPan ?? true,
isConsoleExpandedByDefault: data.consoleExpandedByDefault ?? true,
showFloatingControls: data.showFloatingControls ?? true,
showTrainingControls: data.showTrainingControls ?? false,
superUserModeEnabled: data.superUserModeEnabled ?? true,
theme: data.theme || 'system',
telemetryEnabled: data.telemetryEnabled,
isBillingUsageNotificationsEnabled: data.billingUsageNotificationsEnabled ?? true,
isLoading: false,
})
// COMMENTED OUT: Theme switching disabled - dark mode is forced for workspace
// // Sync theme to next-themes if it's different
// if (data.theme && typeof window !== 'undefined') {
// const currentTheme = localStorage.getItem('sim-theme')
// if (currentTheme !== data.theme) {
// syncThemeToNextThemes(data.theme)
// }
// }
lastLoadTime = now
errorRetryCount = 0
hasLoadedFromDb = true
} catch (error) {
logger.error('Error loading settings:', error)
set({
error: error instanceof Error ? error.message : 'Unknown error',
isLoading: false,
})
}
},
updateSetting: async (key, value) => {
if (typeof window !== 'undefined' && window.location.pathname.startsWith('/chat/')) {
logger.debug(`Skipping setting update for ${key} on chat page`)
return
}
try {
const response = await fetch('/api/users/me/settings', {
method: 'PATCH',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ [key]: value }),
})
if (!response.ok) {
throw new Error(`Failed to update setting: ${key}`)
}
set({ error: null })
lastLoadTime = Date.now()
errorRetryCount = 0
} catch (error) {
logger.error(`Error updating setting ${key}:`, error)
set({ error: error instanceof Error ? error.message : 'Unknown error' })
// Don't auto-retry on individual setting updates to avoid conflicts
throw error
}
},
}
(set) => ({
...initialState,
setSettings: (settings) => {
logger.debug('Updating general settings store', {
keys: Object.keys(settings),
})
set((state) => ({
...state,
...settings,
}))
},
{
name: 'general-settings',
}
),
reset: () => set(initialState),
}),
{ name: 'general-store' }
)
)
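
Usage sketch (not part of this diff): with the API actions gone, the store is a plain settings mirror; sync code passes whatever subset it has to setSettings. The store import path and the wrapper function are assumptions.

import type { General } from '@/stores/settings/general/types'
import { useGeneralStore } from '@/stores/settings/general/store' // path assumed

// Hypothetical update: merge a partial settings payload into the store.
export function applyGeneralSettings(settings: Partial<General>) {
  useGeneralStore.getState().setSettings(settings)
}

// Example call, using fields defined on General:
applyGeneralSettings({ isAutoConnectEnabled: false, telemetryEnabled: false })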

View File

@@ -7,36 +7,14 @@ export interface General {
superUserModeEnabled: boolean
theme: 'system' | 'light' | 'dark'
telemetryEnabled: boolean
isLoading: boolean
error: string | null
isAutoConnectLoading: boolean
isAutoPanLoading: boolean
isConsoleExpandedByDefaultLoading: boolean
isThemeLoading: boolean
isTelemetryLoading: boolean
isBillingUsageNotificationsLoading: boolean
isBillingUsageNotificationsEnabled: boolean
isFloatingControlsLoading: boolean
isTrainingControlsLoading: boolean
isSuperUserModeLoading: boolean
}
export interface GeneralActions {
toggleAutoConnect: () => Promise<void>
toggleAutoPan: () => Promise<void>
toggleConsoleExpandedByDefault: () => Promise<void>
toggleFloatingControls: () => Promise<void>
toggleTrainingControls: () => Promise<void>
toggleSuperUserMode: () => Promise<void>
setTheme: (theme: 'system' | 'light' | 'dark') => Promise<void>
setTelemetryEnabled: (enabled: boolean) => Promise<void>
setBillingUsageNotificationsEnabled: (enabled: boolean) => Promise<void>
loadSettings: (force?: boolean) => Promise<void>
updateSetting: <K extends keyof UserSettings>(key: K, value: UserSettings[K]) => Promise<void>
export interface GeneralStore extends General {
setSettings: (settings: Partial<General>) => void
reset: () => void
}
export type GeneralStore = General & GeneralActions
export type UserSettings = {
theme: 'system' | 'light' | 'dark'
autoConnect: boolean