diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/create-chunk-modal/create-chunk-modal.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/create-chunk-modal/create-chunk-modal.tsx
index 0bce7c588..ef1744d21 100644
--- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/create-chunk-modal/create-chunk-modal.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/create-chunk-modal/create-chunk-modal.tsx
@@ -2,7 +2,6 @@

 import { useRef, useState } from 'react'
 import { createLogger } from '@sim/logger'
-import { useQueryClient } from '@tanstack/react-query'
 import {
   Button,
   Label,
@@ -14,7 +13,7 @@ import {
   Textarea,
 } from '@/components/emcn'
 import type { DocumentData } from '@/lib/knowledge/types'
-import { knowledgeKeys } from '@/hooks/queries/knowledge'
+import { useCreateChunk } from '@/hooks/queries/knowledge'

 const logger = createLogger('CreateChunkModal')

@@ -31,16 +30,15 @@ export function CreateChunkModal({
   document,
   knowledgeBaseId,
 }: CreateChunkModalProps) {
-  const queryClient = useQueryClient()
+  const { mutate: createChunk, isPending: isCreating, error: mutationError } = useCreateChunk()
   const [content, setContent] = useState('')
-  const [isCreating, setIsCreating] = useState(false)
-  const [error, setError] = useState<string | null>(null)
   const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
   const isProcessingRef = useRef(false)
+  const error = mutationError?.message ?? null

   const hasUnsavedChanges = content.trim().length > 0

-  const handleCreateChunk = async () => {
+  const handleCreateChunk = () => {
     if (!document || content.trim().length === 0 || isProcessingRef.current) {
       if (isProcessingRef.current) {
         logger.warn('Chunk creation already in progress, ignoring duplicate request')
@@ -48,56 +46,30 @@ export function CreateChunkModal({
       return
     }

-    try {
-      isProcessingRef.current = true
-      setIsCreating(true)
-      setError(null)
+    isProcessingRef.current = true

-      const response = await fetch(
-        `/api/knowledge/${knowledgeBaseId}/documents/${document.id}/chunks`,
-        {
-          method: 'POST',
-          headers: {
-            'Content-Type': 'application/json',
-          },
-          body: JSON.stringify({
-            content: content.trim(),
-            enabled: true,
-          }),
-        }
-      )
-
-      if (!response.ok) {
-        const result = await response.json()
-        throw new Error(result.error || 'Failed to create chunk')
+    createChunk(
+      {
+        knowledgeBaseId,
+        documentId: document.id,
+        content: content.trim(),
+        enabled: true,
+      },
+      {
+        onSuccess: () => {
+          isProcessingRef.current = false
+          onClose()
+        },
+        onError: () => {
+          isProcessingRef.current = false
+        },
       }
-
-      const result = await response.json()
-
-      if (result.success && result.data) {
-        logger.info('Chunk created successfully:', result.data.id)
-
-        await queryClient.invalidateQueries({
-          queryKey: knowledgeKeys.detail(knowledgeBaseId),
-        })
-
-        onClose()
-      } else {
-        throw new Error(result.error || 'Failed to create chunk')
-      }
-    } catch (err) {
-      logger.error('Error creating chunk:', err)
-      setError(err instanceof Error ? err.message : 'An error occurred')
-    } finally {
-      isProcessingRef.current = false
-      setIsCreating(false)
-    }
+    )
   }

   const onClose = () => {
     onOpenChange(false)
     setContent('')
-    setError(null)
     setShowUnsavedChangesAlert(false)
   }

diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/delete-chunk-modal/delete-chunk-modal.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/delete-chunk-modal/delete-chunk-modal.tsx
index ff841ddec..fcebce6b8 100644
--- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/delete-chunk-modal/delete-chunk-modal.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/delete-chunk-modal/delete-chunk-modal.tsx
@@ -1,13 +1,8 @@
 'use client'

-import { useState } from 'react'
-import { createLogger } from '@sim/logger'
-import { useQueryClient } from '@tanstack/react-query'
 import { Button, Modal, ModalBody, ModalContent, ModalFooter, ModalHeader } from '@/components/emcn'
 import type { ChunkData } from '@/lib/knowledge/types'
-import { knowledgeKeys } from '@/hooks/queries/knowledge'
-
-const logger = createLogger('DeleteChunkModal')
+import { useDeleteChunk } from '@/hooks/queries/knowledge'

 interface DeleteChunkModalProps {
   chunk: ChunkData | null
@@ -24,44 +19,12 @@ export function DeleteChunkModal({
   isOpen,
   onClose,
 }: DeleteChunkModalProps) {
-  const queryClient = useQueryClient()
-  const [isDeleting, setIsDeleting] = useState(false)
+  const { mutate: deleteChunk, isPending: isDeleting } = useDeleteChunk()

-  const handleDeleteChunk = async () => {
+  const handleDeleteChunk = () => {
     if (!chunk || isDeleting) return

-    try {
-      setIsDeleting(true)
-
-      const response = await fetch(
-        `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks/${chunk.id}`,
-        {
-          method: 'DELETE',
-        }
-      )
-
-      if (!response.ok) {
-        throw new Error('Failed to delete chunk')
-      }
-
-      const result = await response.json()
-
-      if (result.success) {
-        logger.info('Chunk deleted successfully:', chunk.id)
-
-        await queryClient.invalidateQueries({
-          queryKey: knowledgeKeys.detail(knowledgeBaseId),
-        })
-
-        onClose()
-      } else {
-        throw new Error(result.error || 'Failed to delete chunk')
-      }
-    } catch (err) {
-      logger.error('Error deleting chunk:', err)
-    } finally {
-      setIsDeleting(false)
-    }
+    deleteChunk({ knowledgeBaseId, documentId, chunkId: chunk.id }, { onSuccess: onClose })
   }

   if (!chunk) return null
diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/edit-chunk-modal/edit-chunk-modal.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/edit-chunk-modal/edit-chunk-modal.tsx
index 60aa328f3..ea2e456e2 100644
--- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/edit-chunk-modal/edit-chunk-modal.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components/edit-chunk-modal/edit-chunk-modal.tsx
@@ -2,7 +2,6 @@

 import { useEffect, useMemo, useRef, useState } from 'react'
 import { createLogger } from '@sim/logger'
-import { useQueryClient } from '@tanstack/react-query'
 import { ChevronDown, ChevronUp } from 'lucide-react'
 import {
   Button,
@@ -19,7 +18,7 @@ import {
 import type { ChunkData, DocumentData } from '@/lib/knowledge/types'
 import { getAccurateTokenCount, getTokenStrings } from '@/lib/tokenization/estimators'
 import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
-import { knowledgeKeys } from '@/hooks/queries/knowledge'
+import { useUpdateChunk } from '@/hooks/queries/knowledge'

 const logger = createLogger('EditChunkModal')

@@ -50,17 +49,17 @@ export function EditChunkModal({
   onNavigateToPage,
   maxChunkSize,
 }: EditChunkModalProps) {
-  const queryClient = useQueryClient()
   const userPermissions = useUserPermissionsContext()
+  const { mutate: updateChunk, isPending: isSaving, error: mutationError } = useUpdateChunk()
   const [editedContent, setEditedContent] = useState(chunk?.content || '')
-  const [isSaving, setIsSaving] = useState(false)
   const [isNavigating, setIsNavigating] = useState(false)
-  const [error, setError] = useState<string | null>(null)
   const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
   const [pendingNavigation, setPendingNavigation] = useState<(() => void) | null>(null)
   const [tokenizerOn, setTokenizerOn] = useState(false)
   const textareaRef = useRef<HTMLTextAreaElement | null>(null)

+  const error = mutationError?.message ?? null
+
   const hasUnsavedChanges = editedContent !== (chunk?.content || '')

   const tokenStrings = useMemo(() => {
@@ -102,44 +101,15 @@ export function EditChunkModal({
   const canNavigatePrev = currentChunkIndex > 0 || currentPage > 1
   const canNavigateNext = currentChunkIndex < allChunks.length - 1 || currentPage < totalPages

-  const handleSaveContent = async () => {
+  const handleSaveContent = () => {
     if (!chunk || !document) return

-    try {
-      setIsSaving(true)
-      setError(null)
-
-      const response = await fetch(
-        `/api/knowledge/${knowledgeBaseId}/documents/${document.id}/chunks/${chunk.id}`,
-        {
-          method: 'PUT',
-          headers: {
-            'Content-Type': 'application/json',
-          },
-          body: JSON.stringify({
-            content: editedContent,
-          }),
-        }
-      )
-
-      if (!response.ok) {
-        const result = await response.json()
-        throw new Error(result.error || 'Failed to update chunk')
-      }
-
-      const result = await response.json()
-
-      if (result.success) {
-        await queryClient.invalidateQueries({
-          queryKey: knowledgeKeys.detail(knowledgeBaseId),
-        })
-      }
-    } catch (err) {
-      logger.error('Error updating chunk:', err)
-      setError(err instanceof Error ? err.message : 'An error occurred')
-    } finally {
-      setIsSaving(false)
-    }
+    updateChunk({
+      knowledgeBaseId,
+      documentId: document.id,
+      chunkId: chunk.id,
+      content: editedContent,
+    })
   }

   const navigateToChunk = async (direction: 'prev' | 'next') => {
@@ -165,7 +135,6 @@ export function EditChunkModal({
       }
     } catch (err) {
       logger.error(`Error navigating ${direction}:`, err)
-      setError(`Failed to navigate to ${direction === 'prev' ? 'previous' : 'next'} chunk`)
     } finally {
       setIsNavigating(false)
     }
diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/document.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/document.tsx
index 7c724a177..9fbb90cb4 100644
--- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/document.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/document.tsx
@@ -48,7 +48,13 @@ import { ActionBar } from '@/app/workspace/[workspaceId]/knowledge/[id]/componen
 import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
 import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
 import { useDocument, useDocumentChunks, useKnowledgeBase } from '@/hooks/kb/use-knowledge'
-import { knowledgeKeys, useDocumentChunkSearchQuery } from '@/hooks/queries/knowledge'
+import {
+  knowledgeKeys,
+  useBulkChunkOperation,
+  useDeleteDocument,
+  useDocumentChunkSearchQuery,
+  useUpdateChunk,
+} from '@/hooks/queries/knowledge'

 const logger = createLogger('Document')

@@ -403,11 +409,13 @@ export function Document({
   const [isCreateChunkModalOpen, setIsCreateChunkModalOpen] = useState(false)
   const [chunkToDelete, setChunkToDelete] = useState<ChunkData | null>(null)
   const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false)
-  const [isBulkOperating, setIsBulkOperating] = useState(false)
   const [showDeleteDocumentDialog, setShowDeleteDocumentDialog] = useState(false)
-  const [isDeletingDocument, setIsDeletingDocument] = useState(false)
   const [contextMenuChunk, setContextMenuChunk] = useState<ChunkData | null>(null)

+  const { mutate: updateChunkMutation } = useUpdateChunk()
+  const { mutate: deleteDocumentMutation, isPending: isDeletingDocument } = useDeleteDocument()
+  const { mutate: bulkChunkMutation, isPending: isBulkOperating } = useBulkChunkOperation()
+
   const {
     isOpen: isContextMenuOpen,
     position: contextMenuPosition,
@@ -440,36 +448,23 @@ export function Document({
     setSelectedChunk(null)
   }

-  const handleToggleEnabled = async (chunkId: string) => {
+  const handleToggleEnabled = (chunkId: string) => {
     const chunk = displayChunks.find((c) => c.id === chunkId)
     if (!chunk) return

-    try {
-      const response = await fetch(
-        `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks/${chunkId}`,
-        {
-          method: 'PUT',
-          headers: {
-            'Content-Type': 'application/json',
-          },
-          body: JSON.stringify({
-            enabled: !chunk.enabled,
-          }),
-        }
-      )
-
-      if (!response.ok) {
-        throw new Error('Failed to update chunk')
+    updateChunkMutation(
+      {
+        knowledgeBaseId,
+        documentId,
+        chunkId,
+        enabled: !chunk.enabled,
+      },
+      {
+        onSuccess: () => {
+          updateChunk(chunkId, { enabled: !chunk.enabled })
+        },
       }
-
-      const result = await response.json()
-
-      if (result.success) {
-        updateChunk(chunkId, { enabled: !chunk.enabled })
-      }
-    } catch (err) {
-      logger.error('Error updating chunk:', err)
-    }
+    )
   }

   const handleDeleteChunk = (chunkId: string) => {
@@ -515,107 +510,69 @@ export function Document({
   /**
    * Handles deleting the document
    */
-  const handleDeleteDocument = async () => {
+  const handleDeleteDocument = () => {
     if (!documentData) return

-    try {
-      setIsDeletingDocument(true)
-
-      const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
-        method: 'DELETE',
-      })
-
-      if (!response.ok) {
-        throw new Error('Failed to delete document')
+    deleteDocumentMutation(
+      { knowledgeBaseId, documentId },
+      {
+        onSuccess: () => {
+          router.push(`/workspace/${workspaceId}/knowledge/${knowledgeBaseId}`)
+        },
       }
-
-      const result = await response.json()
-
-      if (result.success) {
-        await queryClient.invalidateQueries({
-          queryKey: knowledgeKeys.detail(knowledgeBaseId),
-        })
-
-        router.push(`/workspace/${workspaceId}/knowledge/${knowledgeBaseId}`)
-      } else {
-        throw new Error(result.error || 'Failed to delete document')
-      }
-    } catch (err) {
-      logger.error('Error deleting document:', err)
-      setIsDeletingDocument(false)
-    }
+    )
   }

-  const performBulkChunkOperation = async (
+  const performBulkChunkOperation = (
     operation: 'enable' | 'disable' | 'delete',
     chunks: ChunkData[]
   ) => {
     if (chunks.length === 0) return

-    try {
-      setIsBulkOperating(true)
-
-      const response = await fetch(
-        `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks`,
-        {
-          method: 'PATCH',
-          headers: {
-            'Content-Type': 'application/json',
-          },
-          body: JSON.stringify({
-            operation,
-            chunkIds: chunks.map((chunk) => chunk.id),
-          }),
-        }
-      )
-
-      if (!response.ok) {
-        throw new Error(`Failed to ${operation} chunks`)
+    bulkChunkMutation(
+      {
+        knowledgeBaseId,
+        documentId,
+        operation,
+        chunkIds: chunks.map((chunk) => chunk.id),
+      },
+      {
+        onSuccess: (result) => {
+          if (operation === 'delete') {
+            refreshChunks()
+          } else {
+            result.results.forEach((opResult) => {
+              if (opResult.operation === operation) {
+                opResult.chunkIds.forEach((chunkId: string) => {
+                  updateChunk(chunkId, { enabled: operation === 'enable' })
+                })
+              }
+            })
+          }
+          logger.info(`Successfully ${operation}d ${result.successCount} chunks`)
+          setSelectedChunks(new Set())
+        },
       }
-
-      const result = await response.json()
-
-      if (result.success) {
-        if (operation === 'delete') {
-          await refreshChunks()
-        } else {
-          result.data.results.forEach((opResult: any) => {
-            if (opResult.operation === operation) {
-              opResult.chunkIds.forEach((chunkId: string) => {
-                updateChunk(chunkId, { enabled: operation === 'enable' })
-              })
-            }
-          })
-        }
-
-        logger.info(`Successfully ${operation}d ${result.data.successCount} chunks`)
-      }
-
-      setSelectedChunks(new Set())
-    } catch (err) {
-      logger.error(`Error ${operation}ing chunks:`, err)
-    } finally {
-      setIsBulkOperating(false)
-    }
+    )
   }

-  const handleBulkEnable = async () => {
+  const handleBulkEnable = () => {
     const chunksToEnable = displayChunks.filter(
       (chunk) => selectedChunks.has(chunk.id) && !chunk.enabled
     )
-    await performBulkChunkOperation('enable', chunksToEnable)
+    performBulkChunkOperation('enable', chunksToEnable)
   }

-  const handleBulkDisable = async () => {
+  const handleBulkDisable = () => {
     const chunksToDisable = displayChunks.filter(
       (chunk) => selectedChunks.has(chunk.id) && chunk.enabled
     )
-    await performBulkChunkOperation('disable', chunksToDisable)
+    performBulkChunkOperation('disable', chunksToDisable)
   }

-  const handleBulkDelete = async () => {
+  const handleBulkDelete = () => {
     const chunksToDelete = displayChunks.filter((chunk) => selectedChunks.has(chunk.id))
-    await performBulkChunkOperation('delete', chunksToDelete)
+    performBulkChunkOperation('delete', chunksToDelete)
   }

   const selectedChunksList = displayChunks.filter((chunk) => selectedChunks.has(chunk.id))
diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/base.tsx b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/base.tsx
index da1f19e54..81d30f53d 100644
--- a/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/base.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/knowledge/[id]/base.tsx
@@ -2,7 +2,6 @@

 import { useCallback, useEffect, useRef, useState } from 'react'
 import { createLogger } from '@sim/logger'
-import { useQueryClient } from '@tanstack/react-query'
 import { format } from 'date-fns'
 import {
   AlertCircle,
@@ -62,7 +61,12 @@ import {
   type TagDefinition,
   useKnowledgeBaseTagDefinitions,
 } from '@/hooks/kb/use-knowledge-base-tag-definitions'
-import { knowledgeKeys } from '@/hooks/queries/knowledge'
+import {
+  useBulkDocumentOperation,
+  useDeleteDocument,
+  useDeleteKnowledgeBase,
+  useUpdateDocument,
+} from '@/hooks/queries/knowledge'

 const logger = createLogger('KnowledgeBase')

@@ -407,12 +411,17 @@ export function KnowledgeBase({
   id,
   knowledgeBaseName: passedKnowledgeBaseName,
 }: KnowledgeBaseProps) {
-  const queryClient = useQueryClient()
   const params = useParams()
   const workspaceId = params.workspaceId as string
   const { removeKnowledgeBase } = useKnowledgeBasesList(workspaceId, { enabled: false })
   const userPermissions = useUserPermissionsContext()

+  const { mutate: updateDocumentMutation } = useUpdateDocument()
+  const { mutate: deleteDocumentMutation } = useDeleteDocument()
+  const { mutate: deleteKnowledgeBaseMutation, isPending: isDeleting } =
+    useDeleteKnowledgeBase(workspaceId)
+  const { mutate: bulkDocumentMutation, isPending: isBulkOperating } = useBulkDocumentOperation()
+
   const [searchQuery, setSearchQuery] = useState('')
   const [showTagsModal, setShowTagsModal] = useState(false)

@@ -427,8 +436,6 @@ export function KnowledgeBase({
   const [selectedDocuments, setSelectedDocuments] = useState<Set<string>>(new Set())
   const [showDeleteDialog, setShowDeleteDialog] = useState(false)
   const [showAddDocumentsModal, setShowAddDocumentsModal] = useState(false)
-  const [isDeleting, setIsDeleting] = useState(false)
-  const [isBulkOperating, setIsBulkOperating] = useState(false)
   const [showDeleteDocumentModal, setShowDeleteDocumentModal] = useState(false)
   const [documentToDelete, setDocumentToDelete] = useState<string | null>(null)
   const [showBulkDeleteModal, setShowBulkDeleteModal] = useState(false)
@@ -550,7 +557,7 @@ export function KnowledgeBase({
   /**
    * Checks for documents with stale processing states and marks them as failed
    */
-  const checkForDeadProcesses = async () => {
+  const checkForDeadProcesses = () => {
     const now = new Date()
     const DEAD_PROCESS_THRESHOLD_MS = 600 * 1000 // 10 minutes

@@ -567,116 +574,79 @@ export function KnowledgeBase({
     logger.warn(`Found ${staleDocuments.length} documents with dead processes`)

-    const markFailedPromises = staleDocuments.map(async (doc) => {
-      try {
-        const response = await fetch(`/api/knowledge/${id}/documents/${doc.id}`, {
-          method: 'PUT',
-          headers: {
-            'Content-Type': 'application/json',
+    staleDocuments.forEach((doc) => {
+      updateDocumentMutation(
+        {
+          knowledgeBaseId: id,
+          documentId: doc.id,
+          updates: { markFailedDueToTimeout: true },
+        },
+        {
+          onSuccess: () => {
+            logger.info(`Successfully marked dead process as failed for document: ${doc.filename}`)
          },
-          body: JSON.stringify({
-            markFailedDueToTimeout: true,
-          }),
-        })
-
-        if (!response.ok) {
-          const errorData = await response.json().catch(() => ({ error: 'Unknown error' }))
-          logger.error(`Failed to mark document ${doc.id} as failed: ${errorData.error}`)
-          return
        }
-
-        const result = await response.json()
-        if (result.success) {
-          logger.info(`Successfully marked dead process as failed for document: ${doc.filename}`)
-        }
-      } catch (error) {
-        logger.error(`Error marking document ${doc.id} as failed:`, error)
-      }
+      )
     })
-
-    await Promise.allSettled(markFailedPromises)
   }

-  const handleToggleEnabled = async (docId: string) => {
+  const handleToggleEnabled = (docId: string) => {
     const document = documents.find((doc) => doc.id === docId)
     if (!document) return

     const newEnabled = !document.enabled
+    // Optimistic update
     updateDocument(docId, { enabled: newEnabled })

-    try {
-      const response = await fetch(`/api/knowledge/${id}/documents/${docId}`, {
-        method: 'PUT',
-        headers: {
-          'Content-Type': 'application/json',
+    updateDocumentMutation(
+      {
+        knowledgeBaseId: id,
+        documentId: docId,
+        updates: { enabled: newEnabled },
+      },
+      {
+        onError: () => {
+          // Rollback on error
+          updateDocument(docId, { enabled: !newEnabled })
         },
-        body: JSON.stringify({
-          enabled: newEnabled,
-        }),
-      })
-
-      if (!response.ok) {
-        throw new Error('Failed to update document')
       }
-
-      const result = await response.json()
-
-      if (!result.success) {
-        updateDocument(docId, { enabled: !newEnabled })
-      }
-    } catch (err) {
-      updateDocument(docId, { enabled: !newEnabled })
-      logger.error('Error updating document:', err)
-    }
+    )
   }

   /**
    * Handles retrying a failed document processing
    */
-  const handleRetryDocument = async (docId: string) => {
-    try {
-      updateDocument(docId, {
-        processingStatus: 'pending',
-        processingError: null,
-        processingStartedAt: null,
-        processingCompletedAt: null,
-      })
+  const handleRetryDocument = (docId: string) => {
+    // Optimistic update
+    updateDocument(docId, {
+      processingStatus: 'pending',
+      processingError: null,
+      processingStartedAt: null,
+      processingCompletedAt: null,
+    })

-      const response = await fetch(`/api/knowledge/${id}/documents/${docId}`, {
-        method: 'PUT',
-        headers: {
-          'Content-Type': 'application/json',
+    updateDocumentMutation(
+      {
+        knowledgeBaseId: id,
+        documentId: docId,
+        updates: { retryProcessing: true },
+      },
+      {
+        onSuccess: () => {
+          refreshDocuments()
+          logger.info(`Document retry initiated successfully for: ${docId}`)
+        },
+        onError: (err) => {
+          logger.error('Error retrying document:', err)
+          updateDocument(docId, {
+            processingStatus: 'failed',
+            processingError:
+              err instanceof Error ? err.message : 'Failed to retry document processing',
+          })
         },
-        body: JSON.stringify({
-          retryProcessing: true,
-        }),
-      })
-
-      if (!response.ok) {
-        throw new Error('Failed to retry document processing')
       }
-
-      const result = await response.json()
-
-      if (!result.success) {
-        throw new Error(result.error || 'Failed to retry document processing')
-      }
-
-      await refreshDocuments()
-
-      logger.info(`Document retry initiated successfully for: ${docId}`)
-    } catch (err) {
-      logger.error('Error retrying document:', err)
-      const currentDoc = documents.find((doc) => doc.id === docId)
-      if (currentDoc) {
-        updateDocument(docId, {
-          processingStatus: 'failed',
-          processingError:
-            err instanceof Error ? err.message : 'Failed to retry document processing',
-        })
-      }
-    }
+    )
   }

   /**
@@ -694,43 +664,32 @@ export function KnowledgeBase({
     const currentDoc = documents.find((doc) => doc.id === documentId)
     const previousName = currentDoc?.filename

+    // Optimistic update
     updateDocument(documentId, { filename: newName })
-    queryClient.setQueryData<DocumentData>(knowledgeKeys.document(id, documentId), (previous) =>
-      previous ? { ...previous, filename: newName } : previous
-    )

-    try {
-      const response = await fetch(`/api/knowledge/${id}/documents/${documentId}`, {
-        method: 'PUT',
-        headers: {
-          'Content-Type': 'application/json',
+    return new Promise<void>((resolve, reject) => {
+      updateDocumentMutation(
+        {
+          knowledgeBaseId: id,
+          documentId,
+          updates: { filename: newName },
         },
-        body: JSON.stringify({ filename: newName }),
-      })
-
-      if (!response.ok) {
-        const result = await response.json()
-        throw new Error(result.error || 'Failed to rename document')
-      }
-
-      const result = await response.json()
-
-      if (!result.success) {
-        throw new Error(result.error || 'Failed to rename document')
-      }
-
-      logger.info(`Document renamed: ${documentId}`)
-    } catch (err) {
-      if (previousName !== undefined) {
-        updateDocument(documentId, { filename: previousName })
-        queryClient.setQueryData<DocumentData>(
-          knowledgeKeys.document(id, documentId),
-          (previous) => (previous ? { ...previous, filename: previousName } : previous)
-        )
-      }
-      logger.error('Error renaming document:', err)
-      throw err
-    }
+        {
+          onSuccess: () => {
+            logger.info(`Document renamed: ${documentId}`)
+            resolve()
+          },
+          onError: (err) => {
+            // Rollback on error
+            if (previousName !== undefined) {
+              updateDocument(documentId, { filename: previousName })
+            }
+            logger.error('Error renaming document:', err)
+            reject(err)
+          },
+        }
+      )
+    })
   }

   /**
@@ -744,35 +703,26 @@ export function KnowledgeBase({
   /**
    * Confirms and executes the deletion of a single document
    */
-  const confirmDeleteDocument = async () => {
+  const confirmDeleteDocument = () => {
     if (!documentToDelete) return

-    try {
-      const response = await fetch(`/api/knowledge/${id}/documents/${documentToDelete}`, {
-        method: 'DELETE',
-      })
-
-      if (!response.ok) {
-        throw new Error('Failed to delete document')
+    deleteDocumentMutation(
+      { knowledgeBaseId: id, documentId: documentToDelete },
+      {
+        onSuccess: () => {
+          refreshDocuments()
+          setSelectedDocuments((prev) => {
+            const newSet = new Set(prev)
+            newSet.delete(documentToDelete)
+            return newSet
+          })
+        },
+        onSettled: () => {
+          setShowDeleteDocumentModal(false)
+          setDocumentToDelete(null)
+        },
       }
-
-      const result = await response.json()
-
-      if (result.success) {
-        refreshDocuments()
-
-        setSelectedDocuments((prev) => {
-          const newSet = new Set(prev)
-          newSet.delete(documentToDelete)
-          return newSet
-        })
-      }
-    } catch (err) {
-      logger.error('Error deleting document:', err)
-    } finally {
-      setShowDeleteDocumentModal(false)
-      setDocumentToDelete(null)
-    }
+    )
   }

   /**
@@ -818,32 +768,18 @@ export function KnowledgeBase({
   /**
    * Handles deleting the entire knowledge base
    */
-  const handleDeleteKnowledgeBase = async () => {
+  const handleDeleteKnowledgeBase = () => {
     if (!knowledgeBase) return

-    try {
-      setIsDeleting(true)
-
-      const response = await fetch(`/api/knowledge/${id}`, {
-        method: 'DELETE',
-      })
-
-      if (!response.ok) {
-        throw new Error('Failed to delete knowledge base')
+    deleteKnowledgeBaseMutation(
+      { knowledgeBaseId: id },
+      {
+        onSuccess: () => {
+          removeKnowledgeBase(id)
+          router.push(`/workspace/${workspaceId}/knowledge`)
+        },
       }
-
-      const result = await response.json()
-
-      if (result.success) {
-        removeKnowledgeBase(id)
-        router.push(`/workspace/${workspaceId}/knowledge`)
-      } else {
-        throw new Error(result.error || 'Failed to delete knowledge base')
-      }
-    } catch (err) {
-      logger.error('Error deleting knowledge base:', err)
-      setIsDeleting(false)
-    }
+    )
   }

   /**
@@ -856,93 +792,57 @@ export function KnowledgeBase({
   /**
    * Handles bulk enabling of selected documents
    */
-  const handleBulkEnable = async () => {
+  const handleBulkEnable = () => {
     const documentsToEnable = documents.filter(
       (doc) => selectedDocuments.has(doc.id) && !doc.enabled
     )
     if (documentsToEnable.length === 0) return

-    try {
-      setIsBulkOperating(true)
-
-      const response = await fetch(`/api/knowledge/${id}/documents`, {
-        method: 'PATCH',
-        headers: {
-          'Content-Type': 'application/json',
+    bulkDocumentMutation(
+      {
+        knowledgeBaseId: id,
+        operation: 'enable',
+        documentIds: documentsToEnable.map((doc) => doc.id),
+      },
+      {
+        onSuccess: (result) => {
+          result.updatedDocuments?.forEach((updatedDoc) => {
+            updateDocument(updatedDoc.id, { enabled: updatedDoc.enabled })
+          })
+          logger.info(`Successfully enabled ${result.successCount} documents`)
+          setSelectedDocuments(new Set())
         },
-        body: JSON.stringify({
-          operation: 'enable',
-          documentIds: documentsToEnable.map((doc) => doc.id),
-        }),
-      })
-
-      if (!response.ok) {
-        throw new Error('Failed to enable documents')
       }
-
-      const result = await response.json()
-
-      if (result.success) {
-        result.data.updatedDocuments.forEach((updatedDoc: { id: string; enabled: boolean }) => {
-          updateDocument(updatedDoc.id, { enabled: updatedDoc.enabled })
-        })
-
-        logger.info(`Successfully enabled ${result.data.successCount} documents`)
-      }
-
-      setSelectedDocuments(new Set())
-    } catch (err) {
-      logger.error('Error enabling documents:', err)
-    } finally {
-      setIsBulkOperating(false)
-    }
+    )
   }

   /**
    * Handles bulk disabling of selected documents
    */
-  const handleBulkDisable = async () => {
+  const handleBulkDisable = () => {
     const documentsToDisable = documents.filter(
       (doc) => selectedDocuments.has(doc.id) && doc.enabled
     )
     if (documentsToDisable.length === 0) return

-    try {
-      setIsBulkOperating(true)
-
-      const response = await fetch(`/api/knowledge/${id}/documents`, {
-        method: 'PATCH',
-        headers: {
-          'Content-Type': 'application/json',
+    bulkDocumentMutation(
+      {
+        knowledgeBaseId: id,
+        operation: 'disable',
+        documentIds: documentsToDisable.map((doc) => doc.id),
+      },
+      {
+        onSuccess: (result) => {
+          result.updatedDocuments?.forEach((updatedDoc) => {
+            updateDocument(updatedDoc.id, { enabled: updatedDoc.enabled })
+          })
+          logger.info(`Successfully disabled ${result.successCount} documents`)
+          setSelectedDocuments(new Set())
         },
-        body: JSON.stringify({
-          operation: 'disable',
-          documentIds: documentsToDisable.map((doc) => doc.id),
-        }),
-      })
-
-      if (!response.ok) {
-        throw new Error('Failed to disable documents')
       }
-
-      const result = await response.json()
-
-      if (result.success) {
-        result.data.updatedDocuments.forEach((updatedDoc: { id: string; enabled: boolean }) => {
-          updateDocument(updatedDoc.id, { enabled: updatedDoc.enabled })
-        })
-
-        logger.info(`Successfully disabled ${result.data.successCount} documents`)
-      }
-
-      setSelectedDocuments(new Set())
-    } catch (err) {
-      logger.error('Error disabling documents:', err)
-    } finally {
-      setIsBulkOperating(false)
-    }
+    )
   }

   /**
@@ -956,44 +856,28 @@ export function KnowledgeBase({
   /**
    * Confirms and executes the bulk deletion of selected documents
    */
-  const confirmBulkDelete = async () => {
+  const confirmBulkDelete = () => {
     const documentsToDelete = documents.filter((doc) => selectedDocuments.has(doc.id))
     if (documentsToDelete.length === 0) return

-    try {
-      setIsBulkOperating(true)
-
-      const response = await fetch(`/api/knowledge/${id}/documents`, {
-        method: 'PATCH',
-        headers: {
-          'Content-Type': 'application/json',
+    bulkDocumentMutation(
+      {
+        knowledgeBaseId: id,
+        operation: 'delete',
+        documentIds: documentsToDelete.map((doc) => doc.id),
+      },
+      {
+        onSuccess: (result) => {
+          logger.info(`Successfully deleted ${result.successCount} documents`)
+          refreshDocuments()
+          setSelectedDocuments(new Set())
+        },
+        onSettled: () => {
+          setShowBulkDeleteModal(false)
         },
-        body: JSON.stringify({
-          operation: 'delete',
-          documentIds: documentsToDelete.map((doc) => doc.id),
-        }),
-      })
-
-      if (!response.ok) {
-        throw new Error('Failed to delete documents')
       }
-
-      const result = await response.json()
-
-      if (result.success) {
-        logger.info(`Successfully deleted ${result.data.successCount} documents`)
-      }
-
-      await refreshDocuments()
-
-      setSelectedDocuments(new Set())
-    } catch (err) {
-      logger.error('Error deleting documents:', err)
-    } finally {
-      setIsBulkOperating(false)
-      setShowBulkDeleteModal(false)
-    }
+    )
   }

   const selectedDocumentsList = documents.filter((doc) => selectedDocuments.has(doc.id))
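Note on the optimistic updates in base.tsx above: the new handlers write the expected value into the local documents store up front and roll it back in onError. A cache-level alternative that TanStack Query documents is the onMutate/context pattern sketched below. This is not what the PR implements; the hook name useRenameDocumentOptimistic is hypothetical, and the assumption that knowledgeKeys.document(kbId, docId) caches a DocumentData-shaped object is mine, not the PR's.

// Sketch only: optimistic rename via the query cache instead of the local store.
import { useMutation, useQueryClient } from '@tanstack/react-query'
import type { DocumentData } from '@/lib/knowledge/types'
import { knowledgeKeys, updateDocument } from '@/hooks/queries/knowledge'

export function useRenameDocumentOptimistic() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: updateDocument,
    onMutate: async ({ knowledgeBaseId, documentId, updates }) => {
      const queryKey = knowledgeKeys.document(knowledgeBaseId, documentId)
      // Stop in-flight refetches from overwriting the optimistic value.
      await queryClient.cancelQueries({ queryKey })
      // Snapshot the current cache entry so onError can restore it.
      const previous = queryClient.getQueryData<DocumentData>(queryKey)
      queryClient.setQueryData<DocumentData>(queryKey, (doc) =>
        doc ? { ...doc, filename: updates.filename ?? doc.filename } : doc
      )
      return { previous, queryKey }
    },
    onError: (_err, _vars, context) => {
      // Roll the cache back to the snapshot taken in onMutate.
      if (context?.previous) queryClient.setQueryData(context.queryKey, context.previous)
    },
    onSettled: (_data, _err, { knowledgeBaseId, documentId }) => {
      // Refetch the authoritative document after the mutation settles.
      queryClient.invalidateQueries({
        queryKey: knowledgeKeys.document(knowledgeBaseId, documentId),
      })
    },
  })
}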
diff --git a/apps/sim/hooks/queries/knowledge.ts b/apps/sim/hooks/queries/knowledge.ts
index 3efd66697..eed34c634 100644
--- a/apps/sim/hooks/queries/knowledge.ts
+++ b/apps/sim/hooks/queries/knowledge.ts
@@ -1,4 +1,4 @@
-import { keepPreviousData, useQuery } from '@tanstack/react-query'
+import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
 import type {
   ChunkData,
   ChunksPagination,
@@ -332,3 +332,368 @@ export function useDocumentChunkSearchQuery(
     placeholderData: keepPreviousData,
   })
 }
+
+export interface UpdateChunkParams {
+  knowledgeBaseId: string
+  documentId: string
+  chunkId: string
+  content?: string
+  enabled?: boolean
+}
+
+export async function updateChunk({
+  knowledgeBaseId,
+  documentId,
+  chunkId,
+  content,
+  enabled,
+}: UpdateChunkParams): Promise<ChunkData> {
+  const body: Record<string, unknown> = {}
+  if (content !== undefined) body.content = content
+  if (enabled !== undefined) body.enabled = enabled
+
+  const response = await fetch(
+    `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks/${chunkId}`,
+    {
+      method: 'PUT',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify(body),
+    }
+  )
+
+  if (!response.ok) {
+    const result = await response.json()
+    throw new Error(result.error || 'Failed to update chunk')
+  }
+
+  const result = await response.json()
+  if (!result?.success) {
+    throw new Error(result?.error || 'Failed to update chunk')
+  }
+
+  return result.data
+}
+
+export function useUpdateChunk() {
+  const queryClient = useQueryClient()
+
+  return useMutation({
+    mutationFn: updateChunk,
+    onSuccess: (_, { knowledgeBaseId }) => {
+      queryClient.invalidateQueries({
+        queryKey: knowledgeKeys.detail(knowledgeBaseId),
+      })
+    },
+  })
+}
+
+export interface DeleteChunkParams {
+  knowledgeBaseId: string
+  documentId: string
+  chunkId: string
+}
+
+export async function deleteChunk({
+  knowledgeBaseId,
+  documentId,
+  chunkId,
+}: DeleteChunkParams): Promise<void> {
+  const response = await fetch(
+    `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks/${chunkId}`,
+    { method: 'DELETE' }
+  )
+
+  if (!response.ok) {
+    const result = await response.json()
+    throw new Error(result.error || 'Failed to delete chunk')
+  }
+
+  const result = await response.json()
+  if (!result?.success) {
+    throw new Error(result?.error || 'Failed to delete chunk')
+  }
+}
+
+export function useDeleteChunk() {
+  const queryClient = useQueryClient()
+
+  return useMutation({
+    mutationFn: deleteChunk,
+    onSuccess: (_, { knowledgeBaseId }) => {
+      queryClient.invalidateQueries({
+        queryKey: knowledgeKeys.detail(knowledgeBaseId),
+      })
+    },
+  })
+}
+
+export interface CreateChunkParams {
+  knowledgeBaseId: string
+  documentId: string
+  content: string
+  enabled?: boolean
+}
+
+export async function createChunk({
+  knowledgeBaseId,
+  documentId,
+  content,
+  enabled = true,
+}: CreateChunkParams): Promise<ChunkData> {
+  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks`, {
+    method: 'POST',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify({ content, enabled }),
+  })
+
+  if (!response.ok) {
+    const result = await response.json()
+    throw new Error(result.error || 'Failed to create chunk')
+  }
+
+  const result = await response.json()
+  if (!result?.success || !result?.data) {
+    throw new Error(result?.error || 'Failed to create chunk')
+  }
+
+  return result.data
+}
+
+export function useCreateChunk() {
+  const queryClient = useQueryClient()
+
+  return useMutation({
+    mutationFn: createChunk,
+    onSuccess: (_, { knowledgeBaseId }) => {
+      queryClient.invalidateQueries({
+        queryKey: knowledgeKeys.detail(knowledgeBaseId),
+      })
+    },
+  })
+}
+
+export interface UpdateDocumentParams {
+  knowledgeBaseId: string
+  documentId: string
+  updates: {
+    enabled?: boolean
+    filename?: string
+    retryProcessing?: boolean
+    markFailedDueToTimeout?: boolean
+  }
+}
+
+export async function updateDocument({
+  knowledgeBaseId,
+  documentId,
+  updates,
+}: UpdateDocumentParams): Promise<DocumentData> {
+  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
+    method: 'PUT',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify(updates),
+  })
+
+  if (!response.ok) {
+    const result = await response.json()
+    throw new Error(result.error || 'Failed to update document')
+  }
+
+  const result = await response.json()
+  if (!result?.success) {
+    throw new Error(result?.error || 'Failed to update document')
+  }
+
+  return result.data
+}
+
+export function useUpdateDocument() {
+  const queryClient = useQueryClient()
+
+  return useMutation({
+    mutationFn: updateDocument,
+    onSuccess: (_, { knowledgeBaseId, documentId }) => {
+      queryClient.invalidateQueries({
+        queryKey: knowledgeKeys.detail(knowledgeBaseId),
+      })
+      queryClient.invalidateQueries({
+        queryKey: knowledgeKeys.document(knowledgeBaseId, documentId),
+      })
+    },
+  })
+}
+
+export interface DeleteDocumentParams {
+  knowledgeBaseId: string
+  documentId: string
+}
+
+export async function deleteDocument({
+  knowledgeBaseId,
+  documentId,
+}: DeleteDocumentParams): Promise<void> {
+  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
+    method: 'DELETE',
+  })
+
+  if (!response.ok) {
+    const result = await response.json()
+    throw new Error(result.error || 'Failed to delete document')
+  }
+
+  const result = await response.json()
+  if (!result?.success) {
+    throw new Error(result?.error || 'Failed to delete document')
+  }
+}
+
+export function useDeleteDocument() {
+  const queryClient = useQueryClient()
+
+  return useMutation({
+    mutationFn: deleteDocument,
+    onSuccess: (_, { knowledgeBaseId }) => {
+      queryClient.invalidateQueries({
+        queryKey: knowledgeKeys.detail(knowledgeBaseId),
+      })
+    },
+  })
+}
+
+export interface BulkDocumentOperationParams {
+  knowledgeBaseId: string
+  operation: 'enable' | 'disable' | 'delete'
+  documentIds: string[]
+}
+
+export interface BulkDocumentOperationResult {
+  successCount: number
+  failedCount: number
+  updatedDocuments?: Array<{ id: string; enabled: boolean }>
+}
+
+export async function bulkDocumentOperation({
+  knowledgeBaseId,
+  operation,
+  documentIds,
+}: BulkDocumentOperationParams): Promise<BulkDocumentOperationResult> {
+  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents`, {
+    method: 'PATCH',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify({ operation, documentIds }),
+  })
+
+  if (!response.ok) {
+    const result = await response.json()
+    throw new Error(result.error || `Failed to ${operation} documents`)
+  }
+
+  const result = await response.json()
+  if (!result?.success) {
+    throw new Error(result?.error || `Failed to ${operation} documents`)
+  }
+
+  return result.data
+}
+
+export function useBulkDocumentOperation() {
+  const queryClient = useQueryClient()
+
+  return useMutation({
+    mutationFn: bulkDocumentOperation,
+    onSuccess: (_, { knowledgeBaseId }) => {
+      queryClient.invalidateQueries({
+        queryKey: knowledgeKeys.detail(knowledgeBaseId),
+      })
+    },
+  })
+}
+
+export interface DeleteKnowledgeBaseParams {
+  knowledgeBaseId: string
+}
+
+export async function deleteKnowledgeBase({
+  knowledgeBaseId,
+}: DeleteKnowledgeBaseParams): Promise<void> {
+  const response = await fetch(`/api/knowledge/${knowledgeBaseId}`, {
+    method: 'DELETE',
+  })
+
+  if (!response.ok) {
+    const result = await response.json()
+    throw new Error(result.error || 'Failed to delete knowledge base')
+  }
+
+  const result = await response.json()
+  if (!result?.success) {
+    throw new Error(result?.error || 'Failed to delete knowledge base')
+  }
+}
+
+export function useDeleteKnowledgeBase(workspaceId?: string) {
+  const queryClient = useQueryClient()
+
+  return useMutation({
+    mutationFn: deleteKnowledgeBase,
+    onSuccess: () => {
+      queryClient.invalidateQueries({
+        queryKey: knowledgeKeys.list(workspaceId),
+      })
+    },
+  })
+}
+
+export interface BulkChunkOperationParams {
+  knowledgeBaseId: string
+  documentId: string
+  operation: 'enable' | 'disable' | 'delete'
+  chunkIds: string[]
+}
+
+export interface BulkChunkOperationResult {
+  successCount: number
+  failedCount: number
+  results: Array<{
+    operation: string
+    chunkIds: string[]
+  }>
+}
+
+export async function bulkChunkOperation({
+  knowledgeBaseId,
+  documentId,
+  operation,
+  chunkIds,
+}: BulkChunkOperationParams): Promise<BulkChunkOperationResult> {
+  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks`, {
+    method: 'PATCH',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify({ operation, chunkIds }),
+  })
+
+  if (!response.ok) {
+    const result = await response.json()
+    throw new Error(result.error || `Failed to ${operation} chunks`)
+  }
+
+  const result = await response.json()
+  if (!result?.success) {
+    throw new Error(result?.error || `Failed to ${operation} chunks`)
+  }
+
+  return result.data
+}
+
+export function useBulkChunkOperation() {
+  const queryClient = useQueryClient()
+
+  return useMutation({
+    mutationFn: bulkChunkOperation,
+    onSuccess: (_, { knowledgeBaseId }) => {
+      queryClient.invalidateQueries({
+        queryKey: knowledgeKeys.detail(knowledgeBaseId),
+      })
+    },
+  })
+}
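For reference, a minimal sketch of how a component consumes one of the new hooks, condensed from the create-chunk-modal changes above. The component name AddChunkButton and its props are illustrative only, not part of the PR.

// Sketch only: minimal consumer of useCreateChunk.
import { useCreateChunk } from '@/hooks/queries/knowledge'

// Hypothetical props; the real modal receives DocumentData and more.
interface AddChunkButtonProps {
  knowledgeBaseId: string
  documentId: string
  content: string
  onDone: () => void
}

export function AddChunkButton({ knowledgeBaseId, documentId, content, onDone }: AddChunkButtonProps) {
  const { mutate: createChunk, isPending, error } = useCreateChunk()

  const handleClick = () => {
    if (content.trim().length === 0 || isPending) return
    createChunk(
      { knowledgeBaseId, documentId, content: content.trim(), enabled: true },
      // knowledgeKeys.detail invalidation already happens inside useCreateChunk's onSuccess.
      { onSuccess: onDone }
    )
  }

  return (
    <>
      <button type='button' onClick={handleClick} disabled={isPending}>
        {isPending ? 'Creating…' : 'Create chunk'}
      </button>
      {error ? <p role='alert'>{error.message}</p> : null}
    </>
  )
}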