Mirror of https://github.com/simstudioai/sim.git, synced 2026-01-05 05:04:10 -05:00
improvement(kb): removed zustand cache syncing in kb, added chunk text tokenizer (#2647)

* improvement(kb): removed zustand cache syncing in kb, added chunk text tokenizer
* removed dead code
* removed redundant hook
* remove unused hook
* remove alert notification and use simple error
* added more popover actions
* removed debug instrumentation
* remove extraneous comments
* removed unused handler
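The chunk text tokenizer added below is built on two helpers from '@/lib/tokenization/estimators'. As a minimal, non-authoritative sketch of how those helpers are used in this commit (both helper names come from the diff; the describeChunk wrapper is purely illustrative):

import { getAccurateTokenCount, getTokenStrings } from '@/lib/tokenization/estimators'

// Illustrative wrapper, not part of the commit: summarize a chunk's token usage.
function describeChunk(content: string): { tokenCount: number; tokens: string[] } {
  return {
    // total token count, without materializing the individual tokens
    tokenCount: getAccurateTokenCount(content),
    // the individual token strings, used below to render the colored token view
    tokens: getTokenStrings(content),
  }
}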
@@ -1134,9 +1134,9 @@ export default function ResumeExecutionPage({
)}
</div>
{entry.failureReason && (
<div className='mt-[8px] rounded-[4px] border border-[var(--text-error)]/20 bg-[var(--text-error)]/10 p-[8px] text-[11px] text-[var(--text-error)]'>
<p className='mt-[8px] text-[11px] text-[var(--text-error)]'>
{entry.failureReason}
</div>
</p>
)}
</div>
)
@@ -1229,9 +1229,9 @@ export default function ResumeExecutionPage({
</p>
)}
{selectedDetail.activeResumeEntry.failureReason && (
<div className='mt-[8px] rounded-[4px] border border-[var(--text-error)]/30 bg-[var(--text-error)]/10 p-[12px] text-[13px] text-[var(--text-error)]'>
<p className='mt-[8px] text-[12px] text-[var(--text-error)]'>
{selectedDetail.activeResumeEntry.failureReason}
</div>
</p>
)}
</div>
</div>
@@ -1363,11 +1363,7 @@ export default function ResumeExecutionPage({
)}

{/* Error/Success Messages */}
{error && (
<div className='rounded-[6px] border border-[var(--text-error)]/30 bg-[var(--text-error)]/10 p-[16px]'>
<p className='text-[13px] text-[var(--text-error)]'>{error}</p>
</div>
)}
{error && <p className='text-[12px] text-[var(--text-error)]'>{error}</p>}

{message && (
<div className='rounded-[6px] border border-[var(--text-success)]/30 bg-[var(--text-success)]/10 p-[16px]'>
@@ -0,0 +1,148 @@
'use client'

import { Popover, PopoverAnchor, PopoverContent, PopoverItem } from '@/components/emcn'

interface ChunkContextMenuProps {
  isOpen: boolean
  position: { x: number; y: number }
  menuRef: React.RefObject<HTMLDivElement | null>
  onClose: () => void
  /**
   * Chunk-specific actions (shown when right-clicking on a chunk)
   */
  onOpenInNewTab?: () => void
  onEdit?: () => void
  onCopyContent?: () => void
  onToggleEnabled?: () => void
  onDelete?: () => void
  /**
   * Empty space action (shown when right-clicking on empty space)
   */
  onAddChunk?: () => void
  /**
   * Whether the chunk is currently enabled
   */
  isChunkEnabled?: boolean
  /**
   * Whether a chunk is selected (vs empty space)
   */
  hasChunk: boolean
  /**
   * Whether toggle enabled is disabled
   */
  disableToggleEnabled?: boolean
  /**
   * Whether delete is disabled
   */
  disableDelete?: boolean
  /**
   * Whether add chunk is disabled
   */
  disableAddChunk?: boolean
}

/**
 * Context menu for chunks table.
 * Shows chunk actions when right-clicking a row, or "Create chunk" when right-clicking empty space.
 */
export function ChunkContextMenu({
  isOpen,
  position,
  menuRef,
  onClose,
  onOpenInNewTab,
  onEdit,
  onCopyContent,
  onToggleEnabled,
  onDelete,
  onAddChunk,
  isChunkEnabled = true,
  hasChunk,
  disableToggleEnabled = false,
  disableDelete = false,
  disableAddChunk = false,
}: ChunkContextMenuProps) {
  return (
    <Popover open={isOpen} onOpenChange={onClose} variant='secondary' size='sm'>
      <PopoverAnchor
        style={{
          position: 'fixed',
          left: `${position.x}px`,
          top: `${position.y}px`,
          width: '1px',
          height: '1px',
        }}
      />
      <PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
        {hasChunk ? (
          <>
            {onOpenInNewTab && (
              <PopoverItem
                onClick={() => {
                  onOpenInNewTab()
                  onClose()
                }}
              >
                Open in new tab
              </PopoverItem>
            )}
            {onEdit && (
              <PopoverItem
                onClick={() => {
                  onEdit()
                  onClose()
                }}
              >
                Edit
              </PopoverItem>
            )}
            {onCopyContent && (
              <PopoverItem
                onClick={() => {
                  onCopyContent()
                  onClose()
                }}
              >
                Copy content
              </PopoverItem>
            )}
            {onToggleEnabled && (
              <PopoverItem
                disabled={disableToggleEnabled}
                onClick={() => {
                  onToggleEnabled()
                  onClose()
                }}
              >
                {isChunkEnabled ? 'Disable' : 'Enable'}
              </PopoverItem>
            )}
            {onDelete && (
              <PopoverItem
                disabled={disableDelete}
                onClick={() => {
                  onDelete()
                  onClose()
                }}
              >
                Delete
              </PopoverItem>
            )}
          </>
        ) : (
          onAddChunk && (
            <PopoverItem
              disabled={disableAddChunk}
              onClick={() => {
                onAddChunk()
                onClose()
              }}
            >
              Create chunk
            </PopoverItem>
          )
        )}
      </PopoverContent>
    </Popover>
  )
}
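A minimal usage sketch for the component above, wired through the useContextMenu hook the same way the Document view does later in this diff. The ChunksTableExample wrapper and its local state are illustrative assumptions, not code from this commit:

import { useState } from 'react'
import type { ChunkData } from '@/lib/knowledge/types'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { ChunkContextMenu } from './chunk-context-menu'

function ChunksTableExample({ chunks }: { chunks: ChunkData[] }) {
  const { isOpen, position, menuRef, handleContextMenu, closeMenu } = useContextMenu()
  const [menuChunk, setMenuChunk] = useState<ChunkData | null>(null)

  return (
    // Right-clicking empty space targets no chunk; right-clicking a row targets that chunk.
    <div onContextMenu={(e) => { setMenuChunk(null); handleContextMenu(e) }}>
      {chunks.map((chunk) => (
        <div
          key={chunk.id}
          onContextMenu={(e) => {
            e.stopPropagation() // keep the container handler from resetting the selection
            setMenuChunk(chunk)
            handleContextMenu(e)
          }}
        >
          {chunk.content}
        </div>
      ))}
      <ChunkContextMenu
        isOpen={isOpen}
        position={position}
        menuRef={menuRef}
        onClose={() => { closeMenu(); setMenuChunk(null) }}
        hasChunk={menuChunk !== null}
        isChunkEnabled={menuChunk?.enabled ?? true}
        onCopyContent={menuChunk ? () => navigator.clipboard.writeText(menuChunk.content) : undefined}
      />
    </div>
  )
}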
@@ -0,0 +1 @@
export { ChunkContextMenu } from './chunk-context-menu'
@@ -2,7 +2,7 @@

import { useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { AlertCircle } from 'lucide-react'
import { useQueryClient } from '@tanstack/react-query'
import {
Button,
Label,
@@ -13,7 +13,8 @@ import {
ModalHeader,
Textarea,
} from '@/components/emcn'
import type { ChunkData, DocumentData } from '@/stores/knowledge/store'
import type { DocumentData } from '@/lib/knowledge/types'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

const logger = createLogger('CreateChunkModal')

@@ -22,7 +23,6 @@ interface CreateChunkModalProps {
onOpenChange: (open: boolean) => void
document: DocumentData | null
knowledgeBaseId: string
onChunkCreated?: (chunk: ChunkData) => void
}

export function CreateChunkModal({
@@ -30,8 +30,8 @@ export function CreateChunkModal({
onOpenChange,
document,
knowledgeBaseId,
onChunkCreated,
}: CreateChunkModalProps) {
const queryClient = useQueryClient()
const [content, setContent] = useState('')
const [isCreating, setIsCreating] = useState(false)
const [error, setError] = useState<string | null>(null)
@@ -77,9 +77,9 @@ export function CreateChunkModal({
if (result.success && result.data) {
logger.info('Chunk created successfully:', result.data.id)

if (onChunkCreated) {
onChunkCreated(result.data)
}
await queryClient.invalidateQueries({
queryKey: knowledgeKeys.detail(knowledgeBaseId),
})

onClose()
} else {
@@ -96,7 +96,6 @@ export function CreateChunkModal({

const onClose = () => {
onOpenChange(false)
// Reset form state when modal closes
setContent('')
setError(null)
setShowUnsavedChangesAlert(false)
@@ -126,13 +125,7 @@ export function CreateChunkModal({
<form>
<ModalBody className='!pb-[16px]'>
<div className='flex flex-col gap-[8px]'>
{/* Error Display */}
{error && (
<div className='flex items-center gap-2 rounded-md border border-[var(--text-error)]/50 bg-[var(--text-error)]/10 p-3'>
<AlertCircle className='h-4 w-4 text-[var(--text-error)]' />
<p className='text-[var(--text-error)] text-sm'>{error}</p>
</div>
)}
{error && <p className='text-[12px] text-[var(--text-error)]'>{error}</p>}

{/* Content Input Section */}
<Label htmlFor='content'>Chunk</Label>
@@ -2,8 +2,10 @@

import { useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { Button, Modal, ModalBody, ModalContent, ModalFooter, ModalHeader } from '@/components/emcn'
import type { ChunkData } from '@/stores/knowledge/store'
import type { ChunkData } from '@/lib/knowledge/types'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

const logger = createLogger('DeleteChunkModal')

@@ -13,7 +15,6 @@ interface DeleteChunkModalProps {
documentId: string
isOpen: boolean
onClose: () => void
onChunkDeleted?: () => void
}

export function DeleteChunkModal({
@@ -22,8 +23,8 @@ export function DeleteChunkModal({
documentId,
isOpen,
onClose,
onChunkDeleted,
}: DeleteChunkModalProps) {
const queryClient = useQueryClient()
const [isDeleting, setIsDeleting] = useState(false)

const handleDeleteChunk = async () => {
@@ -47,16 +48,17 @@ export function DeleteChunkModal({

if (result.success) {
logger.info('Chunk deleted successfully:', chunk.id)
if (onChunkDeleted) {
onChunkDeleted()
}

await queryClient.invalidateQueries({
queryKey: knowledgeKeys.detail(knowledgeBaseId),
})

onClose()
} else {
throw new Error(result.error || 'Failed to delete chunk')
}
} catch (err) {
logger.error('Error deleting chunk:', err)
// You might want to show an error state here
} finally {
setIsDeleting(false)
}
@@ -18,13 +18,13 @@ import {
import { cn } from '@/lib/core/utils/cn'
import { ALL_TAG_SLOTS, type AllTagSlot, MAX_TAG_SLOTS } from '@/lib/knowledge/constants'
import type { DocumentTag } from '@/lib/knowledge/tags/types'
import type { DocumentData } from '@/lib/knowledge/types'
import {
type TagDefinition,
useKnowledgeBaseTagDefinitions,
} from '@/hooks/use-knowledge-base-tag-definitions'
import { useNextAvailableSlot } from '@/hooks/use-next-available-slot'
import { type TagDefinitionInput, useTagDefinitions } from '@/hooks/use-tag-definitions'
import { type DocumentData, useKnowledgeStore } from '@/stores/knowledge/store'

const logger = createLogger('DocumentTagsModal')

@@ -93,8 +93,6 @@ export function DocumentTagsModal({
documentData,
onDocumentUpdate,
}: DocumentTagsModalProps) {
const { updateDocument: updateDocumentInStore } = useKnowledgeStore()

const documentTagHook = useTagDefinitions(knowledgeBaseId, documentId)
const kbTagHook = useKnowledgeBaseTagDefinitions(knowledgeBaseId)
const { getNextAvailableSlot: getServerNextSlot } = useNextAvailableSlot(knowledgeBaseId)
@@ -171,23 +169,14 @@ export function DocumentTagsModal({
throw new Error('Failed to update document tags')
}

updateDocumentInStore(knowledgeBaseId, documentId, tagData as Record<string, string>)
onDocumentUpdate?.(tagData as Record<string, string>)

await fetchTagDefinitions()
} catch (error) {
logger.error('Error updating document tags:', error)
throw error
}
},
[
documentData,
knowledgeBaseId,
documentId,
updateDocumentInStore,
fetchTagDefinitions,
onDocumentUpdate,
]
[documentData, knowledgeBaseId, documentId, fetchTagDefinitions, onDocumentUpdate]
)

const handleRemoveTag = async (index: number) => {
@@ -1,9 +1,9 @@
'use client'

import { useEffect, useState } from 'react'
import * as DialogPrimitive from '@radix-ui/react-dialog'
import { useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { AlertCircle, ChevronDown, ChevronUp, X } from 'lucide-react'
import { useQueryClient } from '@tanstack/react-query'
import { ChevronDown, ChevronUp } from 'lucide-react'
import {
Button,
Label,
@@ -12,11 +12,14 @@ import {
ModalContent,
ModalFooter,
ModalHeader,
Switch,
Textarea,
Tooltip,
} from '@/components/emcn'
import type { ChunkData, DocumentData } from '@/lib/knowledge/types'
import { getAccurateTokenCount, getTokenStrings } from '@/lib/tokenization/estimators'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import type { ChunkData, DocumentData } from '@/stores/knowledge/store'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

const logger = createLogger('EditChunkModal')

@@ -26,13 +29,12 @@ interface EditChunkModalProps {
knowledgeBaseId: string
isOpen: boolean
onClose: () => void
onChunkUpdate?: (updatedChunk: ChunkData) => void
// New props for navigation
allChunks?: ChunkData[]
currentPage?: number
totalPages?: number
onNavigateToChunk?: (chunk: ChunkData) => void
onNavigateToPage?: (page: number, selectChunk: 'first' | 'last') => Promise<void>
maxChunkSize?: number
}

export function EditChunkModal({
@@ -41,13 +43,14 @@ export function EditChunkModal({
knowledgeBaseId,
isOpen,
onClose,
onChunkUpdate,
allChunks = [],
currentPage = 1,
totalPages = 1,
onNavigateToChunk,
onNavigateToPage,
maxChunkSize,
}: EditChunkModalProps) {
const queryClient = useQueryClient()
const userPermissions = useUserPermissionsContext()
const [editedContent, setEditedContent] = useState(chunk?.content || '')
const [isSaving, setIsSaving] = useState(false)
@@ -55,9 +58,39 @@ export function EditChunkModal({
const [error, setError] = useState<string | null>(null)
const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
const [pendingNavigation, setPendingNavigation] = useState<(() => void) | null>(null)
const [tokenizerOn, setTokenizerOn] = useState(false)
const textareaRef = useRef<HTMLTextAreaElement>(null)

const hasUnsavedChanges = editedContent !== (chunk?.content || '')

const tokenStrings = useMemo(() => {
if (!tokenizerOn || !editedContent) return []
return getTokenStrings(editedContent)
}, [editedContent, tokenizerOn])

const tokenCount = useMemo(() => {
if (!editedContent) return 0
if (tokenizerOn) return tokenStrings.length
return getAccurateTokenCount(editedContent)
}, [editedContent, tokenizerOn, tokenStrings])

const TOKEN_BG_COLORS = [
'rgba(239, 68, 68, 0.55)', // Red
'rgba(249, 115, 22, 0.55)', // Orange
'rgba(234, 179, 8, 0.55)', // Yellow
'rgba(132, 204, 22, 0.55)', // Lime
'rgba(34, 197, 94, 0.55)', // Green
'rgba(20, 184, 166, 0.55)', // Teal
'rgba(6, 182, 212, 0.55)', // Cyan
'rgba(59, 130, 246, 0.55)', // Blue
'rgba(139, 92, 246, 0.55)', // Violet
'rgba(217, 70, 239, 0.55)', // Fuchsia
]

const getTokenBgColor = (index: number): string => {
return TOKEN_BG_COLORS[index % TOKEN_BG_COLORS.length]
}

useEffect(() => {
if (chunk?.content) {
setEditedContent(chunk.content)
@@ -96,8 +129,10 @@ export function EditChunkModal({

const result = await response.json()

if (result.success && onChunkUpdate) {
onChunkUpdate(result.data)
if (result.success) {
await queryClient.invalidateQueries({
queryKey: knowledgeKeys.detail(knowledgeBaseId),
})
}
} catch (err) {
logger.error('Error updating chunk:', err)
@@ -125,7 +160,6 @@ export function EditChunkModal({
const nextChunk = allChunks[currentChunkIndex + 1]
onNavigateToChunk?.(nextChunk)
} else if (currentPage < totalPages) {
// Load next page and navigate to first chunk
await onNavigateToPage?.(currentPage + 1, 'first')
}
}
@@ -173,12 +207,9 @@ export function EditChunkModal({
<>
<Modal open={isOpen} onOpenChange={handleCloseAttempt}>
<ModalContent size='lg'>
<div className='flex items-center justify-between px-[16px] py-[10px]'>
<DialogPrimitive.Title className='font-medium text-[16px] text-[var(--text-primary)]'>
Edit Chunk #{chunk.chunkIndex}
</DialogPrimitive.Title>

<div className='flex flex-shrink-0 items-center gap-[8px]'>
<ModalHeader>
<div className='flex items-center gap-[8px]'>
<span>Edit Chunk #{chunk.chunkIndex}</span>
{/* Navigation Controls */}
<div className='flex items-center gap-[6px]'>
<Tooltip.Root>
@@ -225,42 +256,60 @@ export function EditChunkModal({
</Tooltip.Content>
</Tooltip.Root>
</div>

<Button
variant='ghost'
className='h-[16px] w-[16px] p-0'
onClick={handleCloseAttempt}
>
<X className='h-[16px] w-[16px]' />
<span className='sr-only'>Close</span>
</Button>
</div>
</div>
</ModalHeader>

<form>
<ModalBody className='!pb-[16px]'>
<div className='flex flex-col gap-[8px]'>
{/* Error Display */}
{error && (
<div className='flex items-center gap-2 rounded-md border border-[var(--text-error)]/50 bg-[var(--text-error)]/10 p-3'>
<AlertCircle className='h-4 w-4 text-[var(--text-error)]' />
<p className='text-[var(--text-error)] text-sm'>{error}</p>
</div>
)}
{error && <p className='text-[12px] text-[var(--text-error)]'>{error}</p>}

{/* Content Input Section */}
<Label htmlFor='content'>Chunk</Label>
<Textarea
id='content'
value={editedContent}
onChange={(e) => setEditedContent(e.target.value)}
placeholder={
userPermissions.canEdit ? 'Enter chunk content...' : 'Read-only view'
}
rows={20}
disabled={isSaving || isNavigating || !userPermissions.canEdit}
readOnly={!userPermissions.canEdit}
/>
{tokenizerOn ? (
/* Tokenizer view - matches Textarea styling exactly (transparent border for spacing) */
<div
className='h-[418px] overflow-y-auto whitespace-pre-wrap break-words rounded-[4px] border border-transparent bg-[var(--surface-5)] px-[8px] py-[8px] font-medium font-sans text-[var(--text-primary)] text-sm'
style={{ minHeight: '418px' }}
>
{tokenStrings.map((token, index) => (
<span
key={index}
style={{
backgroundColor: getTokenBgColor(index),
}}
>
{token}
</span>
))}
</div>
) : (
/* Edit view - regular textarea */
<Textarea
ref={textareaRef}
id='content'
value={editedContent}
onChange={(e) => setEditedContent(e.target.value)}
placeholder={
userPermissions.canEdit ? 'Enter chunk content...' : 'Read-only view'
}
rows={20}
disabled={isSaving || isNavigating || !userPermissions.canEdit}
readOnly={!userPermissions.canEdit}
/>
)}
</div>

{/* Tokenizer Section */}
<div className='flex items-center justify-between pt-[12px]'>
<div className='flex items-center gap-[8px]'>
<span className='text-[12px] text-[var(--text-secondary)]'>Tokenizer</span>
<Switch checked={tokenizerOn} onCheckedChange={setTokenizerOn} />
</div>
<span className='text-[12px] text-[var(--text-secondary)]'>
{tokenCount.toLocaleString()}
{maxChunkSize !== undefined && `/${maxChunkSize.toLocaleString()}`} tokens
</span>
</div>
</ModalBody>
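Since the footer above renders the live token count against maxChunkSize, a small helper along these lines could flag over-sized chunks; isOverTokenLimit is hypothetical and only getAccurateTokenCount comes from this commit:

import { getAccurateTokenCount } from '@/lib/tokenization/estimators'

// Hypothetical helper: true when the edited content exceeds the knowledge base's max chunk size.
function isOverTokenLimit(content: string, maxChunkSize?: number): boolean {
  if (!content || maxChunkSize === undefined) return false
  return getAccurateTokenCount(content) > maxChunkSize
}

// e.g. isOverTokenLimit(editedContent, maxChunkSize) could drive a warning color on the count.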
@@ -1,3 +1,4 @@
export { ChunkContextMenu } from './chunk-context-menu'
export { CreateChunkModal } from './create-chunk-modal/create-chunk-modal'
export { DeleteChunkModal } from './delete-chunk-modal/delete-chunk-modal'
export { DocumentTagsModal } from './document-tags-modal/document-tags-modal'
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { startTransition, useCallback, useEffect, useState } from 'react'
|
||||
import { startTransition, useCallback, useEffect, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { useQueryClient } from '@tanstack/react-query'
|
||||
import {
|
||||
@@ -35,7 +35,9 @@ import {
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { SearchHighlight } from '@/components/ui/search-highlight'
|
||||
import { Skeleton } from '@/components/ui/skeleton'
|
||||
import type { ChunkData } from '@/lib/knowledge/types'
|
||||
import {
|
||||
ChunkContextMenu,
|
||||
CreateChunkModal,
|
||||
DeleteChunkModal,
|
||||
DocumentTagsModal,
|
||||
@@ -43,9 +45,9 @@ import {
|
||||
} from '@/app/workspace/[workspaceId]/knowledge/[id]/[documentId]/components'
|
||||
import { ActionBar } from '@/app/workspace/[workspaceId]/knowledge/[id]/components'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
|
||||
import { knowledgeKeys } from '@/hooks/queries/knowledge'
|
||||
import { useDocumentChunks } from '@/hooks/use-knowledge'
|
||||
import { type ChunkData, type DocumentData, useKnowledgeStore } from '@/stores/knowledge/store'
|
||||
import { useDocument, useDocumentChunks, useKnowledgeBase } from '@/hooks/use-knowledge'
|
||||
|
||||
const logger = createLogger('Document')
|
||||
|
||||
@@ -260,12 +262,6 @@ export function Document({
|
||||
knowledgeBaseName,
|
||||
documentName,
|
||||
}: DocumentProps) {
|
||||
const {
|
||||
getCachedKnowledgeBase,
|
||||
getCachedDocuments,
|
||||
updateDocument: updateDocumentInStore,
|
||||
removeDocument,
|
||||
} = useKnowledgeStore()
|
||||
const queryClient = useQueryClient()
|
||||
const { workspaceId } = useParams()
|
||||
const router = useRouter()
|
||||
@@ -273,22 +269,19 @@ export function Document({
|
||||
const currentPageFromURL = Number.parseInt(searchParams.get('page') || '1', 10)
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
/**
|
||||
* Get cached document synchronously for immediate render
|
||||
*/
|
||||
const getInitialCachedDocument = useCallback(() => {
|
||||
const cachedDocuments = getCachedDocuments(knowledgeBaseId)
|
||||
return cachedDocuments?.documents?.find((d) => d.id === documentId) || null
|
||||
}, [getCachedDocuments, knowledgeBaseId, documentId])
|
||||
const { knowledgeBase } = useKnowledgeBase(knowledgeBaseId)
|
||||
const {
|
||||
document: documentData,
|
||||
isLoading: isLoadingDocument,
|
||||
error: documentError,
|
||||
} = useDocument(knowledgeBaseId, documentId)
|
||||
|
||||
const [showTagsModal, setShowTagsModal] = useState(false)
|
||||
|
||||
// Search state management
|
||||
const [searchQuery, setSearchQuery] = useState('')
|
||||
const [debouncedSearchQuery, setDebouncedSearchQuery] = useState('')
|
||||
const [isSearching, setIsSearching] = useState(false)
|
||||
|
||||
// Load initial chunks (no search) for immediate display
|
||||
const {
|
||||
chunks: initialChunks,
|
||||
currentPage: initialPage,
|
||||
@@ -299,16 +292,13 @@ export function Document({
|
||||
error: initialError,
|
||||
refreshChunks: initialRefreshChunks,
|
||||
updateChunk: initialUpdateChunk,
|
||||
} = useDocumentChunks(knowledgeBaseId, documentId, currentPageFromURL, '', {
|
||||
enableClientSearch: false,
|
||||
})
|
||||
isFetching: isFetchingChunks,
|
||||
} = useDocumentChunks(knowledgeBaseId, documentId, currentPageFromURL)
|
||||
|
||||
// Search results state
|
||||
const [searchResults, setSearchResults] = useState<ChunkData[]>([])
|
||||
const [isLoadingSearch, setIsLoadingSearch] = useState(false)
|
||||
const [searchError, setSearchError] = useState<string | null>(null)
|
||||
|
||||
// Load all search results when query changes
|
||||
useEffect(() => {
|
||||
if (!debouncedSearchQuery.trim()) {
|
||||
setSearchResults([])
|
||||
@@ -326,7 +316,7 @@ export function Document({
|
||||
const allResults: ChunkData[] = []
|
||||
let hasMore = true
|
||||
let offset = 0
|
||||
const limit = 100 // Larger batches for search
|
||||
const limit = 100
|
||||
|
||||
while (hasMore && isMounted) {
|
||||
const response = await fetch(
|
||||
@@ -373,7 +363,6 @@ export function Document({
|
||||
const [selectedChunk, setSelectedChunk] = useState<ChunkData | null>(null)
|
||||
const [isModalOpen, setIsModalOpen] = useState(false)
|
||||
|
||||
// Debounce search query with 200ms delay for optimal UX
|
||||
useEffect(() => {
|
||||
const handler = setTimeout(() => {
|
||||
startTransition(() => {
|
||||
@@ -387,12 +376,7 @@ export function Document({
|
||||
}
|
||||
}, [searchQuery])
|
||||
|
||||
// Determine which data to show
|
||||
const showingSearch = isSearching && searchQuery.trim().length > 0 && searchResults.length > 0
|
||||
|
||||
// Removed unused allDisplayChunks variable
|
||||
|
||||
// Client-side pagination for search results
|
||||
const SEARCH_PAGE_SIZE = 50
|
||||
const maxSearchPages = Math.ceil(searchResults.length / SEARCH_PAGE_SIZE)
|
||||
const searchCurrentPage =
|
||||
@@ -414,7 +398,6 @@ export function Document({
|
||||
|
||||
const goToPage = useCallback(
|
||||
async (page: number) => {
|
||||
// Update URL first for both modes
|
||||
const params = new URLSearchParams(window.location.search)
|
||||
if (page > 1) {
|
||||
params.set('page', page.toString())
|
||||
@@ -424,10 +407,8 @@ export function Document({
|
||||
window.history.replaceState(null, '', `?${params.toString()}`)
|
||||
|
||||
if (showingSearch) {
|
||||
// For search, URL update is sufficient (client-side pagination)
|
||||
return
|
||||
}
|
||||
// For normal view, also trigger server-side pagination
|
||||
return await initialGoToPage(page)
|
||||
},
|
||||
[showingSearch, initialGoToPage]
|
||||
@@ -448,69 +429,24 @@ export function Document({
|
||||
const refreshChunks = showingSearch ? async () => {} : initialRefreshChunks
|
||||
const updateChunk = showingSearch ? (id: string, updates: any) => {} : initialUpdateChunk
|
||||
|
||||
const initialCachedDoc = getInitialCachedDocument()
|
||||
const [documentData, setDocumentData] = useState<DocumentData | null>(initialCachedDoc)
|
||||
const [isLoadingDocument, setIsLoadingDocument] = useState(!initialCachedDoc)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
|
||||
const [isCreateChunkModalOpen, setIsCreateChunkModalOpen] = useState(false)
|
||||
const [chunkToDelete, setChunkToDelete] = useState<ChunkData | null>(null)
|
||||
const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false)
|
||||
const [isBulkOperating, setIsBulkOperating] = useState(false)
|
||||
const [showDeleteDocumentDialog, setShowDeleteDocumentDialog] = useState(false)
|
||||
const [isDeletingDocument, setIsDeletingDocument] = useState(false)
|
||||
const [contextMenuChunk, setContextMenuChunk] = useState<ChunkData | null>(null)
|
||||
|
||||
const combinedError = error || searchError || initialError
|
||||
const {
|
||||
isOpen: isContextMenuOpen,
|
||||
position: contextMenuPosition,
|
||||
menuRef,
|
||||
handleContextMenu: baseHandleContextMenu,
|
||||
closeMenu: closeContextMenu,
|
||||
} = useContextMenu()
|
||||
|
||||
// URL updates are handled directly in goToPage function to prevent pagination conflicts
|
||||
const combinedError = documentError || searchError || initialError
|
||||
|
||||
useEffect(() => {
|
||||
const fetchDocument = async () => {
|
||||
// Check for cached data first
|
||||
const cachedDocuments = getCachedDocuments(knowledgeBaseId)
|
||||
const cachedDoc = cachedDocuments?.documents?.find((d) => d.id === documentId)
|
||||
|
||||
if (cachedDoc) {
|
||||
setDocumentData(cachedDoc)
|
||||
setIsLoadingDocument(false)
|
||||
return
|
||||
}
|
||||
|
||||
// Only show loading and fetch if we don't have cached data
|
||||
setIsLoadingDocument(true)
|
||||
setError(null)
|
||||
|
||||
try {
|
||||
const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`)
|
||||
|
||||
if (!response.ok) {
|
||||
if (response.status === 404) {
|
||||
throw new Error('Document not found')
|
||||
}
|
||||
throw new Error(`Failed to fetch document: ${response.statusText}`)
|
||||
}
|
||||
|
||||
const result = await response.json()
|
||||
|
||||
if (result.success) {
|
||||
setDocumentData(result.data)
|
||||
} else {
|
||||
throw new Error(result.error || 'Failed to fetch document')
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('Error fetching document:', err)
|
||||
setError(err instanceof Error ? err.message : 'An error occurred')
|
||||
} finally {
|
||||
setIsLoadingDocument(false)
|
||||
}
|
||||
}
|
||||
|
||||
if (knowledgeBaseId && documentId) {
|
||||
fetchDocument()
|
||||
}
|
||||
}, [knowledgeBaseId, documentId, getCachedDocuments])
|
||||
|
||||
const knowledgeBase = getCachedKnowledgeBase(knowledgeBaseId)
|
||||
const effectiveKnowledgeBaseName = knowledgeBase?.name || knowledgeBaseName || 'Knowledge Base'
|
||||
const effectiveDocumentName = documentData?.filename || documentName || 'Document'
|
||||
|
||||
@@ -573,8 +509,7 @@ export function Document({
|
||||
}
|
||||
}
|
||||
|
||||
const handleChunkDeleted = async () => {
|
||||
await refreshChunks()
|
||||
const handleCloseDeleteModal = () => {
|
||||
if (chunkToDelete) {
|
||||
setSelectedChunks((prev) => {
|
||||
const newSet = new Set(prev)
|
||||
@@ -582,9 +517,6 @@ export function Document({
|
||||
return newSet
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const handleCloseDeleteModal = () => {
|
||||
setIsDeleteModalOpen(false)
|
||||
setChunkToDelete(null)
|
||||
}
|
||||
@@ -609,11 +541,6 @@ export function Document({
|
||||
}
|
||||
}
|
||||
|
||||
const handleChunkCreated = async () => {
|
||||
// Refresh the chunks list to include the new chunk
|
||||
await refreshChunks()
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles deleting the document
|
||||
*/
|
||||
@@ -634,9 +561,6 @@ export function Document({
|
||||
const result = await response.json()
|
||||
|
||||
if (result.success) {
|
||||
removeDocument(knowledgeBaseId, documentId)
|
||||
|
||||
// Invalidate React Query cache to ensure fresh data on KB page
|
||||
await queryClient.invalidateQueries({
|
||||
queryKey: knowledgeKeys.detail(knowledgeBaseId),
|
||||
})
|
||||
@@ -651,7 +575,6 @@ export function Document({
|
||||
}
|
||||
}
|
||||
|
||||
// Shared utility function for bulk chunk operations
|
||||
const performBulkChunkOperation = async (
|
||||
operation: 'enable' | 'disable' | 'delete',
|
||||
chunks: ChunkData[]
|
||||
@@ -683,10 +606,8 @@ export function Document({
|
||||
|
||||
if (result.success) {
|
||||
if (operation === 'delete') {
|
||||
// Refresh chunks list to reflect deletions
|
||||
await refreshChunks()
|
||||
} else {
|
||||
// Update successful chunks in the store for enable/disable operations
|
||||
result.data.results.forEach((opResult: any) => {
|
||||
if (opResult.operation === operation) {
|
||||
opResult.chunkIds.forEach((chunkId: string) => {
|
||||
@@ -699,7 +620,6 @@ export function Document({
|
||||
logger.info(`Successfully ${operation}d ${result.data.successCount} chunks`)
|
||||
}
|
||||
|
||||
// Clear selection after successful operation
|
||||
setSelectedChunks(new Set())
|
||||
} catch (err) {
|
||||
logger.error(`Error ${operation}ing chunks:`, err)
|
||||
@@ -727,22 +647,60 @@ export function Document({
|
||||
await performBulkChunkOperation('delete', chunksToDelete)
|
||||
}
|
||||
|
||||
// Calculate bulk operation counts
|
||||
const selectedChunksList = displayChunks.filter((chunk) => selectedChunks.has(chunk.id))
|
||||
const enabledCount = selectedChunksList.filter((chunk) => chunk.enabled).length
|
||||
const disabledCount = selectedChunksList.filter((chunk) => !chunk.enabled).length
|
||||
|
||||
const isAllSelected = displayChunks.length > 0 && selectedChunks.size === displayChunks.length
|
||||
|
||||
const handleDocumentTagsUpdate = useCallback(
|
||||
(tagData: Record<string, string>) => {
|
||||
updateDocumentInStore(knowledgeBaseId, documentId, tagData)
|
||||
setDocumentData((prev) => (prev ? { ...prev, ...tagData } : null))
|
||||
/**
|
||||
* Handle right-click on a chunk row
|
||||
*/
|
||||
const handleChunkContextMenu = useCallback(
|
||||
(e: React.MouseEvent, chunk: ChunkData) => {
|
||||
setContextMenuChunk(chunk)
|
||||
baseHandleContextMenu(e)
|
||||
},
|
||||
[knowledgeBaseId, documentId, updateDocumentInStore]
|
||||
[baseHandleContextMenu]
|
||||
)
|
||||
|
||||
if (isLoadingDocument) {
|
||||
/**
|
||||
* Handle right-click on empty space (table container)
|
||||
*/
|
||||
const handleEmptyContextMenu = useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
setContextMenuChunk(null)
|
||||
baseHandleContextMenu(e)
|
||||
},
|
||||
[baseHandleContextMenu]
|
||||
)
|
||||
|
||||
/**
|
||||
* Handle context menu close
|
||||
*/
|
||||
const handleContextMenuClose = useCallback(() => {
|
||||
closeContextMenu()
|
||||
setContextMenuChunk(null)
|
||||
}, [closeContextMenu])
|
||||
|
||||
const handleDocumentTagsUpdate = useCallback(() => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: knowledgeKeys.document(knowledgeBaseId, documentId),
|
||||
})
|
||||
}, [knowledgeBaseId, documentId, queryClient])
|
||||
|
||||
const prevDocumentIdRef = useRef<string>(documentId)
|
||||
const isNavigatingToNewDoc = prevDocumentIdRef.current !== documentId
|
||||
|
||||
useEffect(() => {
|
||||
if (documentData && documentData.id === documentId) {
|
||||
prevDocumentIdRef.current = documentId
|
||||
}
|
||||
}, [documentData, documentId])
|
||||
|
||||
const isFetchingNewDoc = isNavigatingToNewDoc && isFetchingChunks
|
||||
|
||||
if (isLoadingDocument || isFetchingNewDoc) {
|
||||
return (
|
||||
<DocumentLoading
|
||||
knowledgeBaseId={knowledgeBaseId}
|
||||
@@ -892,7 +850,10 @@ export function Document({
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
|
||||
<div className='mt-[12px] flex flex-1 flex-col overflow-hidden'>
|
||||
<div
|
||||
className='mt-[12px] flex flex-1 flex-col overflow-hidden'
|
||||
onContextMenu={handleEmptyContextMenu}
|
||||
>
|
||||
{displayChunks.length === 0 && documentData?.processingStatus === 'completed' ? (
|
||||
<div className='mt-[10px] flex h-64 items-center justify-center rounded-lg border border-muted-foreground/25 bg-muted/20'>
|
||||
<div className='text-center'>
|
||||
@@ -990,6 +951,7 @@ export function Document({
|
||||
key={chunk.id}
|
||||
className='cursor-pointer hover:bg-[var(--surface-2)]'
|
||||
onClick={() => handleChunkClick(chunk)}
|
||||
onContextMenu={(e) => handleChunkContextMenu(e, chunk)}
|
||||
>
|
||||
<TableCell
|
||||
className='w-[52px] py-[8px]'
|
||||
@@ -1152,16 +1114,13 @@ export function Document({
|
||||
knowledgeBaseId={knowledgeBaseId}
|
||||
isOpen={isModalOpen}
|
||||
onClose={handleCloseModal}
|
||||
onChunkUpdate={(updatedChunk: ChunkData) => {
|
||||
updateChunk(updatedChunk.id, updatedChunk)
|
||||
setSelectedChunk(updatedChunk)
|
||||
}}
|
||||
allChunks={displayChunks}
|
||||
currentPage={currentPage}
|
||||
totalPages={totalPages}
|
||||
onNavigateToChunk={(chunk: ChunkData) => {
|
||||
setSelectedChunk(chunk)
|
||||
}}
|
||||
maxChunkSize={knowledgeBase?.chunkingConfig?.maxSize}
|
||||
onNavigateToPage={async (page: number, selectChunk: 'first' | 'last') => {
|
||||
await goToPage(page)
|
||||
|
||||
@@ -1173,7 +1132,6 @@ export function Document({
|
||||
setSelectedChunk(displayChunks[displayChunks.length - 1])
|
||||
}
|
||||
} else {
|
||||
// Retry after a short delay if chunks aren't loaded yet
|
||||
setTimeout(checkAndSelectChunk, 100)
|
||||
}
|
||||
}
|
||||
@@ -1188,7 +1146,6 @@ export function Document({
|
||||
onOpenChange={setIsCreateChunkModalOpen}
|
||||
document={documentData}
|
||||
knowledgeBaseId={knowledgeBaseId}
|
||||
onChunkCreated={handleChunkCreated}
|
||||
/>
|
||||
|
||||
{/* Delete Chunk Modal */}
|
||||
@@ -1198,7 +1155,6 @@ export function Document({
|
||||
documentId={documentId}
|
||||
isOpen={isDeleteModalOpen}
|
||||
onClose={handleCloseDeleteModal}
|
||||
onChunkDeleted={handleChunkDeleted}
|
||||
/>
|
||||
|
||||
{/* Bulk Action Bar */}
|
||||
@@ -1242,6 +1198,56 @@ export function Document({
|
||||
</ModalFooter>
|
||||
</ModalContent>
|
||||
</Modal>
|
||||
|
||||
<ChunkContextMenu
|
||||
isOpen={isContextMenuOpen}
|
||||
position={contextMenuPosition}
|
||||
menuRef={menuRef}
|
||||
onClose={handleContextMenuClose}
|
||||
hasChunk={contextMenuChunk !== null}
|
||||
isChunkEnabled={contextMenuChunk?.enabled ?? true}
|
||||
onOpenInNewTab={
|
||||
contextMenuChunk
|
||||
? () => {
|
||||
const url = `/workspace/${workspaceId}/knowledge/${knowledgeBaseId}/${documentId}?chunk=${contextMenuChunk.id}`
|
||||
window.open(url, '_blank')
|
||||
}
|
||||
: undefined
|
||||
}
|
||||
onEdit={
|
||||
contextMenuChunk
|
||||
? () => {
|
||||
setSelectedChunk(contextMenuChunk)
|
||||
setIsModalOpen(true)
|
||||
}
|
||||
: undefined
|
||||
}
|
||||
onCopyContent={
|
||||
contextMenuChunk
|
||||
? () => {
|
||||
navigator.clipboard.writeText(contextMenuChunk.content)
|
||||
}
|
||||
: undefined
|
||||
}
|
||||
onToggleEnabled={
|
||||
contextMenuChunk && userPermissions.canEdit
|
||||
? () => handleToggleEnabled(contextMenuChunk.id)
|
||||
: undefined
|
||||
}
|
||||
onDelete={
|
||||
contextMenuChunk && userPermissions.canEdit
|
||||
? () => handleDeleteChunk(contextMenuChunk.id)
|
||||
: undefined
|
||||
}
|
||||
onAddChunk={
|
||||
userPermissions.canEdit && documentData?.processingStatus !== 'failed'
|
||||
? () => setIsCreateChunkModalOpen(true)
|
||||
: undefined
|
||||
}
|
||||
disableToggleEnabled={!userPermissions.canEdit}
|
||||
disableDelete={!userPermissions.canEdit}
|
||||
disableAddChunk={!userPermissions.canEdit || documentData?.processingStatus === 'failed'}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import { useCallback, useEffect, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { format } from 'date-fns'
|
||||
import {
|
||||
@@ -41,13 +41,16 @@ import { SearchHighlight } from '@/components/ui/search-highlight'
|
||||
import { Skeleton } from '@/components/ui/skeleton'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
|
||||
import type { DocumentData } from '@/lib/knowledge/types'
|
||||
import {
|
||||
ActionBar,
|
||||
AddDocumentsModal,
|
||||
BaseTagsModal,
|
||||
DocumentContextMenu,
|
||||
} from '@/app/workspace/[workspaceId]/knowledge/[id]/components'
|
||||
import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
|
||||
import {
|
||||
useKnowledgeBase,
|
||||
useKnowledgeBaseDocuments,
|
||||
@@ -57,7 +60,6 @@ import {
|
||||
type TagDefinition,
|
||||
useKnowledgeBaseTagDefinitions,
|
||||
} from '@/hooks/use-knowledge-base-tag-definitions'
|
||||
import type { DocumentData } from '@/stores/knowledge/store'
|
||||
|
||||
const logger = createLogger('KnowledgeBase')
|
||||
|
||||
@@ -429,6 +431,15 @@ export function KnowledgeBase({
|
||||
const [currentPage, setCurrentPage] = useState(1)
|
||||
const [sortBy, setSortBy] = useState<DocumentSortField>('uploadedAt')
|
||||
const [sortOrder, setSortOrder] = useState<SortOrder>('desc')
|
||||
const [contextMenuDocument, setContextMenuDocument] = useState<DocumentData | null>(null)
|
||||
|
||||
const {
|
||||
isOpen: isContextMenuOpen,
|
||||
position: contextMenuPosition,
|
||||
menuRef,
|
||||
handleContextMenu: baseHandleContextMenu,
|
||||
closeMenu: closeContextMenu,
|
||||
} = useContextMenu()
|
||||
|
||||
const {
|
||||
knowledgeBase,
|
||||
@@ -440,6 +451,8 @@ export function KnowledgeBase({
|
||||
documents,
|
||||
pagination,
|
||||
isLoading: isLoadingDocuments,
|
||||
isFetching: isFetchingDocuments,
|
||||
isPlaceholderData: isPlaceholderDocuments,
|
||||
error: documentsError,
|
||||
updateDocument,
|
||||
refreshDocuments,
|
||||
@@ -591,7 +604,6 @@ export function KnowledgeBase({
|
||||
|
||||
const newEnabled = !document.enabled
|
||||
|
||||
// Optimistic update - immediately update the UI
|
||||
updateDocument(docId, { enabled: newEnabled })
|
||||
|
||||
try {
|
||||
@@ -612,11 +624,9 @@ export function KnowledgeBase({
|
||||
const result = await response.json()
|
||||
|
||||
if (!result.success) {
|
||||
// Revert on failure
|
||||
updateDocument(docId, { enabled: !newEnabled })
|
||||
}
|
||||
} catch (err) {
|
||||
// Revert on error
|
||||
updateDocument(docId, { enabled: !newEnabled })
|
||||
logger.error('Error updating document:', err)
|
||||
}
|
||||
@@ -840,7 +850,6 @@ export function KnowledgeBase({
|
||||
const result = await response.json()
|
||||
|
||||
if (result.success) {
|
||||
// Update successful documents in the store
|
||||
result.data.updatedDocuments.forEach((updatedDoc: { id: string; enabled: boolean }) => {
|
||||
updateDocument(updatedDoc.id, { enabled: updatedDoc.enabled })
|
||||
})
|
||||
@@ -848,7 +857,6 @@ export function KnowledgeBase({
|
||||
logger.info(`Successfully enabled ${result.data.successCount} documents`)
|
||||
}
|
||||
|
||||
// Clear selection after successful operation
|
||||
setSelectedDocuments(new Set())
|
||||
} catch (err) {
|
||||
logger.error('Error enabling documents:', err)
|
||||
@@ -958,7 +966,49 @@ export function KnowledgeBase({
|
||||
const enabledCount = selectedDocumentsList.filter((doc) => doc.enabled).length
|
||||
const disabledCount = selectedDocumentsList.filter((doc) => !doc.enabled).length
|
||||
|
||||
if ((isLoadingKnowledgeBase || isLoadingDocuments) && !knowledgeBase && documents.length === 0) {
|
||||
/**
|
||||
* Handle right-click on a document row
|
||||
*/
|
||||
const handleDocumentContextMenu = useCallback(
|
||||
(e: React.MouseEvent, doc: DocumentData) => {
|
||||
setContextMenuDocument(doc)
|
||||
baseHandleContextMenu(e)
|
||||
},
|
||||
[baseHandleContextMenu]
|
||||
)
|
||||
|
||||
/**
|
||||
* Handle right-click on empty space (table container)
|
||||
*/
|
||||
const handleEmptyContextMenu = useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
setContextMenuDocument(null)
|
||||
baseHandleContextMenu(e)
|
||||
},
|
||||
[baseHandleContextMenu]
|
||||
)
|
||||
|
||||
/**
|
||||
* Handle context menu close
|
||||
*/
|
||||
const handleContextMenuClose = useCallback(() => {
|
||||
closeContextMenu()
|
||||
setContextMenuDocument(null)
|
||||
}, [closeContextMenu])
|
||||
|
||||
const prevKnowledgeBaseIdRef = useRef<string>(id)
|
||||
const isNavigatingToNewKB = prevKnowledgeBaseIdRef.current !== id
|
||||
|
||||
useEffect(() => {
|
||||
if (knowledgeBase && knowledgeBase.id === id) {
|
||||
prevKnowledgeBaseIdRef.current = id
|
||||
}
|
||||
}, [knowledgeBase, id])
|
||||
|
||||
const isInitialLoad = isLoadingKnowledgeBase && !knowledgeBase
|
||||
const isFetchingNewKB = isNavigatingToNewKB && isFetchingDocuments
|
||||
|
||||
if (isInitialLoad || isFetchingNewKB) {
|
||||
return <KnowledgeBaseLoading knowledgeBaseName={knowledgeBaseName} />
|
||||
}
|
||||
|
||||
@@ -1106,7 +1156,7 @@ export function KnowledgeBase({
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className='mt-[12px] flex flex-1 flex-col'>
|
||||
<div className='mt-[12px] flex flex-1 flex-col' onContextMenu={handleEmptyContextMenu}>
|
||||
{isLoadingDocuments && documents.length === 0 ? (
|
||||
<DocumentTableSkeleton rowCount={5} />
|
||||
) : documents.length === 0 ? (
|
||||
@@ -1168,6 +1218,7 @@ export function KnowledgeBase({
|
||||
handleDocumentClick(doc.id)
|
||||
}
|
||||
}}
|
||||
onContextMenu={(e) => handleDocumentContextMenu(e, doc)}
|
||||
>
|
||||
<TableCell className='w-[28px] py-[8px] pr-0 pl-0'>
|
||||
<div className='flex items-center justify-center'>
|
||||
@@ -1505,7 +1556,6 @@ export function KnowledgeBase({
|
||||
onOpenChange={setShowAddDocumentsModal}
|
||||
knowledgeBaseId={id}
|
||||
chunkingConfig={knowledgeBase?.chunkingConfig}
|
||||
onUploadComplete={refreshDocuments}
|
||||
/>
|
||||
|
||||
<ActionBar
|
||||
@@ -1517,6 +1567,67 @@ export function KnowledgeBase({
|
||||
disabledCount={disabledCount}
|
||||
isLoading={isBulkOperating}
|
||||
/>
|
||||
|
||||
<DocumentContextMenu
|
||||
isOpen={isContextMenuOpen}
|
||||
position={contextMenuPosition}
|
||||
menuRef={menuRef}
|
||||
onClose={handleContextMenuClose}
|
||||
hasDocument={contextMenuDocument !== null}
|
||||
isDocumentEnabled={contextMenuDocument?.enabled ?? true}
|
||||
hasTags={
|
||||
contextMenuDocument
|
||||
? getDocumentTags(contextMenuDocument, tagDefinitions).length > 0
|
||||
: false
|
||||
}
|
||||
onOpenInNewTab={
|
||||
contextMenuDocument
|
||||
? () => {
|
||||
const urlParams = new URLSearchParams({
|
||||
kbName: knowledgeBaseName,
|
||||
docName: contextMenuDocument.filename || 'Document',
|
||||
})
|
||||
window.open(
|
||||
`/workspace/${workspaceId}/knowledge/${id}/${contextMenuDocument.id}?${urlParams.toString()}`,
|
||||
'_blank'
|
||||
)
|
||||
}
|
||||
: undefined
|
||||
}
|
||||
onToggleEnabled={
|
||||
contextMenuDocument && userPermissions.canEdit
|
||||
? () => handleToggleEnabled(contextMenuDocument.id)
|
||||
: undefined
|
||||
}
|
||||
onViewTags={
|
||||
contextMenuDocument
|
||||
? () => {
|
||||
const urlParams = new URLSearchParams({
|
||||
kbName: knowledgeBaseName,
|
||||
docName: contextMenuDocument.filename || 'Document',
|
||||
})
|
||||
router.push(
|
||||
`/workspace/${workspaceId}/knowledge/${id}/${contextMenuDocument.id}?${urlParams.toString()}`
|
||||
)
|
||||
}
|
||||
: undefined
|
||||
}
|
||||
onDelete={
|
||||
contextMenuDocument && userPermissions.canEdit
|
||||
? () => handleDeleteDocument(contextMenuDocument.id)
|
||||
: undefined
|
||||
}
|
||||
onAddDocument={userPermissions.canEdit ? handleAddDocuments : undefined}
|
||||
disableToggleEnabled={
|
||||
!userPermissions.canEdit ||
|
||||
contextMenuDocument?.processingStatus === 'processing' ||
|
||||
contextMenuDocument?.processingStatus === 'pending'
|
||||
}
|
||||
disableDelete={
|
||||
!userPermissions.canEdit || contextMenuDocument?.processingStatus === 'processing'
|
||||
}
|
||||
disableAddDocument={!userPermissions.canEdit}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -33,7 +33,6 @@ interface AddDocumentsModalProps {
|
||||
minSize: number
|
||||
overlap: number
|
||||
}
|
||||
onUploadComplete?: () => void
|
||||
}
|
||||
|
||||
export function AddDocumentsModal({
|
||||
@@ -41,7 +40,6 @@ export function AddDocumentsModal({
|
||||
onOpenChange,
|
||||
knowledgeBaseId,
|
||||
chunkingConfig,
|
||||
onUploadComplete,
|
||||
}: AddDocumentsModalProps) {
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
@@ -54,11 +52,6 @@ export function AddDocumentsModal({
|
||||
|
||||
const { isUploading, uploadProgress, uploadFiles, uploadError, clearError } = useKnowledgeUpload({
|
||||
workspaceId,
|
||||
onUploadComplete: () => {
|
||||
logger.info(`Successfully uploaded ${files.length} files`)
|
||||
onUploadComplete?.()
|
||||
handleClose()
|
||||
},
|
||||
})
|
||||
|
||||
useEffect(() => {
|
||||
@@ -219,6 +212,8 @@ export function AddDocumentsModal({
|
||||
chunkOverlap: chunkingConfig?.overlap || 200,
|
||||
recipe: 'default',
|
||||
})
|
||||
logger.info(`Successfully uploaded ${files.length} files`)
|
||||
handleClose()
|
||||
} catch (error) {
|
||||
logger.error('Error uploading files:', error)
|
||||
}
|
||||
|
||||
@@ -0,0 +1,141 @@
'use client'

import { Popover, PopoverAnchor, PopoverContent, PopoverItem } from '@/components/emcn'

interface DocumentContextMenuProps {
  isOpen: boolean
  position: { x: number; y: number }
  menuRef: React.RefObject<HTMLDivElement | null>
  onClose: () => void
  /**
   * Document-specific actions (shown when right-clicking on a document)
   */
  onOpenInNewTab?: () => void
  onToggleEnabled?: () => void
  onViewTags?: () => void
  onDelete?: () => void
  /**
   * Empty space action (shown when right-clicking on empty space)
   */
  onAddDocument?: () => void
  /**
   * Whether the document is currently enabled
   */
  isDocumentEnabled?: boolean
  /**
   * Whether a document is selected (vs empty space)
   */
  hasDocument: boolean
  /**
   * Whether the document has tags to view
   */
  hasTags?: boolean
  /**
   * Whether toggle enabled is disabled
   */
  disableToggleEnabled?: boolean
  /**
   * Whether delete is disabled
   */
  disableDelete?: boolean
  /**
   * Whether add document is disabled
   */
  disableAddDocument?: boolean
}

/**
 * Context menu for documents table.
 * Shows document actions when right-clicking a row, or "Add Document" when right-clicking empty space.
 */
export function DocumentContextMenu({
  isOpen,
  position,
  menuRef,
  onClose,
  onOpenInNewTab,
  onToggleEnabled,
  onViewTags,
  onDelete,
  onAddDocument,
  isDocumentEnabled = true,
  hasDocument,
  hasTags = false,
  disableToggleEnabled = false,
  disableDelete = false,
  disableAddDocument = false,
}: DocumentContextMenuProps) {
  return (
    <Popover open={isOpen} onOpenChange={onClose} variant='secondary' size='sm'>
      <PopoverAnchor
        style={{
          position: 'fixed',
          left: `${position.x}px`,
          top: `${position.y}px`,
          width: '1px',
          height: '1px',
        }}
      />
      <PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
        {hasDocument ? (
          <>
            {onOpenInNewTab && (
              <PopoverItem
                onClick={() => {
                  onOpenInNewTab()
                  onClose()
                }}
              >
                Open in new tab
              </PopoverItem>
            )}
            {hasTags && onViewTags && (
              <PopoverItem
                onClick={() => {
                  onViewTags()
                  onClose()
                }}
              >
                View tags
              </PopoverItem>
            )}
            {onToggleEnabled && (
              <PopoverItem
                disabled={disableToggleEnabled}
                onClick={() => {
                  onToggleEnabled()
                  onClose()
                }}
              >
                {isDocumentEnabled ? 'Disable' : 'Enable'}
              </PopoverItem>
            )}
            {onDelete && (
              <PopoverItem
                disabled={disableDelete}
                onClick={() => {
                  onDelete()
                  onClose()
                }}
              >
                Delete
              </PopoverItem>
            )}
          </>
        ) : (
          onAddDocument && (
            <PopoverItem
              disabled={disableAddDocument}
              onClick={() => {
                onAddDocument()
                onClose()
              }}
            >
              Add document
            </PopoverItem>
          )
        )}
      </PopoverContent>
    </Popover>
  )
}
@@ -0,0 +1 @@
export { DocumentContextMenu } from './document-context-menu'

@@ -1,3 +1,4 @@
export { ActionBar } from './action-bar/action-bar'
export { AddDocumentsModal } from './add-documents-modal/add-documents-modal'
export { BaseTagsModal } from './base-tags-modal/base-tags-modal'
export { DocumentContextMenu } from './document-context-menu'
@@ -216,6 +216,7 @@ export function BaseCard({
|
||||
onClick={handleClick}
|
||||
onKeyDown={handleKeyDown}
|
||||
onContextMenu={handleContextMenu}
|
||||
data-kb-card
|
||||
>
|
||||
<div className='group flex h-full flex-col gap-[12px] rounded-[4px] bg-[var(--surface-3)] px-[8px] py-[6px] transition-colors hover:bg-[var(--surface-4)] dark:bg-[var(--surface-4)] dark:hover:bg-[var(--surface-5)]'>
|
||||
<div className='flex items-center justify-between gap-[8px]'>
|
||||
@@ -261,6 +262,7 @@ export function BaseCard({
|
||||
onClose={closeContextMenu}
|
||||
onOpenInNewTab={handleOpenInNewTab}
|
||||
onViewTags={handleViewTags}
|
||||
onCopyId={id ? () => navigator.clipboard.writeText(id) : undefined}
|
||||
onEdit={handleEdit}
|
||||
onDelete={handleDelete}
|
||||
showOpenInNewTab={true}
|
||||
|
||||
@@ -3,6 +3,7 @@
import { useEffect, useRef, useState } from 'react'
import { zodResolver } from '@hookform/resolvers/zod'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { Loader2, RotateCcw, X } from 'lucide-react'
import { useParams } from 'next/navigation'
import { useForm } from 'react-hook-form'
@@ -22,7 +23,7 @@ import { cn } from '@/lib/core/utils/cn'
import { formatFileSize, validateKnowledgeBaseFile } from '@/lib/uploads/utils/file-utils'
import { ACCEPT_ATTRIBUTE } from '@/lib/uploads/utils/validation'
import { useKnowledgeUpload } from '@/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload'
import type { KnowledgeBaseData } from '@/stores/knowledge/store'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

const logger = createLogger('CreateBaseModal')

@@ -33,7 +34,6 @@ interface FileWithPreview extends File {
interface CreateBaseModalProps {
open: boolean
onOpenChange: (open: boolean) => void
onKnowledgeBaseCreated?: (knowledgeBase: KnowledgeBaseData) => void
}

const FormSchema = z
@@ -79,13 +79,10 @@ interface SubmitStatus {
message: string
}

export function CreateBaseModal({
open,
onOpenChange,
onKnowledgeBaseCreated,
}: CreateBaseModalProps) {
export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
const params = useParams()
const workspaceId = params.workspaceId as string
const queryClient = useQueryClient()

const fileInputRef = useRef<HTMLInputElement>(null)
const [isSubmitting, setIsSubmitting] = useState(false)
@@ -100,9 +97,6 @@ export function CreateBaseModal({

const { uploadFiles, isUploading, uploadProgress, uploadError, clearError } = useKnowledgeUpload({
workspaceId,
onUploadComplete: (uploadedFiles) => {
logger.info(`Successfully uploaded ${uploadedFiles.length} files`)
},
})

const handleClose = (open: boolean) => {
@@ -300,13 +294,10 @@ export function CreateBaseModal({
logger.info(`Successfully uploaded ${uploadedFiles.length} files`)
logger.info(`Started processing ${uploadedFiles.length} documents in the background`)

newKnowledgeBase.docCount = uploadedFiles.length

if (onKnowledgeBaseCreated) {
onKnowledgeBaseCreated(newKnowledgeBase)
}
await queryClient.invalidateQueries({
queryKey: knowledgeKeys.list(workspaceId),
})
} catch (uploadError) {
// If file upload fails completely, delete the knowledge base to avoid orphaned empty KB
logger.error('File upload failed, deleting knowledge base:', uploadError)
try {
await fetch(`/api/knowledge/${newKnowledgeBase.id}`, {
@@ -319,9 +310,9 @@ export function CreateBaseModal({
throw uploadError
}
} else {
if (onKnowledgeBaseCreated) {
onKnowledgeBaseCreated(newKnowledgeBase)
}
await queryClient.invalidateQueries({
queryKey: knowledgeKeys.list(workspaceId),
})
}

files.forEach((file) => URL.revokeObjectURL(file.preview))

@@ -5,3 +5,4 @@ export { EditKnowledgeBaseModal } from './edit-knowledge-base-modal/edit-knowled
export { getDocumentIcon } from './icons/document-icons'
export { KnowledgeBaseContextMenu } from './knowledge-base-context-menu/knowledge-base-context-menu'
export { KnowledgeHeader } from './knowledge-header/knowledge-header'
export { KnowledgeListContextMenu } from './knowledge-list-context-menu/knowledge-list-context-menu'

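A minimal caller-side sketch of CreateBaseModal after the onKnowledgeBaseCreated callback was removed, assuming the parent reads the list through useKnowledgeBasesList; since the modal now invalidates knowledgeKeys.list(workspaceId) itself, the list refetches without a callback. Names outside this diff are illustrative.

import { useState } from 'react'
import { CreateBaseModal } from '@/app/workspace/[workspaceId]/knowledge/components'
import { useKnowledgeBasesList } from '@/hooks/use-knowledge'

// Sketch only: no created-callback is threaded through anymore; the invalidated
// list query refetches on its own after the modal creates a knowledge base.
function KnowledgeListSketch({ workspaceId }: { workspaceId: string }) {
  const [isCreateModalOpen, setIsCreateModalOpen] = useState(false)
  const { knowledgeBases } = useKnowledgeBasesList(workspaceId)

  return (
    <>
      <button onClick={() => setIsCreateModalOpen(true)}>New knowledge base</button>
      <ul>
        {knowledgeBases.map((kb) => (
          <li key={kb.id}>{kb.name}</li>
        ))}
      </ul>
      <CreateBaseModal open={isCreateModalOpen} onOpenChange={setIsCreateModalOpen} />
    </>
  )
}
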
@@ -27,6 +27,10 @@ interface KnowledgeBaseContextMenuProps {
* Callback when view tags is clicked
*/
onViewTags?: () => void
/**
* Callback when copy ID is clicked
*/
onCopyId?: () => void
/**
* Callback when edit is clicked
*/
@@ -78,6 +82,7 @@ export function KnowledgeBaseContextMenu({
onClose,
onOpenInNewTab,
onViewTags,
onCopyId,
onEdit,
onDelete,
showOpenInNewTab = true,
@@ -119,6 +124,16 @@ export function KnowledgeBaseContextMenu({
View tags
</PopoverItem>
)}
{onCopyId && (
<PopoverItem
onClick={() => {
onCopyId()
onClose()
}}
>
Copy ID
</PopoverItem>
)}
{showEdit && onEdit && (
<PopoverItem
disabled={disableEdit}

@@ -2,6 +2,7 @@

import { useEffect, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { AlertTriangle, ChevronDown, LibraryBig, MoreHorizontal } from 'lucide-react'
import Link from 'next/link'
import {
@@ -14,7 +15,7 @@ import {
} from '@/components/emcn'
import { Trash } from '@/components/emcn/icons/trash'
import { filterButtonClass } from '@/app/workspace/[workspaceId]/knowledge/components/constants'
import { useKnowledgeStore } from '@/stores/knowledge/store'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

const logger = createLogger('KnowledgeHeader')

@@ -53,7 +54,7 @@ interface Workspace {
}

export function KnowledgeHeader({ breadcrumbs, options }: KnowledgeHeaderProps) {
const { updateKnowledgeBase } = useKnowledgeStore()
const queryClient = useQueryClient()
const [isActionsPopoverOpen, setIsActionsPopoverOpen] = useState(false)
const [isWorkspacePopoverOpen, setIsWorkspacePopoverOpen] = useState(false)
const [workspaces, setWorkspaces] = useState<Workspace[]>([])
@@ -124,11 +125,11 @@ export function KnowledgeHeader({ breadcrumbs, options }: KnowledgeHeaderProps)
`Knowledge base workspace updated: ${options.knowledgeBaseId} -> ${workspaceId}`
)

// Notify parent component of the change to refresh data
await options.onWorkspaceChange?.(workspaceId)
await queryClient.invalidateQueries({
queryKey: knowledgeKeys.detail(options.knowledgeBaseId),
})

// Update the store after refresh to ensure consistency
updateKnowledgeBase(options.knowledgeBaseId, { workspaceId: workspaceId || undefined })
await options.onWorkspaceChange?.(workspaceId)
} else {
throw new Error(result.error || 'Failed to update workspace')
}

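The workspace-change hunk above reorders the steps: invalidate the detail query first, patch the local store afterwards, then notify the parent. A condensed sketch of that flow; the function name and the getState() call are assumptions, not part of the diff.

import type { QueryClient } from '@tanstack/react-query'
import { useKnowledgeStore } from '@/stores/knowledge/store'
import { knowledgeKeys } from '@/hooks/queries/knowledge'

// Sketch only: refetch server state, then update the local store, then notify.
async function applyWorkspaceChangeSketch(
  queryClient: QueryClient,
  knowledgeBaseId: string,
  workspaceId: string,
  onWorkspaceChange?: (workspaceId: string) => void | Promise<void>
) {
  await queryClient.invalidateQueries({ queryKey: knowledgeKeys.detail(knowledgeBaseId) })
  useKnowledgeStore.getState().updateKnowledgeBase(knowledgeBaseId, {
    workspaceId: workspaceId || undefined,
  })
  await onWorkspaceChange?.(workspaceId)
}
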
@@ -0,0 +1,71 @@
'use client'

import { Popover, PopoverAnchor, PopoverContent, PopoverItem } from '@/components/emcn'

interface KnowledgeListContextMenuProps {
/**
* Whether the context menu is open
*/
isOpen: boolean
/**
* Position of the context menu
*/
position: { x: number; y: number }
/**
* Ref for the menu element
*/
menuRef: React.RefObject<HTMLDivElement | null>
/**
* Callback when menu should close
*/
onClose: () => void
/**
* Callback when add knowledge base is clicked
*/
onAddKnowledgeBase?: () => void
/**
* Whether the add option is disabled
* @default false
*/
disableAdd?: boolean
}

/**
* Context menu component for the knowledge base list page.
* Displays "Add knowledge base" option when right-clicking on empty space.
*/
export function KnowledgeListContextMenu({
isOpen,
position,
menuRef,
onClose,
onAddKnowledgeBase,
disableAdd = false,
}: KnowledgeListContextMenuProps) {
return (
<Popover open={isOpen} onOpenChange={onClose} variant='secondary' size='sm'>
<PopoverAnchor
style={{
position: 'fixed',
left: `${position.x}px`,
top: `${position.y}px`,
width: '1px',
height: '1px',
}}
/>
<PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
{onAddKnowledgeBase && (
<PopoverItem
disabled={disableAdd}
onClick={() => {
onAddKnowledgeBase()
onClose()
}}
>
Add knowledge base
</PopoverItem>
)}
</PopoverContent>
</Popover>
)
}
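A usage sketch for the new component, mirroring how the knowledge list page further down in this diff wires it to useContextMenu; the container markup and prop sources are illustrative.

import { KnowledgeListContextMenu } from '@/app/workspace/[workspaceId]/knowledge/components'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'

// Sketch only: right-clicking the container opens the menu at the cursor.
function KnowledgeListAreaSketch({ canEdit, onAdd }: { canEdit: boolean; onAdd: () => void }) {
  const { isOpen, position, menuRef, handleContextMenu, closeMenu } = useContextMenu()

  return (
    <div onContextMenu={handleContextMenu}>
      {/* ...knowledge base cards... */}
      <KnowledgeListContextMenu
        isOpen={isOpen}
        position={position}
        menuRef={menuRef}
        onClose={closeMenu}
        onAddKnowledgeBase={onAdd}
        disableAdd={!canEdit}
      />
    </div>
  )
}
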
@@ -1,6 +1,8 @@
|
||||
import { useCallback, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { useQueryClient } from '@tanstack/react-query'
|
||||
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
|
||||
import { knowledgeKeys } from '@/hooks/queries/knowledge'
|
||||
|
||||
const logger = createLogger('KnowledgeUpload')
|
||||
|
||||
@@ -51,7 +53,6 @@ export interface ProcessingOptions {
|
||||
}
|
||||
|
||||
export interface UseKnowledgeUploadOptions {
|
||||
onUploadComplete?: (uploadedFiles: UploadedFile[]) => void
|
||||
onError?: (error: UploadError) => void
|
||||
workspaceId?: string
|
||||
}
|
||||
@@ -337,6 +338,7 @@ const getPresignedData = async (
|
||||
* Hook for managing file uploads to knowledge bases
|
||||
*/
|
||||
export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
|
||||
const queryClient = useQueryClient()
|
||||
const [isUploading, setIsUploading] = useState(false)
|
||||
const [uploadProgress, setUploadProgress] = useState<UploadProgress>({
|
||||
stage: 'idle',
|
||||
@@ -1071,7 +1073,9 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
|
||||
|
||||
logger.info(`Successfully started processing ${uploadedFiles.length} documents`)
|
||||
|
||||
options.onUploadComplete?.(uploadedFiles)
|
||||
await queryClient.invalidateQueries({
|
||||
queryKey: knowledgeKeys.detail(knowledgeBaseId),
|
||||
})
|
||||
|
||||
return uploadedFiles
|
||||
} catch (err) {
|
||||
|
||||
@@ -13,10 +13,12 @@ import {
|
||||
Tooltip,
|
||||
} from '@/components/emcn'
|
||||
import { Input } from '@/components/ui/input'
|
||||
import type { KnowledgeBaseData } from '@/lib/knowledge/types'
|
||||
import {
|
||||
BaseCard,
|
||||
BaseCardSkeletonGrid,
|
||||
CreateBaseModal,
|
||||
KnowledgeListContextMenu,
|
||||
} from '@/app/workspace/[workspaceId]/knowledge/components'
|
||||
import {
|
||||
SORT_OPTIONS,
|
||||
@@ -28,9 +30,9 @@ import {
|
||||
sortKnowledgeBases,
|
||||
} from '@/app/workspace/[workspaceId]/knowledge/utils/sort'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
|
||||
import { useDebounce } from '@/hooks/use-debounce'
|
||||
import { useKnowledgeBasesList } from '@/hooks/use-knowledge'
|
||||
import { type KnowledgeBaseData, useKnowledgeStore } from '@/stores/knowledge/store'
|
||||
|
||||
const logger = createLogger('Knowledge')
|
||||
|
||||
@@ -49,7 +51,7 @@ export function Knowledge() {
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
|
||||
const { knowledgeBases, isLoading, error, addKnowledgeBase, removeKnowledgeBase, refreshList } =
|
||||
const { knowledgeBases, isLoading, error, removeKnowledgeBase, updateKnowledgeBase } =
|
||||
useKnowledgeBasesList(workspaceId)
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
@@ -60,6 +62,37 @@ export function Knowledge() {
|
||||
const [sortBy, setSortBy] = useState<SortOption>('updatedAt')
|
||||
const [sortOrder, setSortOrder] = useState<SortOrder>('desc')
|
||||
|
||||
const {
|
||||
isOpen: isListContextMenuOpen,
|
||||
position: listContextMenuPosition,
|
||||
menuRef: listMenuRef,
|
||||
handleContextMenu: handleListContextMenu,
|
||||
closeMenu: closeListContextMenu,
|
||||
} = useContextMenu()
|
||||
|
||||
/**
|
||||
* Handle context menu on the content area - only show menu when clicking on empty space
|
||||
*/
|
||||
const handleContentContextMenu = useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
const target = e.target as HTMLElement
|
||||
const isOnCard = target.closest('[data-kb-card]')
|
||||
const isOnInteractive = target.closest('button, input, a, [role="button"]')
|
||||
|
||||
if (!isOnCard && !isOnInteractive) {
|
||||
handleListContextMenu(e)
|
||||
}
|
||||
},
|
||||
[handleListContextMenu]
|
||||
)
|
||||
|
||||
/**
|
||||
* Handle add knowledge base from context menu
|
||||
*/
|
||||
const handleAddKnowledgeBase = useCallback(() => {
|
||||
setIsCreateModalOpen(true)
|
||||
}, [])
|
||||
|
||||
const currentSortValue = `${sortBy}-${sortOrder}`
|
||||
const currentSortLabel =
|
||||
SORT_OPTIONS.find((opt) => opt.value === currentSortValue)?.label || 'Last Updated'
|
||||
@@ -74,22 +107,6 @@ export function Knowledge() {
|
||||
setIsSortPopoverOpen(false)
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback when a new knowledge base is created
|
||||
*/
|
||||
const handleKnowledgeBaseCreated = (newKnowledgeBase: KnowledgeBaseData) => {
|
||||
addKnowledgeBase(newKnowledgeBase)
|
||||
}
|
||||
|
||||
/**
|
||||
* Retry loading knowledge bases after an error
|
||||
*/
|
||||
const handleRetry = () => {
|
||||
refreshList()
|
||||
}
|
||||
|
||||
const { updateKnowledgeBase: updateKnowledgeBaseInStore } = useKnowledgeStore()
|
||||
|
||||
/**
|
||||
* Updates a knowledge base name and description
|
||||
*/
|
||||
@@ -112,13 +129,12 @@ export function Knowledge() {
|
||||
|
||||
if (result.success) {
|
||||
logger.info(`Knowledge base updated: ${id}`)
|
||||
updateKnowledgeBaseInStore(id, { name, description })
|
||||
await refreshList()
|
||||
updateKnowledgeBase(id, { name, description })
|
||||
} else {
|
||||
throw new Error(result.error || 'Failed to update knowledge base')
|
||||
}
|
||||
},
|
||||
[refreshList, updateKnowledgeBaseInStore]
|
||||
[updateKnowledgeBase]
|
||||
)
|
||||
|
||||
/**
|
||||
@@ -149,7 +165,6 @@ export function Knowledge() {
|
||||
|
||||
/**
|
||||
* Filter and sort knowledge bases based on search query and sort options
|
||||
* Memoized to prevent unnecessary recalculations on render
|
||||
*/
|
||||
const filteredAndSortedKnowledgeBases = useMemo(() => {
|
||||
const filtered = filterKnowledgeBases(knowledgeBases, debouncedSearchQuery)
|
||||
@@ -170,7 +185,6 @@ export function Knowledge() {
|
||||
|
||||
/**
|
||||
* Get empty state content based on current filters
|
||||
* Memoized to prevent unnecessary recalculations on render
|
||||
*/
|
||||
const emptyState = useMemo(() => {
|
||||
if (debouncedSearchQuery) {
|
||||
@@ -193,7 +207,10 @@ export function Knowledge() {
|
||||
<>
|
||||
<div className='flex h-full flex-1 flex-col'>
|
||||
<div className='flex flex-1 overflow-hidden'>
|
||||
<div className='flex flex-1 flex-col overflow-auto bg-white px-[24px] pt-[28px] pb-[24px] dark:bg-[var(--bg)]'>
|
||||
<div
|
||||
className='flex flex-1 flex-col overflow-auto bg-white px-[24px] pt-[28px] pb-[24px] dark:bg-[var(--bg)]'
|
||||
onContextMenu={handleContentContextMenu}
|
||||
>
|
||||
<div>
|
||||
<div className='flex items-start gap-[12px]'>
|
||||
<div className='flex h-[26px] w-[26px] items-center justify-center rounded-[6px] border border-[#5BB377] bg-[#E8F7EE] dark:border-[#1E5A3E] dark:bg-[#0F3D2C]'>
|
||||
@@ -307,11 +324,16 @@ export function Knowledge() {
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<CreateBaseModal
|
||||
open={isCreateModalOpen}
|
||||
onOpenChange={setIsCreateModalOpen}
|
||||
onKnowledgeBaseCreated={handleKnowledgeBaseCreated}
|
||||
<KnowledgeListContextMenu
|
||||
isOpen={isListContextMenuOpen}
|
||||
position={listContextMenuPosition}
|
||||
menuRef={listMenuRef}
|
||||
onClose={closeListContextMenu}
|
||||
onAddKnowledgeBase={handleAddKnowledgeBase}
|
||||
disableAdd={userPermissions.canEdit !== true}
|
||||
/>
|
||||
|
||||
<CreateBaseModal open={isCreateModalOpen} onOpenChange={setIsCreateModalOpen} />
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
import type { KnowledgeBaseData } from '@/stores/knowledge/store'
import type { KnowledgeBaseData } from '@/lib/knowledge/types'
import type { SortOption, SortOrder } from '../components/constants'

interface KnowledgeBaseWithDocCount extends KnowledgeBaseData {

@@ -2,7 +2,7 @@
|
||||
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { AlertCircle, Plus, X } from 'lucide-react'
|
||||
import { Plus, X } from 'lucide-react'
|
||||
import {
|
||||
Badge,
|
||||
Button,
|
||||
@@ -647,12 +647,7 @@ export function NotificationSettings({
|
||||
<div className='flex h-full flex-col gap-[16px]'>
|
||||
<div className='min-h-0 flex-1 overflow-y-auto'>
|
||||
{formErrors.general && (
|
||||
<div className='mb-[16px] rounded-[6px] border border-[var(--text-error)]/30 bg-[var(--text-error)]/10 p-[10px]'>
|
||||
<div className='flex items-start gap-[8px]'>
|
||||
<AlertCircle className='mt-0.5 h-4 w-4 shrink-0 text-[var(--text-error)]' />
|
||||
<p className='text-[12px] text-[var(--text-error)]'>{formErrors.general}</p>
|
||||
</div>
|
||||
</div>
|
||||
<p className='mb-[16px] text-[12px] text-[var(--text-error)]'>{formErrors.general}</p>
|
||||
)}
|
||||
|
||||
<div className='flex flex-col gap-[16px]'>
|
||||
|
||||
@@ -6,11 +6,11 @@ import { X } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Combobox, type ComboboxOption } from '@/components/emcn'
|
||||
import { PackageSearchIcon } from '@/components/icons'
|
||||
import type { KnowledgeBaseData } from '@/lib/knowledge/types'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { fetchKnowledgeBase, knowledgeKeys } from '@/hooks/queries/knowledge'
|
||||
import { useKnowledgeBasesList } from '@/hooks/use-knowledge'
|
||||
import type { KnowledgeBaseData } from '@/stores/knowledge/store'
|
||||
|
||||
interface KnowledgeBaseSelectorProps {
|
||||
blockId: string
|
||||
@@ -38,10 +38,8 @@ export function KnowledgeBaseSelector({
|
||||
error,
|
||||
} = useKnowledgeBasesList(workspaceId)
|
||||
|
||||
// Use the proper hook to get the current value and setter - this prevents infinite loops
|
||||
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)
|
||||
|
||||
// Use preview value when in preview mode, otherwise use store value
|
||||
const value = isPreview ? previewValue : storeValue
|
||||
|
||||
const isMultiSelect = subBlock.multiSelect === true
|
||||
|
||||
@@ -35,7 +35,7 @@ import { getDependsOnFields } from '@/blocks/utils'
|
||||
import { useMcpServers, useMcpToolsQuery } from '@/hooks/queries/mcp'
|
||||
import { useCredentialName } from '@/hooks/queries/oauth-credentials'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { useKnowledgeBaseName } from '@/hooks/use-knowledge-base-name'
|
||||
import { useKnowledgeBase } from '@/hooks/use-knowledge'
|
||||
import { useSelectorDisplayName } from '@/hooks/use-selector-display-name'
|
||||
import { useVariablesStore } from '@/stores/panel/variables/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
@@ -409,11 +409,10 @@ const SubBlockRow = ({
|
||||
planId: planIdValue,
|
||||
})
|
||||
|
||||
const knowledgeBaseDisplayName = useKnowledgeBaseName(
|
||||
subBlock?.type === 'knowledge-base-selector' && typeof rawValue === 'string'
|
||||
? rawValue
|
||||
: undefined
|
||||
const { knowledgeBase: kbForDisplayName } = useKnowledgeBase(
|
||||
subBlock?.type === 'knowledge-base-selector' && typeof rawValue === 'string' ? rawValue : ''
|
||||
)
|
||||
const knowledgeBaseDisplayName = kbForDisplayName?.name ?? null
|
||||
|
||||
const workflowMap = useWorkflowRegistry((state) => state.workflows)
|
||||
const workflowSelectionName =
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
|
||||
import { useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Badge, Button } from '@/components/emcn'
|
||||
import { UserAvatar } from '@/components/user-avatar/user-avatar'
|
||||
import { Avatar, AvatarFallback, AvatarImage, Badge, Button } from '@/components/emcn'
|
||||
import type { Invitation, Member, Organization } from '@/lib/workspaces/organization'
|
||||
import { getUserColor } from '@/app/workspace/[workspaceId]/w/utils/get-user-color'
|
||||
import { useCancelInvitation, useOrganizationMembers } from '@/hooks/queries/organization'
|
||||
|
||||
const logger = createLogger('TeamMembers')
|
||||
@@ -45,17 +45,14 @@ export function TeamMembers({
|
||||
isAdminOrOwner,
|
||||
onRemoveMember,
|
||||
}: TeamMembersProps) {
|
||||
// Track which invitations are being cancelled for individual loading states
|
||||
const [cancellingInvitations, setCancellingInvitations] = useState<Set<string>>(new Set())
|
||||
|
||||
// Fetch member usage data using React Query
|
||||
const { data: memberUsageResponse, isLoading: isLoadingUsage } = useOrganizationMembers(
|
||||
organization?.id || ''
|
||||
)
|
||||
|
||||
const cancelInvitationMutation = useCancelInvitation()
|
||||
|
||||
// Build usage data map from response
|
||||
const memberUsageData: Record<string, number> = {}
|
||||
if (memberUsageResponse?.data) {
|
||||
memberUsageResponse.data.forEach(
|
||||
@@ -67,10 +64,8 @@ export function TeamMembers({
|
||||
)
|
||||
}
|
||||
|
||||
// Combine members and pending invitations into a single list
|
||||
const teamItems: TeamMemberItem[] = []
|
||||
|
||||
// Add existing members
|
||||
if (organization.members) {
|
||||
organization.members.forEach((member: Member) => {
|
||||
const userId = member.user?.id
|
||||
@@ -94,7 +89,6 @@ export function TeamMembers({
|
||||
})
|
||||
}
|
||||
|
||||
// Add pending invitations
|
||||
const pendingInvitations = organization.invitations?.filter(
|
||||
(invitation) => invitation.status === 'pending'
|
||||
)
|
||||
@@ -109,7 +103,7 @@ export function TeamMembers({
|
||||
email: invitation.email,
|
||||
avatarInitial: emailPrefix.charAt(0).toUpperCase(),
|
||||
avatarUrl: null,
|
||||
userId: invitation.email, // Use email as fallback for color generation
|
||||
userId: invitation.email,
|
||||
usage: '-',
|
||||
invitation,
|
||||
}
|
||||
@@ -122,7 +116,6 @@ export function TeamMembers({
|
||||
return <div className='text-center text-[var(--text-muted)] text-sm'>No team members yet.</div>
|
||||
}
|
||||
|
||||
// Check if current user can leave (is a member but not owner)
|
||||
const currentUserMember = organization.members?.find((m) => m.user?.email === currentUserEmail)
|
||||
const canLeaveOrganization =
|
||||
currentUserMember && currentUserMember.role !== 'owner' && currentUserMember.user?.id
|
||||
@@ -149,24 +142,27 @@ export function TeamMembers({
|
||||
|
||||
return (
|
||||
<div className='flex flex-col gap-[16px]'>
|
||||
{/* Header - simple like account page */}
|
||||
{/* Header */}
|
||||
<div>
|
||||
<h4 className='font-medium text-[14px] text-[var(--text-primary)]'>Team Members</h4>
|
||||
</div>
|
||||
|
||||
{/* Members list - clean like account page */}
|
||||
{/* Members list */}
|
||||
<div className='flex flex-col gap-[16px]'>
|
||||
{teamItems.map((item) => (
|
||||
<div key={item.id} className='flex items-center justify-between'>
|
||||
{/* Left section: Avatar + Name/Role + Action buttons */}
|
||||
<div className='flex flex-1 items-center gap-[12px]'>
|
||||
{/* Avatar */}
|
||||
<UserAvatar
|
||||
userId={item.userId || item.email}
|
||||
userName={item.name}
|
||||
avatarUrl={item.avatarUrl}
|
||||
size={32}
|
||||
/>
|
||||
<Avatar size='sm'>
|
||||
{item.avatarUrl && <AvatarImage src={item.avatarUrl} alt={item.name} />}
|
||||
<AvatarFallback
|
||||
style={{ background: getUserColor(item.userId || item.email) }}
|
||||
className='border-0 text-white'
|
||||
>
|
||||
{item.avatarInitial}
|
||||
</AvatarFallback>
|
||||
</Avatar>
|
||||
|
||||
{/* Name and email */}
|
||||
<div className='min-w-0'>
|
||||
|
||||
@@ -1,66 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import { type CSSProperties, useEffect, useState } from 'react'
|
||||
import Image from 'next/image'
|
||||
import { getUserColor } from '@/app/workspace/[workspaceId]/w/utils/get-user-color'
|
||||
|
||||
interface UserAvatarProps {
|
||||
userId: string
|
||||
userName?: string | null
|
||||
avatarUrl?: string | null
|
||||
size?: number
|
||||
className?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Reusable user avatar component with error handling for image loading.
|
||||
* Falls back to colored circle with initials if image fails to load or is not available.
|
||||
*/
|
||||
export function UserAvatar({
|
||||
userId,
|
||||
userName,
|
||||
avatarUrl,
|
||||
size = 32,
|
||||
className = '',
|
||||
}: UserAvatarProps) {
|
||||
const [imageError, setImageError] = useState(false)
|
||||
const color = getUserColor(userId)
|
||||
const initials = userName ? userName.charAt(0).toUpperCase() : '?'
|
||||
const hasAvatar = Boolean(avatarUrl) && !imageError
|
||||
|
||||
// Reset error state when avatar URL changes
|
||||
useEffect(() => {
|
||||
setImageError(false)
|
||||
}, [avatarUrl])
|
||||
|
||||
const fontSize = Math.max(10, size / 2.5)
|
||||
|
||||
return (
|
||||
<div
|
||||
className={`relative flex flex-shrink-0 items-center justify-center overflow-hidden rounded-full font-semibold text-white ${className}`}
|
||||
style={
|
||||
{
|
||||
background: hasAvatar ? undefined : color,
|
||||
width: `${size}px`,
|
||||
height: `${size}px`,
|
||||
fontSize: `${fontSize}px`,
|
||||
} as CSSProperties
|
||||
}
|
||||
>
|
||||
{hasAvatar && avatarUrl ? (
|
||||
<Image
|
||||
src={avatarUrl}
|
||||
alt={userName ? `${userName}'s avatar` : 'User avatar'}
|
||||
fill
|
||||
sizes={`${size}px`}
|
||||
className='object-cover'
|
||||
referrerPolicy='no-referrer'
|
||||
unoptimized
|
||||
onError={() => setImageError(true)}
|
||||
/>
|
||||
) : (
|
||||
initials
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -1,14 +1,11 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import { keepPreviousData, useQuery } from '@tanstack/react-query'
|
||||
import type {
|
||||
ChunkData,
|
||||
ChunksPagination,
|
||||
DocumentData,
|
||||
DocumentsPagination,
|
||||
KnowledgeBaseData,
|
||||
} from '@/stores/knowledge/store'
|
||||
|
||||
const logger = createLogger('KnowledgeQueries')
|
||||
} from '@/lib/knowledge/types'
|
||||
|
||||
export const knowledgeKeys = {
|
||||
all: ['knowledge'] as const,
|
||||
@@ -17,14 +14,10 @@ export const knowledgeKeys = {
|
||||
[...knowledgeKeys.all, 'detail', knowledgeBaseId ?? ''] as const,
|
||||
documents: (knowledgeBaseId: string, paramsKey: string) =>
|
||||
[...knowledgeKeys.detail(knowledgeBaseId), 'documents', paramsKey] as const,
|
||||
document: (knowledgeBaseId: string, documentId: string) =>
|
||||
[...knowledgeKeys.detail(knowledgeBaseId), 'document', documentId] as const,
|
||||
chunks: (knowledgeBaseId: string, documentId: string, paramsKey: string) =>
|
||||
[
|
||||
...knowledgeKeys.detail(knowledgeBaseId),
|
||||
'document',
|
||||
documentId,
|
||||
'chunks',
|
||||
paramsKey,
|
||||
] as const,
|
||||
[...knowledgeKeys.document(knowledgeBaseId, documentId), 'chunks', paramsKey] as const,
|
||||
}
|
||||
|
||||
export async function fetchKnowledgeBases(workspaceId?: string): Promise<KnowledgeBaseData[]> {
|
||||
@@ -58,6 +51,27 @@ export async function fetchKnowledgeBase(knowledgeBaseId: string): Promise<Knowl
|
||||
return result.data
|
||||
}
|
||||
|
||||
export async function fetchDocument(
|
||||
knowledgeBaseId: string,
|
||||
documentId: string
|
||||
): Promise<DocumentData> {
|
||||
const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`)
|
||||
|
||||
if (!response.ok) {
|
||||
if (response.status === 404) {
|
||||
throw new Error('Document not found')
|
||||
}
|
||||
throw new Error(`Failed to fetch document: ${response.status} ${response.statusText}`)
|
||||
}
|
||||
|
||||
const result = await response.json()
|
||||
if (!result?.success || !result?.data) {
|
||||
throw new Error(result?.error || 'Failed to fetch document')
|
||||
}
|
||||
|
||||
return result.data
|
||||
}
|
||||
|
||||
export interface KnowledgeDocumentsParams {
|
||||
knowledgeBaseId: string
|
||||
search?: string
|
||||
@@ -192,6 +206,15 @@ export function useKnowledgeBaseQuery(knowledgeBaseId?: string) {
|
||||
})
|
||||
}
|
||||
|
||||
export function useDocumentQuery(knowledgeBaseId?: string, documentId?: string) {
|
||||
return useQuery({
|
||||
queryKey: knowledgeKeys.document(knowledgeBaseId ?? '', documentId ?? ''),
|
||||
queryFn: () => fetchDocument(knowledgeBaseId as string, documentId as string),
|
||||
enabled: Boolean(knowledgeBaseId && documentId),
|
||||
staleTime: 60 * 1000,
|
||||
})
|
||||
}
|
||||
|
||||
export const serializeDocumentParams = (params: KnowledgeDocumentsParams) =>
|
||||
JSON.stringify({
|
||||
search: params.search ?? '',
|
||||
@@ -212,6 +235,7 @@ export function useKnowledgeDocumentsQuery(
|
||||
queryKey: knowledgeKeys.documents(params.knowledgeBaseId, paramsKey),
|
||||
queryFn: () => fetchKnowledgeDocuments(params),
|
||||
enabled: (options?.enabled ?? true) && Boolean(params.knowledgeBaseId),
|
||||
staleTime: 60 * 1000,
|
||||
placeholderData: keepPreviousData,
|
||||
})
|
||||
}
|
||||
@@ -234,64 +258,7 @@ export function useKnowledgeChunksQuery(
|
||||
queryKey: knowledgeKeys.chunks(params.knowledgeBaseId, params.documentId, paramsKey),
|
||||
queryFn: () => fetchKnowledgeChunks(params),
|
||||
enabled: (options?.enabled ?? true) && Boolean(params.knowledgeBaseId && params.documentId),
|
||||
staleTime: 60 * 1000,
|
||||
placeholderData: keepPreviousData,
|
||||
})
|
||||
}
|
||||
|
||||
interface UpdateDocumentPayload {
|
||||
knowledgeBaseId: string
|
||||
documentId: string
|
||||
updates: Partial<DocumentData>
|
||||
}
|
||||
|
||||
export function useMutateKnowledgeDocument() {
|
||||
const queryClient = useQueryClient()
|
||||
return useMutation({
|
||||
mutationFn: async ({ knowledgeBaseId, documentId, updates }: UpdateDocumentPayload) => {
|
||||
const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`, {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(updates),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => ({}))
|
||||
throw new Error(errorData.error || 'Failed to update document')
|
||||
}
|
||||
|
||||
const result = await response.json()
|
||||
if (!result?.success) {
|
||||
throw new Error(result?.error || 'Failed to update document')
|
||||
}
|
||||
|
||||
return result
|
||||
},
|
||||
onMutate: async ({ knowledgeBaseId, documentId, updates }) => {
|
||||
await queryClient.cancelQueries({ queryKey: knowledgeKeys.detail(knowledgeBaseId) })
|
||||
|
||||
const documentQueries = queryClient
|
||||
.getQueriesData<KnowledgeDocumentsResponse>({
|
||||
queryKey: knowledgeKeys.detail(knowledgeBaseId),
|
||||
})
|
||||
.filter(([key]) => Array.isArray(key) && key.includes('documents'))
|
||||
|
||||
documentQueries.forEach(([key, data]) => {
|
||||
if (!data) return
|
||||
queryClient.setQueryData(key, {
|
||||
...data,
|
||||
documents: data.documents.map((doc) =>
|
||||
doc.id === documentId ? { ...doc, ...updates } : doc
|
||||
),
|
||||
})
|
||||
})
|
||||
},
|
||||
onError: (error) => {
|
||||
logger.error('Failed to mutate document', error)
|
||||
},
|
||||
onSettled: (_data, _error, variables) => {
|
||||
queryClient.invalidateQueries({ queryKey: knowledgeKeys.detail(variables.knowledgeBaseId) })
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
@@ -10,12 +10,10 @@ export function useDebounce<T>(value: T, delay: number): T {
const [debouncedValue, setDebouncedValue] = useState<T>(value)

useEffect(() => {
// Set a timeout to update the debounced value after the delay
const timer = setTimeout(() => {
setDebouncedValue(value)
}, delay)

// Clean up the timeout if the value changes before the delay has passed
return () => {
clearTimeout(timer)
}

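A small usage sketch for useDebounce, matching how the knowledge list page debounces its search input; the 300ms delay and the names are illustrative.

import { useState } from 'react'
import { useDebounce } from '@/hooks/use-debounce'

// Sketch only: debouncedSearchQuery trails searchQuery by 300ms of idle time.
function useSearchQuerySketch() {
  const [searchQuery, setSearchQuery] = useState('')
  const debouncedSearchQuery = useDebounce(searchQuery, 300)
  return { searchQuery, setSearchQuery, debouncedSearchQuery }
}
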
@@ -81,12 +81,10 @@ export function useExecutionStream() {
|
||||
const execute = useCallback(async (options: ExecuteStreamOptions) => {
|
||||
const { workflowId, callbacks = {}, ...payload } = options
|
||||
|
||||
// Cancel any existing execution
|
||||
if (abortControllerRef.current) {
|
||||
abortControllerRef.current.abort()
|
||||
}
|
||||
|
||||
// Create new abort controller
|
||||
const abortController = new AbortController()
|
||||
abortControllerRef.current = abortController
|
||||
currentExecutionRef.current = null
|
||||
@@ -115,7 +113,6 @@ export function useExecutionStream() {
|
||||
currentExecutionRef.current = { workflowId, executionId }
|
||||
}
|
||||
|
||||
// Read SSE stream
|
||||
const reader = response.body.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let buffer = ''
|
||||
@@ -128,13 +125,10 @@ export function useExecutionStream() {
|
||||
break
|
||||
}
|
||||
|
||||
// Decode chunk and add to buffer
|
||||
buffer += decoder.decode(value, { stream: true })
|
||||
|
||||
// Process complete SSE messages
|
||||
const lines = buffer.split('\n\n')
|
||||
|
||||
// Keep the last incomplete message in the buffer
|
||||
buffer = lines.pop() || ''
|
||||
|
||||
for (const line of lines) {
|
||||
@@ -144,7 +138,6 @@ export function useExecutionStream() {
|
||||
|
||||
const data = line.substring(6).trim()
|
||||
|
||||
// Check for [DONE] marker
|
||||
if (data === '[DONE]') {
|
||||
logger.info('Stream completed')
|
||||
continue
|
||||
@@ -153,14 +146,12 @@ export function useExecutionStream() {
|
||||
try {
|
||||
const event = JSON.parse(data) as ExecutionEvent
|
||||
|
||||
// Log all SSE events for debugging
|
||||
logger.info('📡 SSE Event received:', {
|
||||
type: event.type,
|
||||
executionId: event.executionId,
|
||||
data: event.data,
|
||||
})
|
||||
|
||||
// Dispatch event to appropriate callback
|
||||
switch (event.type) {
|
||||
case 'execution:started':
|
||||
logger.info('🚀 Execution started')
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
import { useCallback } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { useReactFlow } from 'reactflow'
|
||||
|
||||
const logger = createLogger('useFocusOnBlock')
|
||||
|
||||
/**
|
||||
* Hook to focus the canvas on a specific block with smooth animation.
|
||||
* Can be called from any component within the workflow (editor, toolbar, action bar, etc.).
|
||||
*
|
||||
* @returns Function to focus on a block by its ID
|
||||
*
|
||||
* @example
|
||||
* const focusOnBlock = useFocusOnBlock()
|
||||
* focusOnBlock('block-id-123')
|
||||
*/
|
||||
export function useFocusOnBlock() {
|
||||
const { getNodes, fitView } = useReactFlow()
|
||||
|
||||
return useCallback(
|
||||
(blockId: string) => {
|
||||
if (!blockId) {
|
||||
logger.warn('Cannot focus on block: no blockId provided')
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
// Check if the node exists
|
||||
const node = getNodes().find((n) => n.id === blockId)
|
||||
if (!node) {
|
||||
logger.warn('Cannot focus on block: block not found', { blockId })
|
||||
return
|
||||
}
|
||||
|
||||
// Focus on the specific node with smooth animation
|
||||
fitView({
|
||||
nodes: [node],
|
||||
duration: 400,
|
||||
padding: 0.3,
|
||||
minZoom: 0.5,
|
||||
maxZoom: 1.0,
|
||||
})
|
||||
|
||||
logger.info('Focused on block', { blockId })
|
||||
} catch (err) {
|
||||
logger.error('Failed to focus on block', { err, blockId })
|
||||
}
|
||||
},
|
||||
[getNodes, fitView]
|
||||
)
|
||||
}
|
||||
@@ -1,22 +0,0 @@
import { useEffect, useState } from 'react'
import { useKnowledgeStore } from '@/stores/knowledge/store'

export function useKnowledgeBaseName(knowledgeBaseId?: string | null) {
const getCachedKnowledgeBase = useKnowledgeStore((state) => state.getCachedKnowledgeBase)
const getKnowledgeBase = useKnowledgeStore((state) => state.getKnowledgeBase)
const [isLoading, setIsLoading] = useState(false)

const cached = knowledgeBaseId ? getCachedKnowledgeBase(knowledgeBaseId) : null

useEffect(() => {
if (!knowledgeBaseId || cached || isLoading) return
setIsLoading(true)
getKnowledgeBase(knowledgeBaseId)
.catch(() => {
// ignore
})
.finally(() => setIsLoading(false))
}, [knowledgeBaseId, cached, isLoading, getKnowledgeBase])

return cached?.name ?? null
}
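With useKnowledgeBaseName deleted, callers derive the name from the React Query-backed useKnowledgeBase hook instead, as the sub-block row change earlier in this diff does; a minimal sketch, with the wrapper name as an assumption:

import { useKnowledgeBase } from '@/hooks/use-knowledge'

// Sketch only: read the knowledge base through React Query and derive the name.
function useKnowledgeBaseNameSketch(knowledgeBaseId?: string | null) {
  const { knowledgeBase } = useKnowledgeBase(knowledgeBaseId ?? '')
  return knowledgeBase?.name ?? null
}
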
@@ -57,22 +57,6 @@ export function useKnowledgeBaseTagDefinitions(knowledgeBaseId: string | null) {
}
}, [knowledgeBaseId])

const getTagLabel = useCallback(
(tagSlot: string): string => {
const definition = tagDefinitions.find((def) => def.tagSlot === tagSlot)
return definition?.displayName || tagSlot
},
[tagDefinitions]
)

const getTagDefinition = useCallback(
(tagSlot: string): TagDefinition | undefined => {
return tagDefinitions.find((def) => def.tagSlot === tagSlot)
},
[tagDefinitions]
)

// Auto-fetch on mount and when dependencies change
useEffect(() => {
fetchTagDefinitions()
}, [fetchTagDefinitions])
@@ -82,7 +66,5 @@ export function useKnowledgeBaseTagDefinitions(knowledgeBaseId: string | null) {
isLoading,
error,
fetchTagDefinitions,
getTagLabel,
getTagDefinition,
}
}

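Since getTagLabel and getTagDefinition are no longer returned, a caller that still needs a label can derive it from tagDefinitions directly. A sketch that assumes the hook keeps returning tagDefinitions; its import is omitted because the file path is not shown in this diff.

// Sketch only: same lookup the removed getTagLabel performed, done at the call site.
function useTagLabelSketch(knowledgeBaseId: string | null) {
  const { tagDefinitions } = useKnowledgeBaseTagDefinitions(knowledgeBaseId)
  return (tagSlot: string) =>
    tagDefinitions.find((def) => def.tagSlot === tagSlot)?.displayName || tagSlot
}
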
@@ -1,46 +1,30 @@
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { useCallback } from 'react'
|
||||
import { useQueryClient } from '@tanstack/react-query'
|
||||
import Fuse from 'fuse.js'
|
||||
import type { ChunkData, DocumentData, KnowledgeBaseData } from '@/lib/knowledge/types'
|
||||
import {
|
||||
fetchKnowledgeChunks,
|
||||
type KnowledgeChunksResponse,
|
||||
type KnowledgeDocumentsResponse,
|
||||
knowledgeKeys,
|
||||
serializeChunkParams,
|
||||
serializeDocumentParams,
|
||||
useDocumentQuery,
|
||||
useKnowledgeBaseQuery,
|
||||
useKnowledgeBasesQuery,
|
||||
useKnowledgeChunksQuery,
|
||||
useKnowledgeDocumentsQuery,
|
||||
} from '@/hooks/queries/knowledge'
|
||||
import {
|
||||
type ChunkData,
|
||||
type ChunksPagination,
|
||||
type DocumentData,
|
||||
type DocumentsCache,
|
||||
type DocumentsPagination,
|
||||
type KnowledgeBaseData,
|
||||
useKnowledgeStore,
|
||||
} from '@/stores/knowledge/store'
|
||||
|
||||
const logger = createLogger('UseKnowledgeBase')
|
||||
const DEFAULT_PAGE_SIZE = 50
|
||||
|
||||
/**
|
||||
* Hook to fetch and manage a single knowledge base
|
||||
* Uses React Query as single source of truth
|
||||
*/
|
||||
export function useKnowledgeBase(id: string) {
|
||||
const queryClient = useQueryClient()
|
||||
const query = useKnowledgeBaseQuery(id)
|
||||
|
||||
useEffect(() => {
|
||||
if (query.data) {
|
||||
const knowledgeBase = query.data
|
||||
useKnowledgeStore.setState((state) => ({
|
||||
knowledgeBases: {
|
||||
...state.knowledgeBases,
|
||||
[knowledgeBase.id]: knowledgeBase,
|
||||
},
|
||||
}))
|
||||
}
|
||||
}, [query.data])
|
||||
|
||||
const refreshKnowledgeBase = useCallback(async () => {
|
||||
const refresh = useCallback(async () => {
|
||||
await queryClient.invalidateQueries({
|
||||
queryKey: knowledgeKeys.detail(id),
|
||||
})
|
||||
@@ -49,14 +33,31 @@ export function useKnowledgeBase(id: string) {
|
||||
return {
|
||||
knowledgeBase: query.data ?? null,
|
||||
isLoading: query.isLoading,
|
||||
isFetching: query.isFetching,
|
||||
error: query.error instanceof Error ? query.error.message : null,
|
||||
refresh: refreshKnowledgeBase,
|
||||
refresh,
|
||||
}
|
||||
}
|
||||
|
||||
// Constants
|
||||
const DEFAULT_PAGE_SIZE = 50
|
||||
/**
|
||||
* Hook to fetch and manage a single document
|
||||
* Uses React Query as single source of truth
|
||||
*/
|
||||
export function useDocument(knowledgeBaseId: string, documentId: string) {
|
||||
const query = useDocumentQuery(knowledgeBaseId, documentId)
|
||||
|
||||
return {
|
||||
document: query.data ?? null,
|
||||
isLoading: query.isLoading,
|
||||
isFetching: query.isFetching,
|
||||
error: query.error instanceof Error ? query.error.message : null,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to fetch and manage documents for a knowledge base
|
||||
* Uses React Query as single source of truth
|
||||
*/
|
||||
export function useKnowledgeBaseDocuments(
|
||||
knowledgeBaseId: string,
|
||||
options?: {
|
||||
@@ -71,16 +72,13 @@ export function useKnowledgeBaseDocuments(
|
||||
const queryClient = useQueryClient()
|
||||
const requestLimit = options?.limit ?? DEFAULT_PAGE_SIZE
|
||||
const requestOffset = options?.offset ?? 0
|
||||
const requestSearch = options?.search
|
||||
const requestSortBy = options?.sortBy
|
||||
const requestSortOrder = options?.sortOrder
|
||||
const paramsKey = serializeDocumentParams({
|
||||
knowledgeBaseId,
|
||||
limit: requestLimit,
|
||||
offset: requestOffset,
|
||||
search: requestSearch,
|
||||
sortBy: requestSortBy,
|
||||
sortOrder: requestSortOrder,
|
||||
search: options?.search,
|
||||
sortBy: options?.sortBy,
|
||||
sortOrder: options?.sortOrder,
|
||||
})
|
||||
|
||||
const query = useKnowledgeDocumentsQuery(
|
||||
@@ -88,79 +86,43 @@ export function useKnowledgeBaseDocuments(
|
||||
knowledgeBaseId,
|
||||
limit: requestLimit,
|
||||
offset: requestOffset,
|
||||
search: requestSearch,
|
||||
sortBy: requestSortBy,
|
||||
sortOrder: requestSortOrder,
|
||||
search: options?.search,
|
||||
sortBy: options?.sortBy,
|
||||
sortOrder: options?.sortOrder,
|
||||
},
|
||||
{
|
||||
enabled: (options?.enabled ?? true) && Boolean(knowledgeBaseId),
|
||||
}
|
||||
)
|
||||
|
||||
useEffect(() => {
|
||||
if (!query.data || !knowledgeBaseId) return
|
||||
const documentsCache = {
|
||||
documents: query.data.documents,
|
||||
pagination: query.data.pagination,
|
||||
searchQuery: requestSearch,
|
||||
sortBy: requestSortBy,
|
||||
sortOrder: requestSortOrder,
|
||||
lastFetchTime: Date.now(),
|
||||
}
|
||||
useKnowledgeStore.setState((state) => ({
|
||||
documents: {
|
||||
...state.documents,
|
||||
[knowledgeBaseId]: documentsCache,
|
||||
},
|
||||
}))
|
||||
}, [query.data, knowledgeBaseId, requestSearch, requestSortBy, requestSortOrder])
|
||||
|
||||
const documents = query.data?.documents ?? []
|
||||
const pagination =
|
||||
query.data?.pagination ??
|
||||
({
|
||||
total: 0,
|
||||
limit: requestLimit,
|
||||
offset: requestOffset,
|
||||
hasMore: false,
|
||||
} satisfies DocumentsCache['pagination'])
|
||||
const pagination = query.data?.pagination ?? {
|
||||
total: 0,
|
||||
limit: requestLimit,
|
||||
offset: requestOffset,
|
||||
hasMore: false,
|
||||
}
|
||||
|
||||
const refreshDocumentsData = useCallback(async () => {
|
||||
const refreshDocuments = useCallback(async () => {
|
||||
await queryClient.invalidateQueries({
|
||||
queryKey: knowledgeKeys.documents(knowledgeBaseId, paramsKey),
|
||||
})
|
||||
}, [queryClient, knowledgeBaseId, paramsKey])
|
||||
|
||||
const updateDocumentLocal = useCallback(
|
||||
const updateDocument = useCallback(
|
||||
(documentId: string, updates: Partial<DocumentData>) => {
|
||||
queryClient.setQueryData<{
|
||||
documents: DocumentData[]
|
||||
pagination: DocumentsPagination
|
||||
}>(knowledgeKeys.documents(knowledgeBaseId, paramsKey), (previous) => {
|
||||
if (!previous) return previous
|
||||
return {
|
||||
...previous,
|
||||
documents: previous.documents.map((doc) =>
|
||||
doc.id === documentId ? { ...doc, ...updates } : doc
|
||||
),
|
||||
queryClient.setQueryData<KnowledgeDocumentsResponse>(
|
||||
knowledgeKeys.documents(knowledgeBaseId, paramsKey),
|
||||
(previous) => {
|
||||
if (!previous) return previous
|
||||
return {
|
||||
...previous,
|
||||
documents: previous.documents.map((doc) =>
|
||||
doc.id === documentId ? { ...doc, ...updates } : doc
|
||||
),
|
||||
}
|
||||
}
|
||||
})
|
||||
useKnowledgeStore.setState((state) => {
|
||||
const existing = state.documents[knowledgeBaseId]
|
||||
if (!existing) return state
|
||||
return {
|
||||
documents: {
|
||||
...state.documents,
|
||||
[knowledgeBaseId]: {
|
||||
...existing,
|
||||
documents: existing.documents.map((doc) =>
|
||||
doc.id === documentId ? { ...doc, ...updates } : doc
|
||||
),
|
||||
},
|
||||
},
|
||||
}
|
||||
})
|
||||
logger.info(`Updated document ${documentId} for knowledge base ${knowledgeBaseId}`)
|
||||
)
|
||||
},
|
||||
[knowledgeBaseId, paramsKey, queryClient]
|
||||
)
|
||||
@@ -169,12 +131,18 @@ export function useKnowledgeBaseDocuments(
|
||||
documents,
|
||||
pagination,
|
||||
isLoading: query.isLoading,
|
||||
isFetching: query.isFetching,
|
||||
isPlaceholderData: query.isPlaceholderData,
|
||||
error: query.error instanceof Error ? query.error.message : null,
|
||||
refreshDocuments: refreshDocumentsData,
|
||||
updateDocument: updateDocumentLocal,
|
||||
refreshDocuments,
|
||||
updateDocument,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to fetch and manage knowledge bases list
|
||||
* Uses React Query as single source of truth
|
||||
*/
|
||||
export function useKnowledgeBasesList(
|
||||
workspaceId?: string,
|
||||
options?: {
|
||||
@@ -183,50 +151,6 @@ export function useKnowledgeBasesList(
|
||||
) {
|
||||
const queryClient = useQueryClient()
|
||||
const query = useKnowledgeBasesQuery(workspaceId, { enabled: options?.enabled ?? true })
|
||||
useEffect(() => {
|
||||
if (query.data) {
|
||||
useKnowledgeStore.setState((state) => ({
|
||||
knowledgeBasesList: query.data as KnowledgeBaseData[],
|
||||
knowledgeBasesListLoaded: true,
|
||||
loadingKnowledgeBasesList: query.isLoading,
|
||||
knowledgeBases: query.data!.reduce<Record<string, KnowledgeBaseData>>(
|
||||
(acc, kb) => {
|
||||
acc[kb.id] = kb
|
||||
return acc
|
||||
},
|
||||
{ ...state.knowledgeBases }
|
||||
),
|
||||
}))
|
||||
} else if (query.isLoading) {
|
||||
useKnowledgeStore.setState((state) => ({
|
||||
loadingKnowledgeBasesList: true,
|
||||
}))
|
||||
}
|
||||
}, [query.data, query.isLoading])
|
||||
|
||||
const addKnowledgeBase = useCallback(
|
||||
(knowledgeBase: KnowledgeBaseData) => {
|
||||
queryClient.setQueryData<KnowledgeBaseData[]>(
|
||||
knowledgeKeys.list(workspaceId),
|
||||
(previous = []) => {
|
||||
if (previous.some((kb) => kb.id === knowledgeBase.id)) {
|
||||
return previous
|
||||
}
|
||||
return [knowledgeBase, ...previous]
|
||||
}
|
||||
)
|
||||
useKnowledgeStore.setState((state) => ({
|
||||
knowledgeBases: {
|
||||
...state.knowledgeBases,
|
||||
[knowledgeBase.id]: knowledgeBase,
|
||||
},
|
||||
knowledgeBasesList: state.knowledgeBasesList.some((kb) => kb.id === knowledgeBase.id)
|
||||
? state.knowledgeBasesList
|
||||
: [knowledgeBase, ...state.knowledgeBasesList],
|
||||
}))
|
||||
},
|
||||
[queryClient, workspaceId]
|
||||
)
|
||||
|
||||
const removeKnowledgeBase = useCallback(
|
||||
(knowledgeBaseId: string) => {
|
||||
@@ -234,12 +158,19 @@ export function useKnowledgeBasesList(
|
||||
knowledgeKeys.list(workspaceId),
|
||||
(previous) => previous?.filter((kb) => kb.id !== knowledgeBaseId) ?? []
|
||||
)
|
||||
useKnowledgeStore.setState((state) => ({
|
||||
knowledgeBases: Object.fromEntries(
|
||||
Object.entries(state.knowledgeBases).filter(([id]) => id !== knowledgeBaseId)
|
||||
),
|
||||
knowledgeBasesList: state.knowledgeBasesList.filter((kb) => kb.id !== knowledgeBaseId),
|
||||
}))
|
||||
},
|
||||
[queryClient, workspaceId]
|
||||
)
|
||||
|
||||
const updateKnowledgeBase = useCallback(
|
||||
(id: string, updates: Partial<KnowledgeBaseData>) => {
|
||||
queryClient.setQueryData<KnowledgeBaseData[]>(
|
||||
knowledgeKeys.list(workspaceId),
|
||||
(previous) => previous?.map((kb) => (kb.id === id ? { ...kb, ...updates } : kb)) ?? []
|
||||
)
|
||||
queryClient.setQueryData<KnowledgeBaseData>(knowledgeKeys.detail(id), (previous) =>
|
||||
previous ? { ...previous, ...updates } : previous
|
||||
)
|
||||
},
|
||||
[queryClient, workspaceId]
|
||||
)
|
||||
@@ -248,393 +179,113 @@ export function useKnowledgeBasesList(
|
||||
await queryClient.invalidateQueries({ queryKey: knowledgeKeys.list(workspaceId) })
|
||||
}, [queryClient, workspaceId])
|
||||
|
||||
const forceRefresh = refreshList
|
||||
|
||||
return {
|
||||
knowledgeBases: query.data ?? [],
|
||||
isLoading: query.isLoading,
|
||||
isFetching: query.isFetching,
|
||||
isPlaceholderData: query.isPlaceholderData,
|
||||
error: query.error instanceof Error ? query.error.message : null,
|
||||
refreshList,
|
||||
forceRefresh,
|
||||
addKnowledgeBase,
|
||||
removeKnowledgeBase,
|
||||
retryCount: 0,
|
||||
maxRetries: 0,
|
||||
updateKnowledgeBase,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to manage chunks for a specific document with optional client-side search
|
||||
* Hook to manage chunks for a specific document
|
||||
* Uses React Query as single source of truth
|
||||
*/
|
||||
export function useDocumentChunks(
|
||||
knowledgeBaseId: string,
|
||||
documentId: string,
|
||||
urlPage = 1,
|
||||
urlSearch = '',
|
||||
options: { enableClientSearch?: boolean } = {}
|
||||
page = 1,
|
||||
search = ''
|
||||
) {
|
||||
const { enableClientSearch = false } = options
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
const [chunks, setChunks] = useState<ChunkData[]>([])
|
||||
const [allChunks, setAllChunks] = useState<ChunkData[]>([])
|
||||
const [isLoading, setIsLoading] = useState(true)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [pagination, setPagination] = useState({
|
||||
total: 0,
|
||||
limit: 50,
|
||||
offset: 0,
|
||||
hasMore: false,
|
||||
})
|
||||
const currentPage = Math.max(1, page)
|
||||
const offset = (currentPage - 1) * DEFAULT_PAGE_SIZE
|
||||
|
||||
const [searchQuery, setSearchQuery] = useState('')
|
||||
const [currentPage, setCurrentPage] = useState(urlPage)
|
||||
|
||||
useEffect(() => {
|
||||
setCurrentPage(urlPage)
|
||||
}, [urlPage])
|
||||
|
||||
useEffect(() => {
|
||||
if (!enableClientSearch) return
|
||||
setSearchQuery(urlSearch)
|
||||
}, [enableClientSearch, urlSearch])
|
||||
|
||||
if (enableClientSearch) {
|
||||
const loadAllChunks = useCallback(async () => {
|
||||
if (!knowledgeBaseId || !documentId) return
|
||||
|
||||
try {
|
||||
setIsLoading(true)
|
||||
setError(null)
|
||||
|
||||
const aggregated: ChunkData[] = []
|
||||
const limit = DEFAULT_PAGE_SIZE
|
||||
let offset = 0
|
||||
let hasMore = true
|
||||
|
||||
while (hasMore) {
|
||||
const { chunks: batch, pagination: batchPagination } = await fetchKnowledgeChunks({
|
||||
knowledgeBaseId,
|
||||
documentId,
|
||||
limit,
|
||||
offset,
|
||||
})
|
||||
|
||||
aggregated.push(...batch)
|
||||
hasMore = batchPagination.hasMore
|
||||
offset = batchPagination.offset + batchPagination.limit
|
||||
}
|
||||
|
||||
setAllChunks(aggregated)
|
||||
setChunks(aggregated)
|
||||
setPagination({
|
||||
total: aggregated.length,
|
||||
limit,
|
||||
offset: 0,
|
||||
hasMore: false,
|
||||
})
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : 'Failed to load chunks'
|
||||
setError(message)
|
||||
logger.error(`Failed to load chunks for document ${documentId}:`, err)
|
||||
} finally {
|
||||
setIsLoading(false)
|
||||
}
|
||||
}, [documentId, knowledgeBaseId])
|
||||
|
||||
useEffect(() => {
|
||||
loadAllChunks()
|
||||
}, [loadAllChunks])
|
||||
|
||||
const filteredChunks = useMemo(() => {
|
||||
if (!searchQuery.trim()) return allChunks
|
||||
|
||||
const fuse = new Fuse(allChunks, {
|
||||
keys: ['content'],
|
||||
threshold: 0.3,
|
||||
includeScore: true,
|
||||
includeMatches: true,
|
||||
minMatchCharLength: 2,
|
||||
ignoreLocation: true,
|
||||
})
|
||||
|
||||
const results = fuse.search(searchQuery)
|
||||
return results.map((result) => result.item)
|
||||
}, [allChunks, searchQuery])
|
||||
|
||||
const CHUNKS_PER_PAGE = DEFAULT_PAGE_SIZE
|
||||
const totalPages = Math.max(1, Math.ceil(filteredChunks.length / CHUNKS_PER_PAGE))
|
||||
const hasNextPage = currentPage < totalPages
|
||||
const hasPrevPage = currentPage > 1
|
||||
|
||||
const paginatedChunks = useMemo(() => {
|
||||
const startIndex = (currentPage - 1) * CHUNKS_PER_PAGE
|
||||
const endIndex = startIndex + CHUNKS_PER_PAGE
|
||||
return filteredChunks.slice(startIndex, endIndex)
|
||||
}, [filteredChunks, currentPage])
|
||||
|
||||
useEffect(() => {
|
||||
if (currentPage > 1) {
|
||||
setCurrentPage(1)
|
||||
}
|
||||
}, [searchQuery, currentPage])
|
||||
|
||||
useEffect(() => {
|
||||
if (currentPage > totalPages && totalPages > 0) {
|
||||
setCurrentPage(totalPages)
|
||||
}
|
||||
}, [currentPage, totalPages])
|
||||
|
||||
const goToPage = useCallback(
|
||||
(page: number) => {
|
||||
if (page >= 1 && page <= totalPages) {
|
||||
setCurrentPage(page)
|
||||
}
|
||||
},
|
||||
[totalPages]
|
||||
)
|
||||
|
||||
const nextPage = useCallback(() => {
|
||||
if (hasNextPage) {
|
||||
setCurrentPage((prev) => prev + 1)
|
||||
}
|
||||
}, [hasNextPage])
|
||||
|
||||
const prevPage = useCallback(() => {
|
||||
if (hasPrevPage) {
|
||||
setCurrentPage((prev) => prev - 1)
|
||||
}
|
||||
}, [hasPrevPage])
|
||||
|
||||
return {
|
||||
chunks: paginatedChunks,
|
||||
allChunks,
|
||||
filteredChunks,
|
||||
paginatedChunks,
|
||||
searchQuery,
|
||||
setSearchQuery,
|
||||
isLoading,
|
||||
error,
|
||||
pagination: {
|
||||
total: filteredChunks.length,
|
||||
limit: CHUNKS_PER_PAGE,
|
||||
offset: (currentPage - 1) * CHUNKS_PER_PAGE,
|
||||
hasMore: hasNextPage,
|
||||
},
|
||||
currentPage,
|
||||
totalPages,
|
||||
hasNextPage,
|
||||
hasPrevPage,
|
||||
goToPage,
|
||||
nextPage,
|
||||
prevPage,
|
||||
refreshChunks: loadAllChunks,
|
||||
searchChunks: async () => filteredChunks,
|
||||
updateChunk: (chunkId: string, updates: Partial<ChunkData>) => {
|
||||
setAllChunks((previous) =>
|
||||
previous.map((chunk) => (chunk.id === chunkId ? { ...chunk, ...updates } : chunk))
|
||||
)
|
||||
setChunks((previous) =>
|
||||
previous.map((chunk) => (chunk.id === chunkId ? { ...chunk, ...updates } : chunk))
|
||||
)
|
||||
},
|
||||
clearChunks: () => {
|
||||
setAllChunks([])
|
||||
setChunks([])
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const serverCurrentPage = Math.max(1, urlPage)
|
||||
const serverSearchQuery = urlSearch ?? ''
|
||||
const serverLimit = DEFAULT_PAGE_SIZE
|
||||
const serverOffset = (serverCurrentPage - 1) * serverLimit
|
||||
|
||||
const chunkQueryParams = useMemo(
|
||||
() => ({
|
||||
const chunkQuery = useKnowledgeChunksQuery(
|
||||
{
|
||||
knowledgeBaseId,
|
||||
documentId,
|
||||
limit: serverLimit,
|
||||
offset: serverOffset,
|
||||
search: serverSearchQuery ? serverSearchQuery : undefined,
|
||||
}),
|
||||
[documentId, knowledgeBaseId, serverLimit, serverOffset, serverSearchQuery]
|
||||
)
|
||||
|
||||
const chunkParamsKey = useMemo(() => serializeChunkParams(chunkQueryParams), [chunkQueryParams])
|
||||
|
||||
const chunkQuery = useKnowledgeChunksQuery(chunkQueryParams, {
|
||||
enabled: Boolean(knowledgeBaseId && documentId),
|
||||
})
|
||||
|
||||
useEffect(() => {
|
||||
if (chunkQuery.data) {
|
||||
setChunks(chunkQuery.data.chunks)
|
||||
setPagination(chunkQuery.data.pagination)
|
||||
limit: DEFAULT_PAGE_SIZE,
|
||||
offset,
|
||||
search: search || undefined,
|
||||
},
|
||||
{
|
||||
enabled: Boolean(knowledgeBaseId && documentId),
|
||||
}
|
||||
}, [chunkQuery.data])
|
||||
|
||||
useEffect(() => {
|
||||
setIsLoading(chunkQuery.isFetching || chunkQuery.isLoading)
|
||||
}, [chunkQuery.isFetching, chunkQuery.isLoading])
|
||||
|
||||
useEffect(() => {
|
||||
const message = chunkQuery.error instanceof Error ? chunkQuery.error.message : chunkQuery.error
|
||||
setError(message ?? null)
|
||||
}, [chunkQuery.error])
|
||||
|
||||
const totalPages = Math.max(
|
||||
1,
Math.ceil(
(pagination.total || 0) /
(pagination.limit && pagination.limit > 0 ? pagination.limit : DEFAULT_PAGE_SIZE)
)
)
const hasNextPage = serverCurrentPage < totalPages
const hasPrevPage = serverCurrentPage > 1

const chunks = chunkQuery.data?.chunks ?? []
const pagination = chunkQuery.data?.pagination ?? {
total: 0,
limit: DEFAULT_PAGE_SIZE,
offset: 0,
hasMore: false,
}
const totalPages = Math.max(1, Math.ceil(pagination.total / DEFAULT_PAGE_SIZE))
const hasNextPage = currentPage < totalPages
const hasPrevPage = currentPage > 1

const goToPage = useCallback(
async (page: number) => {
if (!knowledgeBaseId || !documentId) return
if (page < 1 || page > totalPages) return

const offset = (page - 1) * serverLimit
const paramsKey = serializeChunkParams({
knowledgeBaseId,
documentId,
limit: serverLimit,
offset,
search: chunkQueryParams.search,
})

await queryClient.fetchQuery({
queryKey: knowledgeKeys.chunks(knowledgeBaseId, documentId, paramsKey),
queryFn: () =>
fetchKnowledgeChunks({
knowledgeBaseId,
documentId,
limit: serverLimit,
offset,
search: chunkQueryParams.search,
}),
})
async (newPage: number) => {
if (newPage < 1 || newPage > totalPages) return
},
[chunkQueryParams.search, documentId, knowledgeBaseId, queryClient, serverLimit, totalPages]
[totalPages]
)

const nextPage = useCallback(async () => {
if (hasNextPage) {
await goToPage(serverCurrentPage + 1)
}
}, [goToPage, hasNextPage, serverCurrentPage])

const prevPage = useCallback(async () => {
if (hasPrevPage) {
await goToPage(serverCurrentPage - 1)
}
}, [goToPage, hasPrevPage, serverCurrentPage])

const refreshChunksData = useCallback(async () => {
if (!knowledgeBaseId || !documentId) return
await queryClient.invalidateQueries({
queryKey: knowledgeKeys.chunks(knowledgeBaseId, documentId, chunkParamsKey),
const refreshChunks = useCallback(async () => {
const paramsKey = serializeChunkParams({
knowledgeBaseId,
documentId,
limit: DEFAULT_PAGE_SIZE,
offset,
search: search || undefined,
})
}, [chunkParamsKey, documentId, knowledgeBaseId, queryClient])
await queryClient.invalidateQueries({
queryKey: knowledgeKeys.chunks(knowledgeBaseId, documentId, paramsKey),
})
}, [knowledgeBaseId, documentId, offset, search, queryClient])

const searchChunks = useCallback(
async (newSearchQuery: string) => {
if (!knowledgeBaseId || !documentId) return []
const updateChunk = useCallback(
(chunkId: string, updates: Partial<ChunkData>) => {
const paramsKey = serializeChunkParams({
knowledgeBaseId,
documentId,
limit: serverLimit,
offset: 0,
search: newSearchQuery || undefined,
limit: DEFAULT_PAGE_SIZE,
offset,
search: search || undefined,
})

const result = await queryClient.fetchQuery({
queryKey: knowledgeKeys.chunks(knowledgeBaseId, documentId, paramsKey),
queryFn: () =>
fetchKnowledgeChunks({
knowledgeBaseId,
documentId,
limit: serverLimit,
offset: 0,
search: newSearchQuery || undefined,
}),
})

return result.chunks
},
[documentId, knowledgeBaseId, queryClient, serverLimit]
)

const updateChunkLocal = useCallback(
(chunkId: string, updates: Partial<ChunkData>) => {
queryClient.setQueriesData<{
chunks: ChunkData[]
pagination: ChunksPagination
}>(
{
predicate: (query) =>
Array.isArray(query.queryKey) &&
query.queryKey[0] === knowledgeKeys.all[0] &&
query.queryKey[1] === knowledgeKeys.detail('')[1] &&
query.queryKey[2] === knowledgeBaseId &&
query.queryKey[3] === 'documents' &&
query.queryKey[4] === documentId &&
query.queryKey[5] === 'chunks',
},
(oldData) => {
if (!oldData) return oldData
queryClient.setQueryData<KnowledgeChunksResponse>(
knowledgeKeys.chunks(knowledgeBaseId, documentId, paramsKey),
(previous) => {
if (!previous) return previous
return {
...oldData,
chunks: oldData.chunks.map((chunk) =>
...previous,
chunks: previous.chunks.map((chunk) =>
chunk.id === chunkId ? { ...chunk, ...updates } : chunk
),
}
}
)

setChunks((previous) =>
previous.map((chunk) => (chunk.id === chunkId ? { ...chunk, ...updates } : chunk))
)
useKnowledgeStore.getState().updateChunk(documentId, chunkId, updates)
},
[documentId, knowledgeBaseId, queryClient]
[knowledgeBaseId, documentId, offset, search, queryClient]
)

const clearChunksLocal = useCallback(() => {
useKnowledgeStore.getState().clearChunks(documentId)
setChunks([])
setPagination({
total: 0,
limit: DEFAULT_PAGE_SIZE,
offset: 0,
hasMore: false,
})
}, [documentId])

return {
chunks,
allChunks: chunks,
filteredChunks: chunks,
paginatedChunks: chunks,
searchQuery: serverSearchQuery,
setSearchQuery: () => {},
isLoading,
error,
pagination,
currentPage: serverCurrentPage,
isLoading: chunkQuery.isLoading,
isFetching: chunkQuery.isFetching,
error: chunkQuery.error instanceof Error ? chunkQuery.error.message : null,
currentPage,
totalPages,
hasNextPage,
hasPrevPage,
goToPage,
nextPage,
prevPage,
refreshChunks: refreshChunksData,
searchChunks,
updateChunk: updateChunkLocal,
clearChunks: clearChunksLocal,
refreshChunks,
updateChunk,
}
}
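The hook above derives all of its paging state from the server-reported `total`, `limit`, and `offset`. A minimal standalone sketch of that offset-based page math (helper names are illustrative, not from the repo, and the default page size is assumed):

```ts
// Standalone sketch of the offset-based pagination math used by the chunks hook.
interface PageState {
  total: number
  limit: number
  offset: number
}

function derivePagination({ total, limit, offset }: PageState) {
  const pageSize = limit > 0 ? limit : 50 // assumed default page size
  const totalPages = Math.max(1, Math.ceil(total / pageSize))
  const currentPage = Math.floor(offset / pageSize) + 1
  return {
    totalPages,
    currentPage,
    hasNextPage: currentPage < totalPages,
    hasPrevPage: currentPage > 1,
    offsetForPage: (page: number) => (page - 1) * pageSize,
  }
}

// e.g. 95 chunks at 20 per page -> 5 pages; page 3 starts at offset 40
const p = derivePagination({ total: 95, limit: 20, offset: 40 })
console.log(p.currentPage, p.totalPages, p.offsetForPage(3)) // 3 5 40
```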

@@ -32,7 +32,6 @@ export interface UseMcpToolsResult {
isLoading: boolean
error: string | null
refreshTools: (forceRefresh?: boolean) => Promise<void>
getToolById: (toolId: string) => McpToolForUI | undefined
getToolsByServer: (serverId: string) => McpToolForUI[]
}

@@ -72,13 +71,6 @@ export function useMcpTools(workspaceId: string): UseMcpToolsResult {
[workspaceId, queryClient]
)

const getToolById = useCallback(
(toolId: string): McpToolForUI | undefined => {
return mcpTools.find((tool) => tool.id === toolId)
},
[mcpTools]
)

const getToolsByServer = useCallback(
(serverId: string): McpToolForUI[] => {
return mcpTools.filter((tool) => tool.serverId === serverId)
@@ -91,7 +83,6 @@ export function useMcpTools(workspaceId: string): UseMcpToolsResult {
isLoading,
error: queryError instanceof Error ? queryError.message : null,
refreshTools,
getToolById,
getToolsByServer,
}
}
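For reference, a reduced sketch of the two lookup helpers returned by useMcpTools, assuming a simplified tool shape (the real McpToolForUI type lives elsewhere in the repo):

```ts
// Simplified tool shape for illustration only.
interface ToolLike {
  id: string
  serverId: string
  name: string
}

const tools: ToolLike[] = [
  { id: 't1', serverId: 's1', name: 'search' },
  { id: 't2', serverId: 's1', name: 'fetch' },
  { id: 't3', serverId: 's2', name: 'summarize' },
]

// Same semantics as the memoized callbacks above: find by id, filter by server.
const getToolById = (toolId: string) => tools.find((tool) => tool.id === toolId)
const getToolsByServer = (serverId: string) => tools.filter((tool) => tool.serverId === serverId)

console.log(getToolById('t2')?.name) // "fetch"
console.log(getToolsByServer('s1').length) // 2
```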

@@ -11,26 +11,21 @@ import { useCallback, useEffect } from 'react'
* - Tab is closed
*/
export function useStreamCleanup(cleanup: () => void) {
// Wrap cleanup function to ensure it's stable
const stableCleanup = useCallback(() => {
try {
cleanup()
} catch (error) {
// Ignore errors during cleanup to prevent issues during page unload
console.warn('Error during stream cleanup:', error)
}
}, [cleanup])

useEffect(() => {
// Handle page unload/navigation/refresh
const handleBeforeUnload = () => {
stableCleanup()
}

// Add event listeners
window.addEventListener('beforeunload', handleBeforeUnload)

// Cleanup on component unmount
return () => {
window.removeEventListener('beforeunload', handleBeforeUnload)
stableCleanup()
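A hypothetical consumer of useStreamCleanup, aborting an in-flight streaming fetch when the tab unloads or the component unmounts; the component name and endpoint are illustrative only:

```ts
import { useRef } from 'react'

// The real useStreamCleanup is shown above; declared here so the sketch stands alone.
declare function useStreamCleanup(cleanup: () => void): void

// Hypothetical consumer: cancel an in-flight streaming request on unload/unmount.
function useChatStream(url = '/api/chat/stream') {
  const controllerRef = useRef<AbortController | null>(null)

  const start = async () => {
    controllerRef.current = new AbortController()
    return fetch(url, { signal: controllerRef.current.signal })
  }

  // Runs on beforeunload and on unmount; errors inside are swallowed by the hook.
  useStreamCleanup(() => controllerRef.current?.abort())

  return { start }
}
```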

@@ -10,7 +10,6 @@ export function useTagSelection(blockId: string, subblockId: string) {

const emitTagSelectionValue = useCallback(
(value: any) => {
// Use the collaborative system with immediate processing (no debouncing)
collaborativeSetTagSelection(blockId, subblockId, value)
},
[blockId, subblockId, collaborativeSetTagSelection]

@@ -96,3 +96,115 @@ export interface ProcessedDocumentTags {
// Index signature for dynamic access
[key: string]: string | number | Date | boolean | null
}

/**
* Frontend/API Types
* These types use string dates for JSON serialization
*/

/** Extended chunking config with optional fields */
export interface ExtendedChunkingConfig extends ChunkingConfig {
chunkSize?: number
minCharactersPerChunk?: number
recipe?: string
lang?: string
strategy?: 'recursive' | 'semantic' | 'sentence' | 'paragraph'
[key: string]: unknown
}

/** Knowledge base data for API responses */
export interface KnowledgeBaseData {
id: string
name: string
description?: string
tokenCount: number
embeddingModel: string
embeddingDimension: number
chunkingConfig: ExtendedChunkingConfig
createdAt: string
updatedAt: string
workspaceId?: string
}

/** Document data for API responses */
export interface DocumentData {
id: string
knowledgeBaseId: string
filename: string
fileUrl: string
fileSize: number
mimeType: string
chunkCount: number
tokenCount: number
characterCount: number
processingStatus: 'pending' | 'processing' | 'completed' | 'failed'
processingStartedAt?: string | null
processingCompletedAt?: string | null
processingError?: string | null
enabled: boolean
uploadedAt: string
tag1?: string | null
tag2?: string | null
tag3?: string | null
tag4?: string | null
tag5?: string | null
tag6?: string | null
tag7?: string | null
number1?: number | null
number2?: number | null
number3?: number | null
number4?: number | null
number5?: number | null
date1?: string | null
date2?: string | null
boolean1?: boolean | null
boolean2?: boolean | null
boolean3?: boolean | null
}

/** Chunk data for API responses */
export interface ChunkData {
id: string
chunkIndex: number
content: string
contentLength: number
tokenCount: number
enabled: boolean
startOffset: number
endOffset: number
tag1?: string | null
tag2?: string | null
tag3?: string | null
tag4?: string | null
tag5?: string | null
tag6?: string | null
tag7?: string | null
number1?: number | null
number2?: number | null
number3?: number | null
number4?: number | null
number5?: number | null
date1?: string | null
date2?: string | null
boolean1?: boolean | null
boolean2?: boolean | null
boolean3?: boolean | null
createdAt: string
updatedAt: string
}

/** Pagination info for chunks */
export interface ChunksPagination {
total: number
limit: number
offset: number
hasMore: boolean
}

/** Pagination info for documents */
export interface DocumentsPagination {
total: number
limit: number
offset: number
hasMore: boolean
}
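A short, purely illustrative sketch of consuming these API types (the sample values are made up):

```ts
// Illustrative only: using the pagination and document types above.
const page: ChunksPagination = { total: 120, limit: 50, offset: 50, hasMore: true }

// Offset for the next request, if the server reports more rows beyond this page.
const nextOffset = page.hasMore ? page.offset + page.limit : null // 100

// Narrowing the processing status before showing a document as usable.
function isUsable(doc: DocumentData): boolean {
  return doc.processingStatus === 'completed' && doc.enabled
}
```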

@@ -57,6 +57,40 @@ export function getAccurateTokenCount(text: string, modelName = 'text-embedding-
}
}

/**
* Get individual tokens as strings for visualization
* Returns an array of token strings that can be displayed with colors
*/
export function getTokenStrings(text: string, modelName = 'text-embedding-3-small'): string[] {
if (!text || text.length === 0) {
return []
}

try {
const encoding = getEncoding(modelName)
const tokenIds = encoding.encode(text)

const textChars = [...text]
const result: string[] = []
let prevCharCount = 0

for (let i = 0; i < tokenIds.length; i++) {
const decoded = encoding.decode(tokenIds.slice(0, i + 1))
const currentCharCount = [...decoded].length
const tokenCharCount = currentCharCount - prevCharCount

const tokenStr = textChars.slice(prevCharCount, prevCharCount + tokenCharCount).join('')
result.push(tokenStr)
prevCharCount = currentCharCount
}

return result
} catch (error) {
logger.error('Error getting token strings:', error)
return text.split(/(\s+)/).filter((s) => s.length > 0)
}
}

/**
* Truncate text to a maximum token count
* Useful for handling texts that exceed model limits

@@ -97,7 +97,6 @@
"ffmpeg-static": "5.3.0",
"fluent-ffmpeg": "2.1.3",
"framer-motion": "^12.5.0",
"fuse.js": "7.1.0",
"google-auth-library": "10.5.0",
"gray-matter": "^4.0.3",
"groq-sdk": "^0.15.0",

@@ -1,923 +0,0 @@
import { createLogger } from '@sim/logger'
import { create } from 'zustand'

const logger = createLogger('KnowledgeStore')

/**
* Configuration for document chunking in knowledge bases
*
* Units:
* - maxSize: Maximum chunk size in TOKENS (1 token ≈ 4 characters)
* - minSize: Minimum chunk size in CHARACTERS (floor to avoid tiny fragments)
* - overlap: Overlap between chunks in TOKENS (1 token ≈ 4 characters)
*/
export interface ChunkingConfig {
/** Maximum chunk size in tokens (default: 1024, range: 100-4000) */
maxSize: number
/** Minimum chunk size in characters (default: 100, range: 1-2000) */
minSize: number
/** Overlap between chunks in tokens (default: 200, range: 0-500) */
overlap: number
chunkSize?: number // Legacy support
minCharactersPerChunk?: number // Legacy support
recipe?: string
lang?: string
strategy?: 'recursive' | 'semantic' | 'sentence' | 'paragraph'
[key: string]: unknown
}
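An example config that follows the documented units and defaults (values taken from the doc comment above; the variable name is illustrative):

```ts
// Example ChunkingConfig using the documented defaults: maxSize/overlap are in
// tokens, minSize is in characters.
const defaultChunking: ChunkingConfig = {
  maxSize: 1024, // tokens (~4096 characters at 1 token ≈ 4 chars)
  minSize: 100, // characters
  overlap: 200, // tokens (~800 characters)
}

// Rough character budget implied by the token-based fields.
const approxMaxChars = defaultChunking.maxSize * 4 // 4096
const approxOverlapChars = defaultChunking.overlap * 4 // 800
```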

export interface KnowledgeBaseData {
id: string
name: string
description?: string
tokenCount: number
embeddingModel: string
embeddingDimension: number
chunkingConfig: ChunkingConfig
createdAt: string
updatedAt: string
workspaceId?: string
}

export interface DocumentData {
id: string
knowledgeBaseId: string
filename: string
fileUrl: string
fileSize: number
mimeType: string
chunkCount: number
tokenCount: number
characterCount: number
processingStatus: 'pending' | 'processing' | 'completed' | 'failed'
processingStartedAt?: string | null
processingCompletedAt?: string | null
processingError?: string | null
enabled: boolean
uploadedAt: string
// Text tags
tag1?: string | null
tag2?: string | null
tag3?: string | null
tag4?: string | null
tag5?: string | null
tag6?: string | null
tag7?: string | null
// Number tags (5 slots)
number1?: number | null
number2?: number | null
number3?: number | null
number4?: number | null
number5?: number | null
// Date tags (2 slots)
date1?: string | null
date2?: string | null
// Boolean tags (3 slots)
boolean1?: boolean | null
boolean2?: boolean | null
boolean3?: boolean | null
}

export interface ChunkData {
id: string
chunkIndex: number
content: string
contentLength: number
tokenCount: number
enabled: boolean
startOffset: number
endOffset: number
// Text tags
tag1?: string | null
tag2?: string | null
tag3?: string | null
tag4?: string | null
tag5?: string | null
tag6?: string | null
tag7?: string | null
// Number tags (5 slots)
number1?: number | null
number2?: number | null
number3?: number | null
number4?: number | null
number5?: number | null
// Date tags (2 slots)
date1?: string | null
date2?: string | null
// Boolean tags (3 slots)
boolean1?: boolean | null
boolean2?: boolean | null
boolean3?: boolean | null
createdAt: string
updatedAt: string
}

export interface ChunksPagination {
total: number
limit: number
offset: number
hasMore: boolean
}

export interface ChunksCache {
chunks: ChunkData[]
pagination: ChunksPagination
searchQuery?: string
lastFetchTime: number
}

export interface DocumentsPagination {
total: number
limit: number
offset: number
hasMore: boolean
}

export interface DocumentsCache {
documents: DocumentData[]
pagination: DocumentsPagination
searchQuery?: string
sortBy?: string
sortOrder?: string
lastFetchTime: number
}

interface KnowledgeStore {
// State
knowledgeBases: Record<string, KnowledgeBaseData>
documents: Record<string, DocumentsCache> // knowledgeBaseId -> documents cache
chunks: Record<string, ChunksCache> // documentId -> chunks cache
knowledgeBasesList: KnowledgeBaseData[]

// Loading states
loadingKnowledgeBases: Set<string>
loadingDocuments: Set<string>
loadingChunks: Set<string>
loadingKnowledgeBasesList: boolean
knowledgeBasesListLoaded: boolean

// Actions
getKnowledgeBase: (id: string) => Promise<KnowledgeBaseData | null>
getDocuments: (
knowledgeBaseId: string,
options?: {
search?: string
limit?: number
offset?: number
sortBy?: string
sortOrder?: string
}
) => Promise<DocumentData[]>
getChunks: (
knowledgeBaseId: string,
documentId: string,
options?: { search?: string; limit?: number; offset?: number }
) => Promise<ChunkData[]>
getKnowledgeBasesList: (workspaceId?: string) => Promise<KnowledgeBaseData[]>
refreshDocuments: (
knowledgeBaseId: string,
options?: {
search?: string
limit?: number
offset?: number
sortBy?: string
sortOrder?: string
}
) => Promise<DocumentData[]>
refreshChunks: (
knowledgeBaseId: string,
documentId: string,
options?: { search?: string; limit?: number; offset?: number }
) => Promise<ChunkData[]>
updateDocument: (
knowledgeBaseId: string,
documentId: string,
updates: Partial<DocumentData>
) => void
updateChunk: (documentId: string, chunkId: string, updates: Partial<ChunkData>) => void
addPendingDocuments: (knowledgeBaseId: string, documents: DocumentData[]) => void
addKnowledgeBase: (knowledgeBase: KnowledgeBaseData) => void
updateKnowledgeBase: (id: string, updates: Partial<KnowledgeBaseData>) => void
removeKnowledgeBase: (id: string) => void
removeDocument: (knowledgeBaseId: string, documentId: string) => void
clearDocuments: (knowledgeBaseId: string) => void
clearChunks: (documentId: string) => void
clearKnowledgeBasesList: () => void

// Getters
getCachedKnowledgeBase: (id: string) => KnowledgeBaseData | null
getCachedDocuments: (knowledgeBaseId: string) => DocumentsCache | null
getCachedChunks: (documentId: string, options?: { search?: string }) => ChunksCache | null

// Loading state getters
isKnowledgeBaseLoading: (id: string) => boolean
isDocumentsLoading: (knowledgeBaseId: string) => boolean
isChunksLoading: (documentId: string) => boolean
}

export const useKnowledgeStore = create<KnowledgeStore>((set, get) => ({
knowledgeBases: {},
documents: {},
chunks: {},
knowledgeBasesList: [],
loadingKnowledgeBases: new Set(),
loadingDocuments: new Set(),
loadingChunks: new Set(),
loadingKnowledgeBasesList: false,
knowledgeBasesListLoaded: false,

getCachedKnowledgeBase: (id: string) => {
return get().knowledgeBases[id] || null
},

getCachedDocuments: (knowledgeBaseId: string) => {
return get().documents[knowledgeBaseId] || null
},

getCachedChunks: (documentId: string, options?: { search?: string }) => {
return get().chunks[documentId] || null
},

isKnowledgeBaseLoading: (id: string) => {
return get().loadingKnowledgeBases.has(id)
},

isDocumentsLoading: (knowledgeBaseId: string) => {
return get().loadingDocuments.has(knowledgeBaseId)
},

isChunksLoading: (documentId: string) => {
return get().loadingChunks.has(documentId)
},

getKnowledgeBase: async (id: string) => {
const state = get()

// Return cached data if it exists
const cached = state.knowledgeBases[id]
if (cached) {
return cached
}

// Return cached data if already loading to prevent duplicate requests
if (state.loadingKnowledgeBases.has(id)) {
return null
}

try {
set((state) => ({
loadingKnowledgeBases: new Set([...state.loadingKnowledgeBases, id]),
}))

const response = await fetch(`/api/knowledge/${id}`)

if (!response.ok) {
throw new Error(`Failed to fetch knowledge base: ${response.statusText}`)
}

const result = await response.json()

if (!result.success) {
throw new Error(result.error || 'Failed to fetch knowledge base')
}

const knowledgeBase = result.data

set((state) => ({
knowledgeBases: {
...state.knowledgeBases,
[id]: knowledgeBase,
},
loadingKnowledgeBases: new Set(
[...state.loadingKnowledgeBases].filter((loadingId) => loadingId !== id)
),
}))

logger.info(`Knowledge base loaded: ${id}`)
return knowledgeBase
} catch (error) {
logger.error(`Error fetching knowledge base ${id}:`, error)

set((state) => ({
loadingKnowledgeBases: new Set(
[...state.loadingKnowledgeBases].filter((loadingId) => loadingId !== id)
),
}))

throw error
}
},

getDocuments: async (
knowledgeBaseId: string,
options?: {
search?: string
limit?: number
offset?: number
sortBy?: string
sortOrder?: string
}
) => {
const state = get()

// Check if we have cached data that matches the exact request parameters
const cached = state.documents[knowledgeBaseId]
const requestLimit = options?.limit || 50
const requestOffset = options?.offset || 0
const requestSearch = options?.search
const requestSortBy = options?.sortBy
const requestSortOrder = options?.sortOrder

if (
cached &&
cached.searchQuery === requestSearch &&
cached.pagination.limit === requestLimit &&
cached.pagination.offset === requestOffset &&
cached.sortBy === requestSortBy &&
cached.sortOrder === requestSortOrder
) {
return cached.documents
}

// Return empty array if already loading to prevent duplicate requests
if (state.loadingDocuments.has(knowledgeBaseId)) {
return cached?.documents || []
}

try {
set((state) => ({
loadingDocuments: new Set([...state.loadingDocuments, knowledgeBaseId]),
}))

// Build query parameters using the same defaults as caching
const params = new URLSearchParams()
if (requestSearch) params.set('search', requestSearch)
if (requestSortBy) params.set('sortBy', requestSortBy)
if (requestSortOrder) params.set('sortOrder', requestSortOrder)
params.set('limit', requestLimit.toString())
params.set('offset', requestOffset.toString())

const url = `/api/knowledge/${knowledgeBaseId}/documents${params.toString() ? `?${params.toString()}` : ''}`
const response = await fetch(url)

if (!response.ok) {
throw new Error(`Failed to fetch documents: ${response.statusText}`)
}

const result = await response.json()

if (!result.success) {
throw new Error(result.error || 'Failed to fetch documents')
}

const documents = result.data.documents || result.data // Handle both paginated and non-paginated responses
const pagination = result.data.pagination || {
total: documents.length,
limit: requestLimit,
offset: requestOffset,
hasMore: false,
}

const documentsCache: DocumentsCache = {
documents,
pagination,
searchQuery: requestSearch,
sortBy: requestSortBy,
sortOrder: requestSortOrder,
lastFetchTime: Date.now(),
}

set((state) => ({
documents: {
...state.documents,
[knowledgeBaseId]: documentsCache,
},
loadingDocuments: new Set(
[...state.loadingDocuments].filter((loadingId) => loadingId !== knowledgeBaseId)
),
}))

logger.info(`Documents loaded for knowledge base: ${knowledgeBaseId}`)
return documents
} catch (error) {
logger.error(`Error fetching documents for knowledge base ${knowledgeBaseId}:`, error)

set((state) => ({
loadingDocuments: new Set(
[...state.loadingDocuments].filter((loadingId) => loadingId !== knowledgeBaseId)
),
}))

throw error
}
},
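getDocuments only reuses its cache when every request parameter matches what was cached. A reduced, standalone sketch of that comparison (helper and parameter names are illustrative):

```ts
// Reduced sketch of the "exact request parameters" cache check used by getDocuments.
interface DocRequestParams {
  search?: string
  limit: number
  offset: number
  sortBy?: string
  sortOrder?: string
}

function cacheMatches(cached: DocumentsCache | undefined, req: DocRequestParams): boolean {
  return (
    !!cached &&
    cached.searchQuery === req.search &&
    cached.pagination.limit === req.limit &&
    cached.pagination.offset === req.offset &&
    cached.sortBy === req.sortBy &&
    cached.sortOrder === req.sortOrder
  )
}
```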

getChunks: async (
knowledgeBaseId: string,
documentId: string,
options?: { search?: string; limit?: number; offset?: number }
) => {
const state = get()

// Return cached chunks if they exist and match the exact search criteria AND offset
const cached = state.chunks[documentId]
if (
cached &&
cached.searchQuery === options?.search &&
cached.pagination.offset === (options?.offset || 0) &&
cached.pagination.limit === (options?.limit || 50)
) {
return cached.chunks
}

// Return empty array if already loading to prevent duplicate requests
if (state.loadingChunks.has(documentId)) {
return cached?.chunks || []
}

try {
set((state) => ({
loadingChunks: new Set([...state.loadingChunks, documentId]),
}))

// Build query parameters
const params = new URLSearchParams()
if (options?.search) params.set('search', options.search)
if (options?.limit) params.set('limit', options.limit.toString())
if (options?.offset) params.set('offset', options.offset.toString())

const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks?${params.toString()}`
)

if (!response.ok) {
throw new Error(`Failed to fetch chunks: ${response.statusText}`)
}

const result = await response.json()

if (!result.success) {
throw new Error(result.error || 'Failed to fetch chunks')
}

const chunks = result.data
const pagination = result.pagination

set((state) => ({
chunks: {
...state.chunks,
[documentId]: {
chunks, // Always replace chunks for traditional pagination
pagination: {
total: pagination?.total || chunks.length,
limit: pagination?.limit || options?.limit || 50,
offset: pagination?.offset || options?.offset || 0,
hasMore: pagination?.hasMore || false,
},
searchQuery: options?.search,
lastFetchTime: Date.now(),
},
},
loadingChunks: new Set(
[...state.loadingChunks].filter((loadingId) => loadingId !== documentId)
),
}))

logger.info(`Chunks loaded for document: ${documentId}`)
return chunks
} catch (error) {
logger.error(`Error fetching chunks for document ${documentId}:`, error)

set((state) => ({
loadingChunks: new Set(
[...state.loadingChunks].filter((loadingId) => loadingId !== documentId)
),
}))

throw error
}
},

getKnowledgeBasesList: async (workspaceId?: string) => {
const state = get()

// Return cached list if we have already loaded it before (prevents infinite loops when empty)
if (state.knowledgeBasesListLoaded) {
return state.knowledgeBasesList
}

// Return cached data if already loading
if (state.loadingKnowledgeBasesList) {
return state.knowledgeBasesList
}

// Create an AbortController for request cancellation
const abortController = new AbortController()
const timeoutId = setTimeout(() => {
abortController.abort()
}, 10000) // 10 second timeout

try {
set({ loadingKnowledgeBasesList: true })

const url = workspaceId ? `/api/knowledge?workspaceId=${workspaceId}` : '/api/knowledge'
const response = await fetch(url, {
signal: abortController.signal,
headers: {
'Content-Type': 'application/json',
},
})

// Clear the timeout since request completed
clearTimeout(timeoutId)

if (!response.ok) {
throw new Error(
`Failed to fetch knowledge bases: ${response.status} ${response.statusText}`
)
}

const result = await response.json()

if (!result.success) {
throw new Error(result.error || 'Failed to fetch knowledge bases')
}

const knowledgeBasesList = result.data || []

set({
knowledgeBasesList,
loadingKnowledgeBasesList: false,
knowledgeBasesListLoaded: true, // Mark as loaded regardless of result to prevent infinite loops
})

logger.info(`Knowledge bases list loaded: ${knowledgeBasesList.length} items`)
return knowledgeBasesList
} catch (error) {
// Clear the timeout in case of error
clearTimeout(timeoutId)

logger.error('Error fetching knowledge bases list:', error)

// Always set loading to false, even on error
set({
loadingKnowledgeBasesList: false,
knowledgeBasesListLoaded: true, // Mark as loaded even on error to prevent infinite retries
})

// Don't throw on AbortError (timeout or cancellation)
if (error instanceof Error && error.name === 'AbortError') {
logger.warn('Knowledge bases list request was aborted (timeout or cancellation)')
return state.knowledgeBasesList // Return whatever we have cached
}

throw error
}
},
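getKnowledgeBasesList pairs the fetch with an AbortController and a timer. A generic sketch of that timeout pattern, factored into a standalone helper (the helper name and default are illustrative):

```ts
// Generic sketch of the abort-on-timeout pattern used above: abort the request
// if it takes longer than `ms`, and always clear the timer afterwards.
async function fetchWithTimeout(url: string, ms = 10_000): Promise<Response> {
  const controller = new AbortController()
  const timer = setTimeout(() => controller.abort(), ms)
  try {
    return await fetch(url, { signal: controller.signal })
  } finally {
    clearTimeout(timer)
  }
}

// Callers can treat an AbortError as "timed out or cancelled" rather than a hard
// failure, mirroring how the store handles it above.
```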

refreshDocuments: async (
knowledgeBaseId: string,
options?: {
search?: string
limit?: number
offset?: number
sortBy?: string
sortOrder?: string
}
) => {
const state = get()

// Return empty array if already loading to prevent duplicate requests
if (state.loadingDocuments.has(knowledgeBaseId)) {
return state.documents[knowledgeBaseId]?.documents || []
}

try {
set((state) => ({
loadingDocuments: new Set([...state.loadingDocuments, knowledgeBaseId]),
}))

// Build query parameters using consistent defaults
const requestLimit = options?.limit || 50
const requestOffset = options?.offset || 0
const requestSearch = options?.search
const requestSortBy = options?.sortBy
const requestSortOrder = options?.sortOrder

const params = new URLSearchParams()
if (requestSearch) params.set('search', requestSearch)
if (requestSortBy) params.set('sortBy', requestSortBy)
if (requestSortOrder) params.set('sortOrder', requestSortOrder)
params.set('limit', requestLimit.toString())
params.set('offset', requestOffset.toString())

const url = `/api/knowledge/${knowledgeBaseId}/documents${params.toString() ? `?${params.toString()}` : ''}`
const response = await fetch(url)

if (!response.ok) {
throw new Error(`Failed to fetch documents: ${response.statusText}`)
}

const result = await response.json()

if (!result.success) {
throw new Error(result.error || 'Failed to fetch documents')
}

const documents = result.data.documents || result.data
const pagination = result.data.pagination || {
total: documents.length,
limit: requestLimit,
offset: requestOffset,
hasMore: false,
}

const documentsCache: DocumentsCache = {
documents,
pagination,
searchQuery: requestSearch,
sortBy: requestSortBy,
sortOrder: requestSortOrder,
lastFetchTime: Date.now(),
}

set((state) => ({
documents: {
...state.documents,
[knowledgeBaseId]: documentsCache,
},
loadingDocuments: new Set(
[...state.loadingDocuments].filter((loadingId) => loadingId !== knowledgeBaseId)
),
}))

logger.info(`Documents refreshed for knowledge base: ${knowledgeBaseId}`)
return documents
} catch (error) {
logger.error(`Error refreshing documents for knowledge base ${knowledgeBaseId}:`, error)

set((state) => ({
loadingDocuments: new Set(
[...state.loadingDocuments].filter((loadingId) => loadingId !== knowledgeBaseId)
),
}))

throw error
}
},

refreshChunks: async (
knowledgeBaseId: string,
documentId: string,
options?: { search?: string; limit?: number; offset?: number }
) => {
const state = get()

// Return cached chunks if already loading to prevent duplicate requests
if (state.loadingChunks.has(documentId)) {
return state.chunks[documentId]?.chunks || []
}

try {
set((state) => ({
loadingChunks: new Set([...state.loadingChunks, documentId]),
}))

// Build query parameters - for refresh, always start from offset 0
const params = new URLSearchParams()
if (options?.search) params.set('search', options.search)
if (options?.limit) params.set('limit', options.limit.toString())
params.set('offset', '0') // Always start fresh on refresh

const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks?${params.toString()}`
)

if (!response.ok) {
throw new Error(`Failed to fetch chunks: ${response.statusText}`)
}

const result = await response.json()

if (!result.success) {
throw new Error(result.error || 'Failed to fetch chunks')
}

const chunks = result.data
const pagination = result.pagination

set((state) => ({
chunks: {
...state.chunks,
[documentId]: {
chunks, // Replace all chunks with fresh data
pagination: {
total: pagination?.total || chunks.length,
limit: pagination?.limit || options?.limit || 50,
offset: 0, // Reset to start
hasMore: pagination?.hasMore || false,
},
searchQuery: options?.search,
lastFetchTime: Date.now(),
},
},
loadingChunks: new Set(
[...state.loadingChunks].filter((loadingId) => loadingId !== documentId)
),
}))

logger.info(`Chunks refreshed for document: ${documentId}`)
return chunks
} catch (error) {
logger.error(`Error refreshing chunks for document ${documentId}:`, error)

set((state) => ({
loadingChunks: new Set(
[...state.loadingChunks].filter((loadingId) => loadingId !== documentId)
),
}))

throw error
}
},

updateDocument: (knowledgeBaseId: string, documentId: string, updates: Partial<DocumentData>) => {
set((state) => {
const documentsCache = state.documents[knowledgeBaseId]
if (!documentsCache) return state

const updatedDocuments = documentsCache.documents.map((doc) =>
doc.id === documentId ? { ...doc, ...updates } : doc
)

return {
documents: {
...state.documents,
[knowledgeBaseId]: {
...documentsCache,
documents: updatedDocuments,
},
},
}
})
},

updateChunk: (documentId: string, chunkId: string, updates: Partial<ChunkData>) => {
set((state) => {
const cachedChunks = state.chunks[documentId]
if (!cachedChunks || !cachedChunks.chunks) return state

const updatedChunks = cachedChunks.chunks.map((chunk) =>
chunk.id === chunkId ? { ...chunk, ...updates } : chunk
)

return {
chunks: {
...state.chunks,
[documentId]: {
...cachedChunks,
chunks: updatedChunks,
},
},
}
})
},
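updateDocument and updateChunk both follow the same immutable "update one item inside a keyed cache" shape. A generic, standalone sketch of that pattern (names are illustrative, not from the repo):

```ts
// Generic sketch: immutably update one item inside a keyed cache entry.
function updateItemInCache<T extends { id: string }>(
  cache: Record<string, { items: T[] }>,
  cacheKey: string,
  itemId: string,
  updates: Partial<T>
): Record<string, { items: T[] }> {
  const entry = cache[cacheKey]
  if (!entry) return cache // nothing cached for this key; leave state untouched
  return {
    ...cache,
    [cacheKey]: {
      ...entry,
      items: entry.items.map((item) => (item.id === itemId ? { ...item, ...updates } : item)),
    },
  }
}
```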

addPendingDocuments: (knowledgeBaseId: string, newDocuments: DocumentData[]) => {
set((state) => {
const existingDocumentsCache = state.documents[knowledgeBaseId]
const existingDocuments = existingDocumentsCache?.documents || []

const existingIds = new Set(existingDocuments.map((doc) => doc.id))
const uniqueNewDocuments = newDocuments.filter((doc) => !existingIds.has(doc.id))

if (uniqueNewDocuments.length === 0) {
logger.warn(`No new documents to add - all ${newDocuments.length} documents already exist`)
return state
}

const updatedDocuments = [...existingDocuments, ...uniqueNewDocuments]

const documentsCache: DocumentsCache = {
documents: updatedDocuments,
pagination: {
...(existingDocumentsCache?.pagination || {
limit: 50,
offset: 0,
hasMore: false,
}),
total: updatedDocuments.length,
},
searchQuery: existingDocumentsCache?.searchQuery,
lastFetchTime: Date.now(),
}

return {
documents: {
...state.documents,
[knowledgeBaseId]: documentsCache,
},
}
})
logger.info(
`Added ${newDocuments.filter((doc) => !get().documents[knowledgeBaseId]?.documents?.some((existing) => existing.id === doc.id)).length} pending documents for knowledge base: ${knowledgeBaseId}`
)
},

addKnowledgeBase: (knowledgeBase: KnowledgeBaseData) => {
set((state) => ({
knowledgeBases: {
...state.knowledgeBases,
[knowledgeBase.id]: knowledgeBase,
},
knowledgeBasesList: [knowledgeBase, ...state.knowledgeBasesList],
}))
logger.info(`Knowledge base added: ${knowledgeBase.id}`)
},

updateKnowledgeBase: (id: string, updates: Partial<KnowledgeBaseData>) => {
set((state) => {
const existingKb = state.knowledgeBases[id]
if (!existingKb) return state

const updatedKb = { ...existingKb, ...updates }

return {
knowledgeBases: {
...state.knowledgeBases,
[id]: updatedKb,
},
knowledgeBasesList: state.knowledgeBasesList.map((kb) => (kb.id === id ? updatedKb : kb)),
}
})
logger.info(`Knowledge base updated: ${id}`)
},

removeKnowledgeBase: (id: string) => {
set((state) => {
const newKnowledgeBases = { ...state.knowledgeBases }
delete newKnowledgeBases[id]

const newDocuments = { ...state.documents }
delete newDocuments[id]

return {
knowledgeBases: newKnowledgeBases,
documents: newDocuments,
knowledgeBasesList: state.knowledgeBasesList.filter((kb) => kb.id !== id),
}
})
logger.info(`Knowledge base removed: ${id}`)
},

removeDocument: (knowledgeBaseId: string, documentId: string) => {
set((state) => {
const documentsCache = state.documents[knowledgeBaseId]
if (!documentsCache) return state

const updatedDocuments = documentsCache.documents.filter((doc) => doc.id !== documentId)

// Also clear chunks for the removed document
const newChunks = { ...state.chunks }
delete newChunks[documentId]

return {
documents: {
...state.documents,
[knowledgeBaseId]: {
...documentsCache,
documents: updatedDocuments,
},
},
chunks: newChunks,
}
})
logger.info(`Document removed from knowledge base: ${documentId}`)
},

clearDocuments: (knowledgeBaseId: string) => {
set((state) => {
const newDocuments = { ...state.documents }
delete newDocuments[knowledgeBaseId]
return { documents: newDocuments }
})
logger.info(`Documents cleared for knowledge base: ${knowledgeBaseId}`)
},

clearChunks: (documentId: string) => {
set((state) => {
const newChunks = { ...state.chunks }
delete newChunks[documentId]
return { chunks: newChunks }
})
logger.info(`Chunks cleared for document: ${documentId}`)
},

clearKnowledgeBasesList: () => {
set({
knowledgeBasesList: [],
knowledgeBasesListLoaded: false, // Reset loaded state to allow reloading
})
logger.info('Knowledge bases list cleared')
},
}))
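With this store deleted, chunk and document data is read and refreshed through the React Query cache instead of a hand-rolled zustand cache. A minimal sketch of the invalidation-based refresh that replaces it, consistent with the hook changes above (the query-key shape is simplified here and only assumed):

```ts
import { QueryClient } from '@tanstack/react-query'

// Minimal sketch of the query-cache-based replacement for the deleted store:
// rather than mutating a zustand cache, callers invalidate the relevant query
// key and let React Query refetch. The key shape mirrors knowledgeKeys.chunks
// in spirit but is simplified/assumed for this example.
async function refreshChunksViaQueryCache(
  queryClient: QueryClient,
  knowledgeBaseId: string,
  documentId: string,
  paramsKey: string
) {
  await queryClient.invalidateQueries({
    queryKey: ['knowledge', 'detail', knowledgeBaseId, 'documents', documentId, 'chunks', paramsKey],
  })
}
```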

bun.lock
@@ -1,5 +1,6 @@
{
"lockfileVersion": 1,
"configVersion": 0,
"workspaces": {
"": {
"name": "simstudio",
@@ -126,7 +127,6 @@
"ffmpeg-static": "5.3.0",
"fluent-ffmpeg": "2.1.3",
"framer-motion": "^12.5.0",
"fuse.js": "7.1.0",
"google-auth-library": "10.5.0",
"gray-matter": "^4.0.3",
"groq-sdk": "^0.15.0",
@@ -2176,8 +2176,6 @@

"function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="],

"fuse.js": ["fuse.js@7.1.0", "", {}, "sha512-trLf4SzuuUxfusZADLINj+dE8clK1frKdmqiJNb1Es75fmI5oY6X2mxLVUciLLjxqw/xr72Dhy+lER6dGd02FQ=="],

"gaxios": ["gaxios@7.1.3", "", { "dependencies": { "extend": "^3.0.2", "https-proxy-agent": "^7.0.1", "node-fetch": "^3.3.2", "rimraf": "^5.0.1" } }, "sha512-YGGyuEdVIjqxkxVH1pUTMY/XtmmsApXrCVv5EU25iX6inEPbV+VakJfLealkBtJN69AQmh1eGOdCl9Sm1UP6XQ=="],

"gcp-metadata": ["gcp-metadata@8.1.2", "", { "dependencies": { "gaxios": "^7.0.0", "google-logging-utils": "^1.0.0", "json-bigint": "^1.0.0" } }, "sha512-zV/5HKTfCeKWnxG0Dmrw51hEWFGfcF2xiXqcA3+J90WDuP0SvoiSO5ORvcBsifmx/FoIjgQN3oNOGaQ5PhLFkg=="],