fix(search-chunk): searchbar in knowledge base chunk (#557)

* fix: chunk search bar fix

* fix: fixed reload and refresh

* fix: fixed structure

* fix: need to fix persisting in knowledge search

* fix: adding page as query param

* fix: bun run lint (#557)

* feat: added instantaneous client-side search with fuzzy matching and text highlighting

---------

Co-authored-by: Adam Gough <adamgough@Mac.attlocal.net>
Co-authored-by: Waleed Latif <walif6@gmail.com>
This commit is contained in:
Adam Gough
2025-07-09 10:52:28 -07:00
committed by GitHub
parent 3421eaec27
commit 4a26b061a4
5 changed files with 419 additions and 176 deletions

View File

@@ -1,23 +1,15 @@
'use client'
import { useCallback, useEffect, useState } from 'react'
import {
ChevronLeft,
ChevronRight,
Circle,
CircleOff,
FileText,
Plus,
Search,
Trash2,
X,
} from 'lucide-react'
import { useParams } from 'next/navigation'
import { ChevronLeft, ChevronRight, Circle, CircleOff, FileText, Plus, Trash2 } from 'lucide-react'
import { useParams, useRouter, useSearchParams } from 'next/navigation'
import { Button } from '@/components/ui/button'
import { Checkbox } from '@/components/ui/checkbox'
import { SearchHighlight } from '@/components/ui/search-highlight'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { createLogger } from '@/lib/logs/console-logger'
import { ActionBar } from '@/app/workspace/[workspaceId]/knowledge/[id]/components/action-bar/action-bar'
import { SearchInput } from '@/app/workspace/[workspaceId]/knowledge/components/search-input/search-input'
import { useDocumentChunks } from '@/hooks/use-knowledge'
import { type ChunkData, type DocumentData, useKnowledgeStore } from '@/stores/knowledge/store'
import { useSidebarStore } from '@/stores/sidebar/store'
@@ -56,11 +48,35 @@ export function Document({
const { mode, isExpanded } = useSidebarStore()
const { getCachedKnowledgeBase, getCachedDocuments } = useKnowledgeStore()
const { workspaceId } = useParams()
const router = useRouter()
const searchParams = useSearchParams()
const isSidebarCollapsed =
mode === 'expanded' ? !isExpanded : mode === 'collapsed' || mode === 'hover'
const [searchQuery, setSearchQuery] = useState('')
const currentPageFromURL = Number.parseInt(searchParams.get('page') || '1', 10)
const {
chunks: paginatedChunks,
allChunks,
filteredChunks,
searchQuery,
setSearchQuery,
currentPage,
totalPages,
hasNextPage,
hasPrevPage,
goToPage,
nextPage,
prevPage,
isLoading: isLoadingAllChunks,
error: chunksError,
refreshChunks,
updateChunk,
} = useDocumentChunks(knowledgeBaseId, documentId, currentPageFromURL, '', {
enableClientSearch: true,
})
const [selectedChunks, setSelectedChunks] = useState<Set<string>>(new Set())
const [selectedChunk, setSelectedChunk] = useState<ChunkData | null>(null)
const [isModalOpen, setIsModalOpen] = useState(false)
@@ -73,61 +89,33 @@ export function Document({
const [isLoadingDocument, setIsLoadingDocument] = useState(true)
const [error, setError] = useState<string | null>(null)
// Use the updated chunks hook with pagination
const {
chunks,
isLoading: isLoadingChunks,
error: chunksError,
currentPage,
totalPages,
hasNextPage,
hasPrevPage,
goToPage,
nextPage,
prevPage,
refreshChunks,
updateChunk,
} = useDocumentChunks(knowledgeBaseId, documentId)
// Combine errors
const combinedError = error || chunksError
// Handle pagination navigation
const handlePrevPage = useCallback(() => {
if (hasPrevPage && !isLoadingChunks) {
prevPage()?.catch((err) => {
logger.error('Previous page failed:', err)
})
}
}, [hasPrevPage, isLoadingChunks, prevPage])
const handleNextPage = useCallback(() => {
if (hasNextPage && !isLoadingChunks) {
nextPage()?.catch((err) => {
logger.error('Next page failed:', err)
})
}
}, [hasNextPage, isLoadingChunks, nextPage])
const handleGoToPage = useCallback(
(page: number) => {
if (page !== currentPage && !isLoadingChunks) {
goToPage(page)?.catch((err) => {
logger.error('Go to page failed:', err)
})
// URL synchronization for pagination
const updatePageInURL = useCallback(
(newPage: number) => {
const params = new URLSearchParams(searchParams)
if (newPage > 1) {
params.set('page', newPage.toString())
} else {
params.delete('page')
}
router.replace(`?${params.toString()}`, { scroll: false })
},
[currentPage, isLoadingChunks, goToPage]
[router, searchParams]
)
// Try to get document from store cache first, then fetch if needed
// Sync URL when page changes
useEffect(() => {
updatePageInURL(currentPage)
}, [currentPage, updatePageInURL])
useEffect(() => {
const fetchDocument = async () => {
try {
setIsLoadingDocument(true)
setError(null)
// First try to get from cached documents in the store
const cachedDocuments = getCachedDocuments(knowledgeBaseId)
const cachedDoc = cachedDocuments?.find((d) => d.id === documentId)
@@ -137,7 +125,6 @@ export function Document({
return
}
// If not in cache, fetch from API
const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents/${documentId}`)
if (!response.ok) {
@@ -191,7 +178,7 @@ export function Document({
}
const handleToggleEnabled = async (chunkId: string) => {
const chunk = chunks.find((c) => c.id === chunkId)
const chunk = allChunks.find((c) => c.id === chunkId)
if (!chunk) return
try {
@@ -223,7 +210,7 @@ export function Document({
}
const handleDeleteChunk = (chunkId: string) => {
const chunk = chunks.find((c) => c.id === chunkId)
const chunk = allChunks.find((c) => c.id === chunkId)
if (chunk) {
setChunkToDelete(chunk)
setIsDeleteModalOpen(true)
@@ -260,7 +247,7 @@ export function Document({
const handleSelectAll = (checked: boolean) => {
if (checked) {
setSelectedChunks(new Set(chunks.map((chunk) => chunk.id)))
setSelectedChunks(new Set(paginatedChunks.map((chunk) => chunk.id)))
} else {
setSelectedChunks(new Set())
}
@@ -329,28 +316,32 @@ export function Document({
}
const handleBulkEnable = async () => {
const chunksToEnable = chunks.filter((chunk) => selectedChunks.has(chunk.id) && !chunk.enabled)
const chunksToEnable = allChunks.filter(
(chunk) => selectedChunks.has(chunk.id) && !chunk.enabled
)
await performBulkChunkOperation('enable', chunksToEnable)
}
const handleBulkDisable = async () => {
const chunksToDisable = chunks.filter((chunk) => selectedChunks.has(chunk.id) && chunk.enabled)
const chunksToDisable = allChunks.filter(
(chunk) => selectedChunks.has(chunk.id) && chunk.enabled
)
await performBulkChunkOperation('disable', chunksToDisable)
}
const handleBulkDelete = async () => {
const chunksToDelete = chunks.filter((chunk) => selectedChunks.has(chunk.id))
const chunksToDelete = allChunks.filter((chunk) => selectedChunks.has(chunk.id))
await performBulkChunkOperation('delete', chunksToDelete)
}
// Calculate bulk operation counts
const selectedChunksList = chunks.filter((chunk) => selectedChunks.has(chunk.id))
const selectedChunksList = allChunks.filter((chunk) => selectedChunks.has(chunk.id))
const enabledCount = selectedChunksList.filter((chunk) => chunk.enabled).length
const disabledCount = selectedChunksList.filter((chunk) => !chunk.enabled).length
const isAllSelected = chunks.length > 0 && selectedChunks.size === chunks.length
const isAllSelected = paginatedChunks.length > 0 && selectedChunks.size === paginatedChunks.length
if (isLoadingDocument || isLoadingChunks) {
if (isLoadingDocument || isLoadingAllChunks) {
return (
<DocumentLoading
knowledgeBaseId={knowledgeBaseId}
@@ -360,7 +351,7 @@ export function Document({
)
}
if (combinedError && !isLoadingChunks) {
if (combinedError && !isLoadingAllChunks) {
const errorBreadcrumbs = [
{ label: 'Knowledge', href: `/workspace/${workspaceId}/knowledge` },
{
@@ -404,31 +395,16 @@ export function Document({
<div className='px-6 pb-6'>
{/* Search Section */}
<div className='mb-4 flex items-center justify-between pt-1'>
<div className='relative max-w-md'>
<div className='relative flex items-center'>
<Search className='-translate-y-1/2 pointer-events-none absolute top-1/2 left-3 h-[18px] w-[18px] transform text-muted-foreground' />
<input
type='text'
<SearchInput
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
onChange={setSearchQuery}
placeholder={
document?.processingStatus === 'completed'
? 'Search chunks...'
: 'Document processing...'
}
disabled={document?.processingStatus !== 'completed'}
className='h-10 w-full rounded-md border bg-background px-9 py-2 text-sm ring-offset-background file:border-0 file:bg-transparent file:font-medium file:text-sm placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50'
/>
{searchQuery && document?.processingStatus === 'completed' && (
<button
onClick={() => setSearchQuery('')}
className='-translate-y-1/2 absolute top-1/2 right-3 transform text-muted-foreground hover:text-foreground'
>
<X className='h-[18px] w-[18px]' />
</button>
)}
</div>
</div>
<Button
onClick={() => setIsCreateChunkModalOpen(true)}
@@ -442,7 +418,7 @@ export function Document({
</div>
{/* Error State for chunks */}
{combinedError && !isLoadingChunks && (
{combinedError && !isLoadingAllChunks && (
<div className='mb-4 rounded-md border border-red-200 bg-red-50 p-4'>
<p className='text-red-800 text-sm'>Error loading chunks: {combinedError}</p>
</div>
@@ -540,7 +516,7 @@ export function Document({
<div className='text-muted-foreground text-xs'></div>
</td>
</tr>
) : chunks.length === 0 && !isLoadingChunks ? (
) : paginatedChunks.length === 0 && !isLoadingAllChunks ? (
<tr className='border-b transition-colors hover:bg-accent/30'>
<td className='px-4 py-3'>
<div className='h-3.5 w-3.5' />
@@ -553,7 +529,9 @@ export function Document({
<FileText className='h-5 w-5 text-muted-foreground' />
<span className='text-muted-foreground text-sm italic'>
{document?.processingStatus === 'completed'
? 'No chunks found'
? searchQuery.trim()
? 'No chunks match your search'
: 'No chunks found'
: 'Document is still processing...'}
</span>
</div>
@@ -568,7 +546,7 @@ export function Document({
<div className='text-muted-foreground text-xs'></div>
</td>
</tr>
) : isLoadingChunks ? (
) : isLoadingAllChunks ? (
// Show loading skeleton rows when chunks are loading
Array.from({ length: 5 }).map((_, index) => (
<tr key={`loading-${index}`} className='border-b transition-colors'>
@@ -593,7 +571,7 @@ export function Document({
</tr>
))
) : (
chunks.map((chunk) => (
paginatedChunks.map((chunk) => (
<tr
key={chunk.id}
className='cursor-pointer border-b transition-colors hover:bg-accent/30'
@@ -620,7 +598,10 @@ export function Document({
{/* Content column */}
<td className='px-4 py-3'>
<div className='text-sm' title={chunk.content}>
{truncateContent(chunk.content)}
<SearchHighlight
text={truncateContent(chunk.content)}
searchQuery={searchQuery}
/>
</div>
</td>
@@ -700,8 +681,8 @@ export function Document({
<Button
variant='ghost'
size='sm'
onClick={handlePrevPage}
disabled={!hasPrevPage || isLoadingChunks}
onClick={prevPage}
disabled={!hasPrevPage || isLoadingAllChunks}
className='h-8 w-8 p-0'
>
<ChevronLeft className='h-4 w-4' />
@@ -726,8 +707,8 @@ export function Document({
return (
<button
key={page}
onClick={() => handleGoToPage(page)}
disabled={isLoadingChunks}
onClick={() => goToPage(page)}
disabled={isLoadingAllChunks}
className={`font-medium text-sm transition-colors hover:text-foreground disabled:cursor-not-allowed disabled:opacity-50 ${
page === currentPage ? 'text-foreground' : 'text-muted-foreground'
}`}
@@ -741,8 +722,8 @@ export function Document({
<Button
variant='ghost'
size='sm'
onClick={handleNextPage}
disabled={!hasNextPage || isLoadingChunks}
onClick={nextPage}
disabled={!hasNextPage || isLoadingAllChunks}
className='h-8 w-8 p-0'
>
<ChevronRight className='h-4 w-4' />
@@ -767,7 +748,7 @@ export function Document({
updateChunk(updatedChunk.id, updatedChunk)
setSelectedChunk(updatedChunk)
}}
allChunks={chunks}
allChunks={allChunks}
currentPage={currentPage}
totalPages={totalPages}
onNavigateToChunk={(chunk: ChunkData) => {
@@ -777,11 +758,11 @@ export function Document({
await goToPage(page)
const checkAndSelectChunk = () => {
if (!isLoadingChunks && chunks.length > 0) {
if (!isLoadingAllChunks && paginatedChunks.length > 0) {
if (selectChunk === 'first') {
setSelectedChunk(chunks[0])
setSelectedChunk(paginatedChunks[0])
} else {
setSelectedChunk(chunks[chunks.length - 1])
setSelectedChunk(paginatedChunks[paginatedChunks.length - 1])
}
} else {
// Retry after a short delay if chunks aren't loaded yet

View File

@@ -0,0 +1,55 @@
'use client'
import { Fragment } from 'react'

interface SearchHighlightProps {
  /** The text to render. */
  text: string
  /** Space-separated search terms; each term is highlighted independently. */
  searchQuery: string
  /** Optional class applied to the wrapping span. */
  className?: string
}

/**
 * Renders `text` with every case-insensitive occurrence of the terms in
 * `searchQuery` wrapped in a highlighted span. Returns the plain text in a
 * span when the query is empty or whitespace-only.
 */
export function SearchHighlight({ text, searchQuery, className = '' }: SearchHighlightProps) {
  if (!searchQuery.trim()) {
    return <span className={className}>{text}</span>
  }

  // Split the query on whitespace so multiple terms each get highlighted.
  const searchTerms = searchQuery
    .trim()
    .split(/\s+/)
    .filter((term) => term.length > 0)

  if (searchTerms.length === 0) {
    return <span className={className}>{text}</span>
  }

  // Escape regex metacharacters so user input is matched literally.
  const escapedTerms = searchTerms.map((term) => term.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'))
  // A single capturing group makes String.prototype.split keep the matched
  // text: the result alternates [non-match, match, non-match, match, ...].
  const regex = new RegExp(`(${escapedTerms.join('|')})`, 'gi')
  const parts = text.split(regex)

  return (
    <span className={className}>
      {parts.map((part, index) => {
        if (!part) return null
        // Odd indices are the captured (matched) segments. Classifying by
        // index fixes the previous `regex.test(part)` approach, which was
        // broken: a `g`-flagged regex's `test` is stateful (it advances
        // `lastIndex` between calls), so alternate matches were silently
        // left unhighlighted.
        const isMatch = index % 2 === 1
        return (
          <Fragment key={index}>
            {isMatch ? (
              <span className='rounded-sm bg-yellow-200 px-0.5 py-0.5 font-medium text-yellow-900 dark:bg-yellow-900/50 dark:text-yellow-200'>
                {part}
              </span>
            ) : (
              part
            )}
          </Fragment>
        )
      })}
    </span>
  )
}

View File

@@ -1,4 +1,5 @@
import { useEffect, useState } from 'react'
import { useCallback, useEffect, useMemo, useState } from 'react'
import Fuse from 'fuse.js'
import { type ChunkData, type DocumentData, useKnowledgeStore } from '@/stores/knowledge/store'
export function useKnowledgeBase(id: string) {
@@ -213,13 +214,23 @@ export function useKnowledgeBasesList() {
}
/**
* Hook to manage chunks for a specific document
* Hook to manage chunks for a specific document with optional client-side search
*/
export function useDocumentChunks(knowledgeBaseId: string, documentId: string) {
export function useDocumentChunks(
knowledgeBaseId: string,
documentId: string,
urlPage = 1,
urlSearch = '',
options: { enableClientSearch?: boolean } = {}
) {
const { getChunks, refreshChunks, updateChunk, getCachedChunks, clearChunks, isChunksLoading } =
useKnowledgeStore()
const { enableClientSearch = false } = options
// State for both modes
const [chunks, setChunks] = useState<ChunkData[]>([])
const [allChunks, setAllChunks] = useState<ChunkData[]>([])
const [isLoading, setIsLoading] = useState(true)
const [error, setError] = useState<string | null>(null)
const [pagination, setPagination] = useState({
@@ -228,18 +239,212 @@ export function useDocumentChunks(knowledgeBaseId: string, documentId: string) {
offset: 0,
hasMore: false,
})
const [currentPage, setCurrentPage] = useState(1)
const [searchQuery, setSearchQuery] = useState<string>('')
const [initialLoadDone, setInitialLoadDone] = useState(false)
const [isMounted, setIsMounted] = useState(false)
// Client-side search state
const [searchQuery, setSearchQuery] = useState('')
const [currentPage, setCurrentPage] = useState(urlPage)
// Handle mounting state
useEffect(() => {
setIsMounted(true)
return () => setIsMounted(false)
}, [])
// Sync with URL page changes
useEffect(() => {
setCurrentPage(urlPage)
}, [urlPage])
const isStoreLoading = isChunksLoading(documentId)
const combinedIsLoading = isLoading || isStoreLoading
// Computed pagination properties
const totalPages = Math.ceil(pagination.total / pagination.limit)
if (enableClientSearch) {
const loadAllChunks = useCallback(async () => {
if (!knowledgeBaseId || !documentId || !isMounted) return
try {
setIsLoading(true)
setError(null)
const allChunksData: ChunkData[] = []
let hasMore = true
let offset = 0
const limit = 50
while (hasMore && isMounted) {
const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks?limit=${limit}&offset=${offset}`
)
if (!response.ok) {
throw new Error('Failed to fetch chunks')
}
const result = await response.json()
if (result.success) {
allChunksData.push(...result.data)
hasMore = result.pagination.hasMore
offset += limit
} else {
throw new Error(result.error || 'Failed to fetch chunks')
}
}
if (isMounted) {
setAllChunks(allChunksData)
setChunks(allChunksData) // For compatibility
}
} catch (err) {
if (isMounted) {
setError(err instanceof Error ? err.message : 'Failed to load chunks')
}
} finally {
if (isMounted) {
setIsLoading(false)
}
}
}, [knowledgeBaseId, documentId, isMounted])
// Load chunks on mount
useEffect(() => {
if (isMounted) {
loadAllChunks()
}
}, [isMounted, loadAllChunks])
// Client-side filtering with fuzzy search
const filteredChunks = useMemo(() => {
if (!isMounted || !searchQuery.trim()) return allChunks
const fuse = new Fuse(allChunks, {
keys: ['content'],
threshold: 0.3, // Lower = more strict matching
includeScore: true,
includeMatches: true,
minMatchCharLength: 2,
ignoreLocation: true,
})
const results = fuse.search(searchQuery)
return results.map((result) => result.item)
}, [allChunks, searchQuery, isMounted])
// Client-side pagination
const CHUNKS_PER_PAGE = 50
const totalPages = Math.max(1, Math.ceil(filteredChunks.length / CHUNKS_PER_PAGE))
const hasNextPage = currentPage < totalPages
const hasPrevPage = currentPage > 1
const paginatedChunks = useMemo(() => {
const startIndex = (currentPage - 1) * CHUNKS_PER_PAGE
const endIndex = startIndex + CHUNKS_PER_PAGE
return filteredChunks.slice(startIndex, endIndex)
}, [filteredChunks, currentPage])
// Reset to page 1 when search changes
useEffect(() => {
if (currentPage > 1) {
setCurrentPage(1)
}
}, [searchQuery])
// Reset to valid page if current page exceeds total
useEffect(() => {
if (currentPage > totalPages && totalPages > 0) {
setCurrentPage(totalPages)
}
}, [currentPage, totalPages])
// Navigation functions
const goToPage = useCallback(
(page: number) => {
if (page >= 1 && page <= totalPages) {
setCurrentPage(page)
}
},
[totalPages]
)
const nextPage = useCallback(() => {
if (hasNextPage) {
setCurrentPage((prev) => prev + 1)
}
}, [hasNextPage])
const prevPage = useCallback(() => {
if (hasPrevPage) {
setCurrentPage((prev) => prev - 1)
}
}, [hasPrevPage])
// Operations
const refreshChunksData = useCallback(async () => {
await loadAllChunks()
}, [loadAllChunks])
const updateChunkLocal = useCallback((chunkId: string, updates: Partial<ChunkData>) => {
setAllChunks((prev) =>
prev.map((chunk) => (chunk.id === chunkId ? { ...chunk, ...updates } : chunk))
)
setChunks((prev) =>
prev.map((chunk) => (chunk.id === chunkId ? { ...chunk, ...updates } : chunk))
)
}, [])
return {
// Data - return paginatedChunks as chunks for display
chunks: paginatedChunks,
allChunks,
filteredChunks,
paginatedChunks,
// Search
searchQuery,
setSearchQuery,
// Pagination
currentPage,
totalPages,
hasNextPage,
hasPrevPage,
goToPage,
nextPage,
prevPage,
// State
isLoading: combinedIsLoading,
error,
pagination: {
total: filteredChunks.length,
limit: CHUNKS_PER_PAGE,
offset: (currentPage - 1) * CHUNKS_PER_PAGE,
hasMore: hasNextPage,
},
// Operations
refreshChunks: refreshChunksData,
updateChunk: updateChunkLocal,
clearChunks: () => clearChunks(documentId),
// Legacy compatibility
searchChunks: async (newSearchQuery: string) => {
setSearchQuery(newSearchQuery)
return paginatedChunks
},
}
}
const serverCurrentPage = urlPage
const serverSearchQuery = urlSearch
// Computed pagination properties
const serverTotalPages = Math.ceil(pagination.total / pagination.limit)
const serverHasNextPage = serverCurrentPage < serverTotalPages
const serverHasPrevPage = serverCurrentPage > 1
// Single effect to handle all data loading and syncing
useEffect(() => {
if (!knowledgeBaseId || !documentId) return
@@ -250,10 +455,12 @@ export function useDocumentChunks(knowledgeBaseId: string, documentId: string) {
try {
// Check cache first
const cached = getCachedChunks(documentId)
const expectedOffset = (serverCurrentPage - 1) * 50 // Use hardcoded limit
if (
cached &&
cached.searchQuery === searchQuery &&
cached.pagination.offset === (currentPage - 1) * pagination.limit
cached.searchQuery === serverSearchQuery &&
cached.pagination.offset === expectedOffset
) {
if (isMounted) {
setChunks(cached.chunks)
@@ -264,17 +471,17 @@ export function useDocumentChunks(knowledgeBaseId: string, documentId: string) {
return
}
// If not cached and we haven't done initial load, fetch from API
if (!initialLoadDone && !isStoreLoading) {
// Fetch from API
setIsLoading(true)
setError(null)
const offset = (currentPage - 1) * pagination.limit
const limit = 50
const offset = (serverCurrentPage - 1) * limit
const fetchedChunks = await getChunks(knowledgeBaseId, documentId, {
limit: pagination.limit,
limit,
offset,
search: searchQuery || undefined,
search: serverSearchQuery || undefined,
})
if (isMounted) {
@@ -288,7 +495,6 @@ export function useDocumentChunks(knowledgeBaseId: string, documentId: string) {
setInitialLoadDone(true)
}
}
} catch (err) {
if (isMounted) {
setError(err instanceof Error ? err.message : 'Failed to load chunks')
@@ -308,11 +514,10 @@ export function useDocumentChunks(knowledgeBaseId: string, documentId: string) {
}, [
knowledgeBaseId,
documentId,
currentPage,
searchQuery,
serverCurrentPage,
serverSearchQuery,
isStoreLoading,
initialLoadDone,
pagination.limit,
])
// Separate effect to sync with store state changes (no API calls)
@@ -320,10 +525,12 @@ export function useDocumentChunks(knowledgeBaseId: string, documentId: string) {
if (!documentId || !initialLoadDone) return
const cached = getCachedChunks(documentId)
const expectedOffset = (serverCurrentPage - 1) * 50
if (
cached &&
cached.searchQuery === searchQuery &&
cached.pagination.offset === (currentPage - 1) * pagination.limit
cached.searchQuery === serverSearchQuery &&
cached.pagination.offset === expectedOffset
) {
setChunks(cached.chunks)
setPagination(cached.pagination)
@@ -333,30 +540,22 @@ export function useDocumentChunks(knowledgeBaseId: string, documentId: string) {
if (!isStoreLoading && isLoading) {
setIsLoading(false)
}
}, [
documentId,
isStoreLoading,
isLoading,
initialLoadDone,
searchQuery,
currentPage,
pagination.limit,
])
}, [documentId, isStoreLoading, isLoading, initialLoadDone, serverSearchQuery, serverCurrentPage])
const goToPage = async (page: number) => {
if (page < 1 || page > totalPages || page === currentPage) return
if (page < 1 || page > serverTotalPages || page === serverCurrentPage) return
try {
setIsLoading(true)
setError(null)
setCurrentPage(page)
const offset = (page - 1) * pagination.limit
const limit = 50
const offset = (page - 1) * limit
const fetchedChunks = await getChunks(knowledgeBaseId, documentId, {
limit: pagination.limit,
limit,
offset,
search: searchQuery || undefined,
search: serverSearchQuery || undefined,
})
// Update local state from cache
@@ -376,14 +575,14 @@ export function useDocumentChunks(knowledgeBaseId: string, documentId: string) {
}
const nextPage = () => {
if (hasNextPage) {
return goToPage(currentPage + 1)
if (serverHasNextPage) {
return goToPage(serverCurrentPage + 1)
}
}
const prevPage = () => {
if (hasPrevPage) {
return goToPage(currentPage - 1)
if (serverHasPrevPage) {
return goToPage(serverCurrentPage - 1)
}
}
@@ -391,21 +590,18 @@ export function useDocumentChunks(knowledgeBaseId: string, documentId: string) {
search?: string
limit?: number
offset?: number
preservePage?: boolean
}) => {
try {
setIsLoading(true)
setError(null)
// Update search query if provided and reset to page 1
if (options?.search !== undefined) {
setSearchQuery(options.search)
setCurrentPage(1)
}
const offset = options?.offset ?? (currentPage - 1) * pagination.limit
const limit = 50
const offset = options?.offset ?? (serverCurrentPage - 1) * limit
const fetchedChunks = await refreshChunks(knowledgeBaseId, documentId, {
...options,
search: options?.search,
limit,
offset,
})
@@ -429,13 +625,12 @@ export function useDocumentChunks(knowledgeBaseId: string, documentId: string) {
try {
setIsLoading(true)
setError(null)
setSearchQuery(newSearchQuery)
setCurrentPage(1) // Reset to first page for new search
const limit = 50
const searchResults = await getChunks(knowledgeBaseId, documentId, {
search: newSearchQuery,
limit: pagination.limit,
offset: 0, // Reset to first page for new search
limit,
offset: 0, // Always start from first page for search
})
// Update local state from cache
@@ -456,13 +651,21 @@ export function useDocumentChunks(knowledgeBaseId: string, documentId: string) {
return {
chunks,
allChunks: chunks, // In server mode, allChunks is the same as chunks
filteredChunks: chunks, // In server mode, filteredChunks is the same as chunks
paginatedChunks: chunks, // In server mode, paginatedChunks is the same as chunks
// Search (not used in server mode but needed for consistency)
searchQuery: urlSearch,
setSearchQuery: () => {}, // No-op in server mode
isLoading: combinedIsLoading,
error,
pagination,
currentPage,
totalPages,
hasNextPage,
hasPrevPage,
currentPage: serverCurrentPage,
totalPages: serverTotalPages,
hasNextPage: serverHasNextPage,
hasPrevPage: serverHasPrevPage,
goToPage,
nextPage,
prevPage,

View File

@@ -81,6 +81,7 @@
"drizzle-orm": "^0.41.0",
"framer-motion": "^12.5.0",
"freestyle-sandboxes": "^0.0.38",
"fuse.js": "7.1.0",
"groq-sdk": "^0.15.0",
"input-otp": "^1.4.2",
"ioredis": "^5.6.0",

View File

@@ -112,6 +112,7 @@
"drizzle-orm": "^0.41.0",
"framer-motion": "^12.5.0",
"freestyle-sandboxes": "^0.0.38",
"fuse.js": "7.1.0",
"groq-sdk": "^0.15.0",
"input-otp": "^1.4.2",
"ioredis": "^5.6.0",
@@ -1950,6 +1951,8 @@
"function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="],
"fuse.js": ["fuse.js@7.1.0", "", {}, "sha512-trLf4SzuuUxfusZADLINj+dE8clK1frKdmqiJNb1Es75fmI5oY6X2mxLVUciLLjxqw/xr72Dhy+lER6dGd02FQ=="],
"gaxios": ["gaxios@6.7.1", "", { "dependencies": { "extend": "^3.0.2", "https-proxy-agent": "^7.0.1", "is-stream": "^2.0.0", "node-fetch": "^2.6.9", "uuid": "^9.0.1" } }, "sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ=="],
"gcp-metadata": ["gcp-metadata@6.1.1", "", { "dependencies": { "gaxios": "^6.1.1", "google-logging-utils": "^0.0.2", "json-bigint": "^1.0.0" } }, "sha512-a4tiq7E0/5fTjxPAaH4jpjkSv/uCaU2p5KC6HVGrvl0cDjA8iBZv4vv1gyzlmK0ZUKqwpOyQMKzZQe3lTit77A=="],