fix(streaming): smoother streaming with throttled rendering, ResizeObserver scroll, and batched updates (#3471)

* fix(streaming): smoother streaming with throttled rendering, ResizeObserver scroll, and batched updates

- Add useThrottledValue hook (100ms trailing-edge throttle) to gate DOM re-renders during streaming across all chat surfaces
- Replace 100ms setInterval scroll polling with ResizeObserver-based auto-scroll, programmatic scroll timestamp tracking, and nested [data-scrollable] region handling
- Extract processContentBuffer from inline content handler for cleaner code organization in copilot SSE handlers
- Add RAF-based update batching (50ms max interval) to floating chat and home chat streaming paths
- Add useProgressiveList hook for progressive rendering of long conversation histories via requestAnimationFrame

Made-with: Cursor

* ack PR comments

* fix search modal

* more comments

* ack comments

* count

* ack comments

* ack comment
This commit is contained in:
Waleed
2026-03-09 13:27:33 -07:00
committed by GitHub
parent 7b43091984
commit 64cedfcff7
14 changed files with 754 additions and 397 deletions

View File

@@ -8,6 +8,7 @@ import {
ChatFileDownloadAll,
} from '@/app/chat/components/message/components/file-download'
import MarkdownRenderer from '@/app/chat/components/message/components/markdown-renderer'
import { useThrottledValue } from '@/hooks/use-throttled-value'
export interface ChatAttachment {
id: string
@@ -39,7 +40,8 @@ export interface ChatMessage {
}
function EnhancedMarkdownRenderer({ content }: { content: string }) {
return <MarkdownRenderer content={content} />
const throttled = useThrottledValue(content)
return <MarkdownRenderer content={throttled} />
}
export const ClientChatMessage = memo(

View File

@@ -78,18 +78,15 @@ export function useChatStreaming() {
abortControllerRef.current.abort()
abortControllerRef.current = null
// Add a message indicating the response was stopped
const latestContent = accumulatedTextRef.current
setMessages((prev) => {
const lastMessage = prev[prev.length - 1]
// Only modify if the last message is from the assistant (as expected)
if (lastMessage && lastMessage.type === 'assistant') {
// Append a note that the response was stopped
const content = latestContent || lastMessage.content
const updatedContent =
lastMessage.content +
(lastMessage.content
? '\n\n_Response stopped by user._'
: '_Response stopped by user._')
content + (content ? '\n\n_Response stopped by user._' : '_Response stopped by user._')
return [
...prev.slice(0, -1),
@@ -100,7 +97,6 @@ export function useChatStreaming() {
return prev
})
// Reset streaming state immediately
setIsStreamingResponse(false)
accumulatedTextRef.current = ''
lastStreamedPositionRef.current = 0
@@ -139,9 +135,49 @@ export function useChatStreaming() {
let accumulatedText = ''
let lastAudioPosition = 0
// Track which blocks have streamed content (like chat panel)
const messageIdMap = new Map<string, string>()
const messageId = crypto.randomUUID()
// --- UI batching for streamed chunks -------------------------------------
// Chunks can arrive faster than React can usefully re-render. Instead of
// calling setMessages per chunk, the stream loop marks the UI dirty and a
// flush is committed at most once per animation frame, with a timeout
// fallback so a flush still happens within UI_BATCH_MAX_MS even if the
// scheduled frame is delayed.
const UI_BATCH_MAX_MS = 50
// True when accumulatedText holds chunks not yet reflected in React state.
let uiDirty = false
// Pending requestAnimationFrame id, or null when none is scheduled.
let uiRAF: number | null = null
// Fallback timer guaranteeing a flush even if the scheduled frame is late.
let uiTimer: ReturnType<typeof setTimeout> | null = null
// performance.now() timestamp of the last committed flush.
let lastUIFlush = 0
// Commits the accumulated text to the streaming message and cancels both
// scheduled triggers so the rAF and the timer cannot each flush once.
const flushUI = () => {
if (uiRAF !== null) {
cancelAnimationFrame(uiRAF)
uiRAF = null
}
if (uiTimer !== null) {
clearTimeout(uiTimer)
uiTimer = null
}
if (!uiDirty) return
uiDirty = false
lastUIFlush = performance.now()
// Snapshot before entering the updater: the updater runs later and
// accumulatedText may have grown by then.
const snapshot = accumulatedText
setMessages((prev) =>
prev.map((msg) => {
if (msg.id !== messageId) return msg
// Leave messages alone once they have stopped streaming.
if (!msg.isStreaming) return msg
return { ...msg, content: snapshot }
})
)
}
// Schedules a flush on the next animation frame; flushes synchronously
// instead when UI_BATCH_MAX_MS has already elapsed since the last one.
const scheduleUIFlush = () => {
if (uiRAF !== null) return
const elapsed = performance.now() - lastUIFlush
if (elapsed >= UI_BATCH_MAX_MS) {
flushUI()
return
}
uiRAF = requestAnimationFrame(flushUI)
if (uiTimer === null) {
uiTimer = setTimeout(flushUI, Math.max(0, UI_BATCH_MAX_MS - elapsed))
}
}
setMessages((prev) => [
...prev,
{
@@ -165,6 +201,7 @@ export function useChatStreaming() {
const { done, value } = await reader.read()
if (done) {
flushUI()
// Stream any remaining text for TTS
if (
shouldPlayAudio &&
@@ -217,6 +254,7 @@ export function useChatStreaming() {
}
if (eventType === 'final' && json.data) {
flushUI()
const finalData = json.data as {
success: boolean
error?: string | { message?: string }
@@ -367,6 +405,7 @@ export function useChatStreaming() {
}
accumulatedText += contentChunk
accumulatedTextRef.current = accumulatedText
logger.debug('[useChatStreaming] Received chunk', {
blockId,
chunkLength: contentChunk.length,
@@ -374,11 +413,8 @@ export function useChatStreaming() {
messageId,
chunk: contentChunk.substring(0, 20),
})
setMessages((prev) =>
prev.map((msg) =>
msg.id === messageId ? { ...msg, content: accumulatedText } : msg
)
)
uiDirty = true
scheduleUIFlush()
// Real-time TTS for voice mode
if (shouldPlayAudio && streamingOptions?.audioStreamHandler) {
@@ -419,10 +455,13 @@ export function useChatStreaming() {
}
} catch (error) {
logger.error('Error processing stream:', error)
flushUI()
setMessages((prev) =>
prev.map((msg) => (msg.id === messageId ? { ...msg, isStreaming: false } : msg))
)
} finally {
if (uiRAF !== null) cancelAnimationFrame(uiRAF)
if (uiTimer !== null) clearTimeout(uiTimer)
setIsStreamingResponse(false)
abortControllerRef.current = null

View File

@@ -3,6 +3,7 @@
import ReactMarkdown from 'react-markdown'
import remarkGfm from 'remark-gfm'
import { cn } from '@/lib/core/utils/cn'
import { useThrottledValue } from '@/hooks/use-throttled-value'
import type { ContentBlock, ToolCallStatus } from '../../types'
const REMARK_PLUGINS = [remarkGfm]
@@ -96,6 +97,15 @@ function parseBlocks(blocks: ContentBlock[], isStreaming: boolean): MessageSegme
return segments
}
/**
 * Renders a markdown text segment from a throttled copy of `content`, so a
 * rapidly streaming segment re-parses its markdown at the throttle interval
 * rather than on every incoming chunk.
 */
function ThrottledTextSegment({ content }: { content: string }) {
const throttled = useThrottledValue(content)
return (
<div className={PROSE_CLASSES}>
<ReactMarkdown remarkPlugins={REMARK_PLUGINS}>{throttled}</ReactMarkdown>
</div>
)
}
interface MessageContentProps {
blocks: ContentBlock[]
fallbackContent: string
@@ -118,11 +128,7 @@ export function MessageContent({ blocks, fallbackContent, isStreaming }: Message
<div className='space-y-[10px]'>
{segments.map((segment, i) => {
if (segment.type === 'text') {
return (
<div key={`text-${i}`} className={PROSE_CLASSES}>
<ReactMarkdown remarkPlugins={REMARK_PLUGINS}>{segment.content}</ReactMarkdown>
</div>
)
return <ThrottledTextSegment key={`text-${i}`} content={segment.content} />
}
return (

View File

@@ -522,10 +522,46 @@ export function Chat() {
let accumulatedContent = ''
let buffer = ''
// --- Chunk batching -------------------------------------------------------
// Streamed content is buffered in pendingChunks and appended to the message
// at most once per animation frame, with a timeout fallback so a flush
// happens no later than BATCH_MAX_MS after the previous one.
const BATCH_MAX_MS = 50
// Text received since the last flush, not yet appended to the message.
let pendingChunks = ''
// Pending requestAnimationFrame id, or null when none is scheduled.
let batchRAF: number | null = null
// Fallback timer guaranteeing a flush even if the scheduled frame is late.
let batchTimer: ReturnType<typeof setTimeout> | null = null
// performance.now() timestamp of the last flush.
let lastFlush = 0
// Appends any buffered chunks to the message and cancels both scheduled
// triggers so the rAF and the timer cannot each flush once.
const flushChunks = () => {
if (batchRAF !== null) {
cancelAnimationFrame(batchRAF)
batchRAF = null
}
if (batchTimer !== null) {
clearTimeout(batchTimer)
batchTimer = null
}
if (pendingChunks) {
appendMessageContent(responseMessageId, pendingChunks)
pendingChunks = ''
}
lastFlush = performance.now()
}
// Schedules a flush on the next animation frame; flushes synchronously
// instead when BATCH_MAX_MS has already elapsed since the last one.
const scheduleFlush = () => {
if (batchRAF !== null) return
const elapsed = performance.now() - lastFlush
if (elapsed >= BATCH_MAX_MS) {
flushChunks()
return
}
batchRAF = requestAnimationFrame(flushChunks)
if (batchTimer === null) {
batchTimer = setTimeout(flushChunks, Math.max(0, BATCH_MAX_MS - elapsed))
}
}
try {
while (true) {
const { done, value } = await reader.read()
if (done) {
flushChunks()
finalizeMessageStream(responseMessageId)
break
}
@@ -558,6 +594,7 @@ export function Chat() {
if ('success' in result && !result.success) {
const errorMessage = result.error || 'Workflow execution failed'
flushChunks()
appendMessageContent(
responseMessageId,
`${accumulatedContent ? '\n\n' : ''}Error: ${errorMessage}`
@@ -566,10 +603,12 @@ export function Chat() {
return
}
flushChunks()
finalizeMessageStream(responseMessageId)
} else if (contentChunk) {
accumulatedContent += contentChunk
appendMessageContent(responseMessageId, contentChunk)
pendingChunks += contentChunk
scheduleFlush()
}
} catch (e) {
logger.error('Error parsing stream data:', e)
@@ -580,8 +619,11 @@ export function Chat() {
if ((error as Error)?.name !== 'AbortError') {
logger.error('Error processing stream:', error)
}
flushChunks()
finalizeMessageStream(responseMessageId)
} finally {
if (batchRAF !== null) cancelAnimationFrame(batchRAF)
if (batchTimer !== null) clearTimeout(batchTimer)
if (streamReaderRef.current === reader) {
streamReaderRef.current = null
}

View File

@@ -1,5 +1,6 @@
import { useMemo } from 'react'
import { StreamingIndicator } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming'
import { useThrottledValue } from '@/hooks/use-throttled-value'
interface ChatAttachment {
id: string
@@ -93,13 +94,16 @@ const WordWrap = ({ text }: { text: string }) => {
* Renders a chat message with optional file attachments
*/
export function ChatMessage({ message }: ChatMessageProps) {
const formattedContent = useMemo(() => {
const rawContent = useMemo(() => {
if (typeof message.content === 'object' && message.content !== null) {
return JSON.stringify(message.content, null, 2)
}
return String(message.content || '')
}, [message.content])
const throttled = useThrottledValue(rawContent)
const formattedContent = message.type === 'user' ? rawContent : throttled
const handleAttachmentClick = (attachment: ChatAttachment) => {
const validDataUrl = attachment.dataUrl?.trim()
if (validDataUrl?.startsWith('data:')) {

View File

@@ -127,12 +127,13 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
const params = useParams()
const workspaceId = params.workspaceId as string
const copilotStore = useCopilotStore()
const workflowId =
workflowIdOverride !== undefined ? workflowIdOverride : copilotStore.workflowId
const storeWorkflowId = useCopilotStore((s) => s.workflowId)
const storeSelectedModel = useCopilotStore((s) => s.selectedModel)
const storeSetSelectedModel = useCopilotStore((s) => s.setSelectedModel)
const workflowId = workflowIdOverride !== undefined ? workflowIdOverride : storeWorkflowId
const selectedModel =
selectedModelOverride !== undefined ? selectedModelOverride : copilotStore.selectedModel
const setSelectedModel = onModelChangeOverride || copilotStore.setSelectedModel
selectedModelOverride !== undefined ? selectedModelOverride : storeSelectedModel
const setSelectedModel = onModelChangeOverride || storeSetSelectedModel
const [internalMessage, setInternalMessage] = useState('')
const [isNearTop, setIsNearTop] = useState(false)

View File

@@ -40,6 +40,7 @@ import {
useTodoManagement,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/hooks'
import { useScrollManagement } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { useProgressiveList } from '@/hooks/use-progressive-list'
import type { ChatContext } from '@/stores/panel'
import { useCopilotStore } from '@/stores/panel'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -90,7 +91,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
isSendingMessage,
isAborting,
mode,
inputValue,
planTodos,
showPlanTodos,
streamingPlanContent,
@@ -98,7 +98,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
abortMessage,
createNewChat,
setMode,
setInputValue,
chatsLoadedForWorkflow,
setWorkflowId: setCopilotWorkflowId,
loadChats,
@@ -116,6 +115,8 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
resumeActiveStream,
} = useCopilotStore()
const [inputValue, setInputValue] = useState('')
// Initialize copilot
const { isInitialized } = useCopilotInitialization({
activeWorkflowId,
@@ -133,6 +134,9 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
// Handle scroll management
const { scrollAreaRef, scrollToBottom } = useScrollManagement(messages, isSendingMessage)
const chatKey = currentChat?.id ?? ''
const { staged: stagedMessages } = useProgressiveList(messages, chatKey)
// Handle chat history grouping
const { groupedChats, handleHistoryDropdownOpen: handleHistoryDropdownOpenHook } = useChatHistory(
{
@@ -468,19 +472,21 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
showPlanTodos && planTodos.length > 0 ? 'pb-14' : 'pb-10'
}`}
>
{messages.map((message, index) => {
{stagedMessages.map((message, index) => {
let isDimmed = false
const globalIndex = messages.length - stagedMessages.length + index
if (editingMessageId) {
const editingIndex = messages.findIndex((m) => m.id === editingMessageId)
isDimmed = editingIndex !== -1 && index > editingIndex
isDimmed = editingIndex !== -1 && globalIndex > editingIndex
}
if (!isDimmed && revertingMessageId) {
const revertingIndex = messages.findIndex(
(m) => m.id === revertingMessageId
)
isDimmed = revertingIndex !== -1 && index > revertingIndex
isDimmed = revertingIndex !== -1 && globalIndex > revertingIndex
}
const checkpointCount = messageCheckpoints[message.id]?.length || 0
@@ -501,7 +507,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
onRevertModeChange={(isReverting) =>
handleRevertModeChange(message.id, isReverting)
}
isLastMessage={index === messages.length - 1}
isLastMessage={globalIndex === messages.length - 1}
/>
)
})}

View File

@@ -2,33 +2,34 @@
import { useCallback, useEffect, useRef, useState } from 'react'
/**
* Options for configuring scroll behavior
*/
const AUTO_SCROLL_GRACE_MS = 120
/** Pixels of content remaining below the visible viewport (0 when pinned to the bottom). */
function distanceFromBottom(el: HTMLElement) {
  const maxScrollTop = el.scrollHeight - el.clientHeight
  return maxScrollTop - el.scrollTop
}
interface UseScrollManagementOptions {
/**
* Scroll behavior for programmatic scrolls
* @remarks
* Scroll behavior for programmatic scrolls.
* - `smooth`: Animated scroll (default, used by Copilot)
* - `auto`: Immediate scroll to bottom (used by floating chat to avoid jitter)
*/
behavior?: 'auto' | 'smooth'
/**
* Distance from bottom (in pixels) within which auto-scroll stays active
* @remarks Lower values = less sticky (user can scroll away easier)
* Distance from bottom (in pixels) within which auto-scroll stays active.
* @defaultValue 30
*/
stickinessThreshold?: number
}
/**
* Custom hook to manage scroll behavior in scrollable message panels.
* Handles auto-scrolling during message streaming and user-initiated scrolling.
* Manages auto-scrolling during message streaming using ResizeObserver
* instead of a polling interval.
*
* @param messages - Array of messages to track for scroll behavior
* @param isSendingMessage - Whether a message is currently being sent/streamed
* @param options - Optional configuration for scroll behavior
* @returns Scroll management utilities
* Tracks whether scrolls are programmatic (via a timestamp grace window)
* to avoid falsely treating our own scrolls as the user scrolling away.
* Handles nested scrollable regions marked with `data-scrollable` so that
* scrolling inside tool output or code blocks doesn't break follow-mode.
*/
export function useScrollManagement(
messages: any[],
@@ -37,68 +38,98 @@ export function useScrollManagement(
) {
const scrollAreaRef = useRef<HTMLDivElement>(null)
const [userHasScrolledAway, setUserHasScrolledAway] = useState(false)
const programmaticScrollRef = useRef(false)
const programmaticUntilRef = useRef(0)
const lastScrollTopRef = useRef(0)
const scrollBehavior = options?.behavior ?? 'smooth'
const stickinessThreshold = options?.stickinessThreshold ?? 30
/** Scrolls the container to the bottom */
const isSendingRef = useRef(isSendingMessage)
isSendingRef.current = isSendingMessage
const userScrolledRef = useRef(userHasScrolledAway)
userScrolledRef.current = userHasScrolledAway
// Opens a short grace window during which scroll events are treated as
// programmatic (caused by our own scrollTo) rather than as user input.
const markProgrammatic = useCallback(() => {
programmaticUntilRef.current = Date.now() + AUTO_SCROLL_GRACE_MS
}, [])
// True while inside the grace window opened by markProgrammatic.
const isProgrammatic = useCallback(() => {
return Date.now() < programmaticUntilRef.current
}, [])
const scrollToBottom = useCallback(() => {
const container = scrollAreaRef.current
if (!container) return
programmaticScrollRef.current = true
markProgrammatic()
container.scrollTo({ top: container.scrollHeight, behavior: scrollBehavior })
}, [scrollBehavior, markProgrammatic])
window.setTimeout(() => {
programmaticScrollRef.current = false
}, 200)
}, [scrollBehavior])
/** Handles scroll events to track user position */
const handleScroll = useCallback(() => {
const container = scrollAreaRef.current
if (!container || programmaticScrollRef.current) return
const { scrollTop, scrollHeight, clientHeight } = container
const distanceFromBottom = scrollHeight - scrollTop - clientHeight
const nearBottom = distanceFromBottom <= stickinessThreshold
const delta = scrollTop - lastScrollTopRef.current
if (isSendingMessage) {
// User scrolled up during streaming - break away
if (delta < -2) {
setUserHasScrolledAway(true)
}
// User scrolled back down to bottom - re-stick
if (userHasScrolledAway && delta > 2 && nearBottom) {
setUserHasScrolledAway(false)
}
}
lastScrollTopRef.current = scrollTop
}, [isSendingMessage, userHasScrolledAway, stickinessThreshold])
/** Attaches scroll listener to container */
useEffect(() => {
const container = scrollAreaRef.current
if (!container) return
const handleScroll = () => {
const { scrollTop, scrollHeight, clientHeight } = container
const dist = scrollHeight - scrollTop - clientHeight
if (isProgrammatic()) {
lastScrollTopRef.current = scrollTop
if (dist < stickinessThreshold && userScrolledRef.current) {
setUserHasScrolledAway(false)
}
return
}
const nearBottom = dist <= stickinessThreshold
const delta = scrollTop - lastScrollTopRef.current
if (isSendingRef.current) {
if (delta < -2 && !userScrolledRef.current) {
setUserHasScrolledAway(true)
}
if (userScrolledRef.current && delta > 2 && nearBottom) {
setUserHasScrolledAway(false)
}
}
lastScrollTopRef.current = scrollTop
}
container.addEventListener('scroll', handleScroll, { passive: true })
lastScrollTopRef.current = container.scrollTop
return () => container.removeEventListener('scroll', handleScroll)
}, [handleScroll])
}, [stickinessThreshold, isProgrammatic])
// Ignore upward wheel events inside nested [data-scrollable] regions
// (tool output, code blocks) so they don't break follow-mode.
useEffect(() => {
const container = scrollAreaRef.current
if (!container) return
const handleWheel = (e: WheelEvent) => {
if (e.deltaY >= 0) return
const target = e.target instanceof Element ? e.target : undefined
const nested = target?.closest('[data-scrollable]')
if (nested && nested !== container) return
if (!userScrolledRef.current && isSendingRef.current) {
setUserHasScrolledAway(true)
}
}
container.addEventListener('wheel', handleWheel, { passive: true })
return () => container.removeEventListener('wheel', handleWheel)
}, [])
/** Handles auto-scroll when new messages are added */
useEffect(() => {
if (messages.length === 0) return
const lastMessage = messages[messages.length - 1]
const isUserMessage = lastMessage?.role === 'user'
// Always scroll for user messages, respect scroll state for assistant messages
if (isUserMessage) {
setUserHasScrolledAway(false)
scrollToBottom()
@@ -107,35 +138,42 @@ export function useScrollManagement(
}
}, [messages, userHasScrolledAway, scrollToBottom])
/** Resets scroll state when streaming completes */
useEffect(() => {
if (!isSendingMessage) {
setUserHasScrolledAway(false)
}
}, [isSendingMessage])
/** Keeps scroll pinned during streaming - uses interval, stops when user scrolls away */
useEffect(() => {
// Early return stops the interval when user scrolls away (state change re-runs effect)
if (!isSendingMessage || userHasScrolledAway) {
return
}
if (!isSendingMessage || userHasScrolledAway) return
const intervalId = window.setInterval(() => {
const container = scrollAreaRef.current
if (!container) return
const container = scrollAreaRef.current
if (!container) return
const { scrollTop, scrollHeight, clientHeight } = container
const distanceFromBottom = scrollHeight - scrollTop - clientHeight
const content = container.firstElementChild as HTMLElement | null
if (!content) return
if (distanceFromBottom > 1) {
const observer = new ResizeObserver(() => {
if (distanceFromBottom(container) > 1) {
scrollToBottom()
}
}, 100)
})
return () => window.clearInterval(intervalId)
observer.observe(content)
return () => observer.disconnect()
}, [isSendingMessage, userHasScrolledAway, scrollToBottom])
// overflow-anchor: none during streaming prevents the browser from
// fighting our programmatic scrollToBottom calls (Chromium/Firefox only;
// Safari does not support this property).
useEffect(() => {
const container = scrollAreaRef.current
if (!container) return
container.style.overflowAnchor = isSendingMessage && !userHasScrolledAway ? 'none' : 'auto'
}, [isSendingMessage, userHasScrolledAway])
return {
scrollAreaRef,
scrollToBottom,

View File

@@ -1,6 +1,6 @@
'use client'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { memo, useCallback, useDeferredValue, useEffect, useMemo, useRef, useState } from 'react'
import { Command } from 'cmdk'
import { Database, Files, HelpCircle, Settings } from 'lucide-react'
import { useParams, useRouter } from 'next/navigation'
@@ -19,23 +19,34 @@ import type {
SearchToolOperationItem,
} from '@/stores/modals/search/types'
function customFilter(value: string, search: string): number {
const searchLower = search.toLowerCase()
function scoreMatch(value: string, search: string): number {
if (!search) return 1
const valueLower = value.toLowerCase()
const searchLower = search.toLowerCase()
if (valueLower === searchLower) return 1
if (valueLower.startsWith(searchLower)) return 0.9
if (valueLower.includes(searchLower)) return 0.7
const searchWords = searchLower.split(/\s+/).filter(Boolean)
if (searchWords.length > 1) {
const allWordsMatch = searchWords.every((word) => valueLower.includes(word))
if (allWordsMatch) return 0.5
const words = searchLower.split(/\s+/).filter(Boolean)
if (words.length > 1) {
if (words.every((w) => valueLower.includes(w))) return 0.5
}
return 0
}
/**
 * Scores every item with `scoreMatch` and returns the positive-scoring ones
 * ordered best match first. An empty query returns the list unchanged.
 * Items with equal scores keep their original relative order (Array#sort is
 * stable per ES2019).
 */
function filterAndSort<T>(items: T[], toValue: (item: T) => string, search: string): T[] {
  if (!search) return items
  return items
    .map((item): [T, number] => [item, scoreMatch(toValue(item), search)])
    .filter(([, score]) => score > 0)
    .sort((left, right) => right[1] - left[1])
    .map(([item]) => item)
}
interface TaskItem {
id: string
name: string
@@ -165,20 +176,27 @@ export function SearchModal({
)
useEffect(() => {
if (open && inputRef.current) {
const nativeInputValueSetter = Object.getOwnPropertyDescriptor(
window.HTMLInputElement.prototype,
'value'
)?.set
if (nativeInputValueSetter) {
nativeInputValueSetter.call(inputRef.current, '')
inputRef.current.dispatchEvent(new Event('input', { bubbles: true }))
if (open) {
setSearch('')
if (inputRef.current) {
const nativeInputValueSetter = Object.getOwnPropertyDescriptor(
window.HTMLInputElement.prototype,
'value'
)?.set
if (nativeInputValueSetter) {
nativeInputValueSetter.call(inputRef.current, '')
inputRef.current.dispatchEvent(new Event('input', { bubbles: true }))
}
inputRef.current.focus()
}
inputRef.current.focus()
}
}, [open])
const handleSearchChange = useCallback(() => {
const [search, setSearch] = useState('')
const deferredSearch = useDeferredValue(search)
const handleSearchChange = useCallback((value: string) => {
setSearch(value)
requestAnimationFrame(() => {
const list = document.querySelector('[cmdk-list]')
if (list) {
@@ -274,11 +292,51 @@ export function SearchModal({
[onOpenChange]
)
const showBlocks = isOnWorkflowPage && blocks.length > 0
const showTools = isOnWorkflowPage && tools.length > 0
const showTriggers = isOnWorkflowPage && triggers.length > 0
const showToolOperations = isOnWorkflowPage && toolOperations.length > 0
const showDocs = isOnWorkflowPage && docs.length > 0
const filteredBlocks = useMemo(() => {
if (!isOnWorkflowPage) return []
return filterAndSort(blocks, (b) => `${b.name} block-${b.id}`, deferredSearch)
}, [isOnWorkflowPage, blocks, deferredSearch])
const filteredTools = useMemo(() => {
if (!isOnWorkflowPage) return []
return filterAndSort(tools, (t) => `${t.name} tool-${t.id}`, deferredSearch)
}, [isOnWorkflowPage, tools, deferredSearch])
const filteredTriggers = useMemo(() => {
if (!isOnWorkflowPage) return []
return filterAndSort(triggers, (t) => `${t.name} trigger-${t.id}`, deferredSearch)
}, [isOnWorkflowPage, triggers, deferredSearch])
const filteredToolOps = useMemo(() => {
if (!isOnWorkflowPage) return []
return filterAndSort(
toolOperations,
(op) => `${op.searchValue} operation-${op.id}`,
deferredSearch
)
}, [isOnWorkflowPage, toolOperations, deferredSearch])
const filteredDocs = useMemo(() => {
if (!isOnWorkflowPage) return []
return filterAndSort(docs, (d) => `${d.name} docs documentation doc-${d.id}`, deferredSearch)
}, [isOnWorkflowPage, docs, deferredSearch])
const filteredWorkflows = useMemo(
() => filterAndSort(workflows, (w) => `${w.name} workflow-${w.id}`, deferredSearch),
[workflows, deferredSearch]
)
const filteredTasks = useMemo(
() => filterAndSort(tasks, (t) => `${t.name} task-${t.id}`, deferredSearch),
[tasks, deferredSearch]
)
const filteredWorkspaces = useMemo(
() => filterAndSort(workspaces, (w) => `${w.name} workspace-${w.id}`, deferredSearch),
[workspaces, deferredSearch]
)
const filteredPages = useMemo(
() => filterAndSort(pages, (p) => `${p.name} page-${p.id}`, deferredSearch),
[pages, deferredSearch]
)
if (!mounted) return null
@@ -294,7 +352,6 @@ export function SearchModal({
aria-hidden={!open}
/>
{/* Command palette - always rendered for instant opening, hidden with CSS */}
<div
role='dialog'
aria-modal={open}
@@ -306,7 +363,7 @@ export function SearchModal({
)}
style={{ left: 'calc(50% + var(--sidebar-width, 0px) / 2)' }}
>
<Command label='Search' filter={customFilter}>
<Command label='Search' shouldFilter={false}>
<Command.Input
ref={inputRef}
autoFocus
@@ -319,10 +376,10 @@ export function SearchModal({
No results found.
</Command.Empty>
{showBlocks && (
{filteredBlocks.length > 0 && (
<Command.Group heading='Blocks' className={groupHeadingClassName}>
{blocks.map((block) => (
<CommandItem
{filteredBlocks.map((block) => (
<MemoizedCommandItem
key={block.id}
value={`${block.name} block-${block.id}`}
onSelect={() => handleBlockSelect(block, 'block')}
@@ -331,15 +388,15 @@ export function SearchModal({
showColoredIcon
>
{block.name}
</CommandItem>
</MemoizedCommandItem>
))}
</Command.Group>
)}
{showTools && (
{filteredTools.length > 0 && (
<Command.Group heading='Tools' className={groupHeadingClassName}>
{tools.map((tool) => (
<CommandItem
{filteredTools.map((tool) => (
<MemoizedCommandItem
key={tool.id}
value={`${tool.name} tool-${tool.id}`}
onSelect={() => handleBlockSelect(tool, 'tool')}
@@ -348,15 +405,15 @@ export function SearchModal({
showColoredIcon
>
{tool.name}
</CommandItem>
</MemoizedCommandItem>
))}
</Command.Group>
)}
{showTriggers && (
{filteredTriggers.length > 0 && (
<Command.Group heading='Triggers' className={groupHeadingClassName}>
{triggers.map((trigger) => (
<CommandItem
{filteredTriggers.map((trigger) => (
<MemoizedCommandItem
key={trigger.id}
value={`${trigger.name} trigger-${trigger.id}`}
onSelect={() => handleBlockSelect(trigger, 'trigger')}
@@ -365,14 +422,14 @@ export function SearchModal({
showColoredIcon
>
{trigger.name}
</CommandItem>
</MemoizedCommandItem>
))}
</Command.Group>
)}
{workflows.length > 0 && (
{filteredWorkflows.length > 0 && open && (
<Command.Group heading='Workflows' className={groupHeadingClassName}>
{workflows.map((workflow) => (
{filteredWorkflows.map((workflow) => (
<Command.Item
key={workflow.id}
value={`${workflow.name} workflow-${workflow.id}`}
@@ -396,9 +453,9 @@ export function SearchModal({
</Command.Group>
)}
{tasks.length > 0 && (
{filteredTasks.length > 0 && open && (
<Command.Group heading='Tasks' className={groupHeadingClassName}>
{tasks.map((task) => (
{filteredTasks.map((task) => (
<Command.Item
key={task.id}
value={`${task.name} task-${task.id}`}
@@ -419,10 +476,10 @@ export function SearchModal({
</Command.Group>
)}
{showToolOperations && (
{filteredToolOps.length > 0 && (
<Command.Group heading='Tool Operations' className={groupHeadingClassName}>
{toolOperations.map((op) => (
<CommandItem
{filteredToolOps.map((op) => (
<MemoizedCommandItem
key={op.id}
value={`${op.searchValue} operation-${op.id}`}
onSelect={() => handleToolOperationSelect(op)}
@@ -431,14 +488,14 @@ export function SearchModal({
showColoredIcon
>
{op.name}
</CommandItem>
</MemoizedCommandItem>
))}
</Command.Group>
)}
{workspaces.length > 0 && (
{filteredWorkspaces.length > 0 && open && (
<Command.Group heading='Workspaces' className={groupHeadingClassName}>
{workspaces.map((workspace) => (
{filteredWorkspaces.map((workspace) => (
<Command.Item
key={workspace.id}
value={`${workspace.name} workspace-${workspace.id}`}
@@ -454,10 +511,10 @@ export function SearchModal({
</Command.Group>
)}
{showDocs && (
{filteredDocs.length > 0 && (
<Command.Group heading='Docs' className={groupHeadingClassName}>
{docs.map((doc) => (
<CommandItem
{filteredDocs.map((doc) => (
<MemoizedCommandItem
key={doc.id}
value={`${doc.name} docs documentation doc-${doc.id}`}
onSelect={() => handleDocSelect(doc)}
@@ -466,14 +523,14 @@ export function SearchModal({
showColoredIcon
>
{doc.name}
</CommandItem>
</MemoizedCommandItem>
))}
</Command.Group>
)}
{pages.length > 0 && (
{filteredPages.length > 0 && open && (
<Command.Group heading='Pages' className={groupHeadingClassName}>
{pages.map((page) => {
{filteredPages.map((page) => {
const Icon = page.icon
return (
<Command.Item
@@ -518,36 +575,46 @@ interface CommandItemProps {
children: React.ReactNode
}
function CommandItem({
value,
onSelect,
icon: Icon,
bgColor,
showColoredIcon,
children,
}: CommandItemProps) {
return (
<Command.Item
value={value}
onSelect={onSelect}
className='group flex h-[28px] w-full cursor-pointer items-center gap-[8px] rounded-[6px] px-[10px] text-left text-[15px] aria-selected:bg-[var(--border)] aria-selected:shadow-sm data-[disabled=true]:pointer-events-none data-[disabled=true]:opacity-50'
>
<div
className='relative flex h-[16px] w-[16px] flex-shrink-0 items-center justify-center overflow-hidden rounded-[4px]'
style={{ background: showColoredIcon ? bgColor : 'transparent' }}
// onSelect is safe to exclude: cmdk stores it in a ref (useAsRef) internally,
// so the latest closure is always invoked regardless of whether React re-renders.
const MemoizedCommandItem = memo(
function CommandItem({
value,
onSelect,
icon: Icon,
bgColor,
showColoredIcon,
children,
}: CommandItemProps) {
return (
<Command.Item
value={value}
onSelect={onSelect}
className='group flex h-[28px] w-full cursor-pointer items-center gap-[8px] rounded-[6px] px-[10px] text-left text-[15px] aria-selected:bg-[var(--border)] aria-selected:shadow-sm data-[disabled=true]:pointer-events-none data-[disabled=true]:opacity-50'
>
<Icon
className={cn(
'transition-transform duration-100 group-hover:scale-110',
showColoredIcon
? '!h-[10px] !w-[10px] text-white'
: 'h-[14px] w-[14px] text-[var(--text-tertiary)] group-aria-selected:text-[var(--text-primary)]'
)}
/>
</div>
<span className='truncate font-base text-[var(--text-tertiary)] group-aria-selected:text-[var(--text-primary)]'>
{children}
</span>
</Command.Item>
)
}
<div
className='relative flex h-[16px] w-[16px] flex-shrink-0 items-center justify-center overflow-hidden rounded-[4px]'
style={{ background: showColoredIcon ? bgColor : 'transparent' }}
>
<Icon
className={cn(
'transition-transform duration-100 group-hover:scale-110',
showColoredIcon
? '!h-[10px] !w-[10px] text-white'
: 'h-[14px] w-[14px] text-[var(--text-tertiary)] group-aria-selected:text-[var(--text-primary)]'
)}
/>
</div>
<span className='truncate font-base text-[var(--text-tertiary)] group-aria-selected:text-[var(--text-primary)]'>
{children}
</span>
</Command.Item>
)
},
(prev, next) =>
prev.value === next.value &&
prev.icon === next.icon &&
prev.bgColor === next.bgColor &&
prev.showColoredIcon === next.showColoredIcon &&
prev.children === next.children
)

View File

@@ -0,0 +1,101 @@
'use client'
import { useEffect, useRef, useState } from 'react'
interface ProgressiveListOptions {
/** Number of items to render in the initial batch (most recent items) */
initialBatch?: number
/** Number of items to add per animation frame */
batchSize?: number
}
const DEFAULTS = {
initialBatch: 10,
batchSize: 5,
} satisfies Required<ProgressiveListOptions>
/**
* Progressively renders a list of items so that first paint is fast.
*
* On mount (or when `key` changes), only the most recent `initialBatch`
* items are rendered. The rest are added in `batchSize` increments via
* `requestAnimationFrame` so the browser never blocks on a large DOM mount.
*
* Once staging completes for a given key it never re-stages -- new items
* appended to the list are rendered immediately.
*
* @param items Full list of items to render.
* @param key A session/conversation identifier. When it changes,
* staging restarts for the new list.
* @param options Tuning knobs for batch sizes.
* @returns The currently staged (visible) subset of items.
*/
export function useProgressiveList<T>(
  items: T[],
  key: string,
  options?: ProgressiveListOptions
): { staged: T[]; isStaging: boolean } {
  const initialBatch = options?.initialBatch ?? DEFAULTS.initialBatch
  const batchSize = options?.batchSize ?? DEFAULTS.batchSize
  // Keys whose staging has finished; once a key is in here, its full list is
  // always rendered immediately (never re-staged). NOTE(review): this set is
  // never pruned, so it grows with the number of distinct keys seen — fine for
  // session/conversation ids, but worth confirming for high-churn keys.
  const completedKeysRef = useRef(new Set<string>())
  // Tracks the key from the previous render so a key change can be detected
  // synchronously during render (before the effect below has re-run).
  const prevKeyRef = useRef(key)
  // Where staging left off, so an effect re-run (e.g. items.length growing
  // mid-staging) resumes instead of restarting from the initial batch.
  const stagingCountRef = useRef(initialBatch)
  const [count, setCount] = useState(() => {
    if (items.length <= initialBatch) return items.length
    return initialBatch
  })
  useEffect(() => {
    // Already fully staged for this key: show everything, no animation.
    if (completedKeysRef.current.has(key)) {
      setCount(items.length)
      return
    }
    // Small list: render it all at once and mark the key complete.
    if (items.length <= initialBatch) {
      setCount(items.length)
      completedKeysRef.current.add(key)
      return
    }
    // Resume from the last staged count (never below the initial batch).
    let current = Math.max(stagingCountRef.current, initialBatch)
    setCount(current)
    let frame: number | undefined
    const step = () => {
      const total = items.length
      current = Math.min(total, current + batchSize)
      stagingCountRef.current = current
      setCount(current)
      if (current >= total) {
        completedKeysRef.current.add(key)
        frame = undefined
        return
      }
      // One batch per animation frame so the main thread never blocks on a
      // single large DOM mount.
      frame = requestAnimationFrame(step)
    }
    frame = requestAnimationFrame(step)
    return () => {
      if (frame !== undefined) cancelAnimationFrame(frame)
    }
  }, [key, items.length, initialBatch, batchSize])
  // Render-phase adjustment: when the key just changed, `count` still holds the
  // previous key's value until the effect runs, so compute the correct visible
  // count for this render directly and reset the staging cursor.
  let effectiveCount = count
  if (prevKeyRef.current !== key) {
    effectiveCount = items.length <= initialBatch ? items.length : initialBatch
    stagingCountRef.current = initialBatch
  }
  prevKeyRef.current = key
  const isCompleted = completedKeysRef.current.has(key)
  const isStaging = !isCompleted && effectiveCount < items.length
  // Stage from the END of the list: the most recent items are shown first.
  const staged =
    isCompleted || effectiveCount >= items.length
      ? items
      : items.slice(Math.max(0, items.length - effectiveCount))
  return { staged, isStaging }
}

View File

@@ -0,0 +1,50 @@
'use client'
import { useEffect, useRef, useState } from 'react'
const TEXT_RENDER_THROTTLE_MS = 100
/**
 * Throttles how often a streamed string is committed to the DOM.
 *
 * The caller's state may accumulate text as fast as it arrives; this hook
 * re-renders the displayed value at most once per
 * {@link TEXT_RENDER_THROTTLE_MS}. A trailing-edge timer guarantees the most
 * recent value is committed once the stream goes quiet, so no content is lost.
 */
export function useThrottledValue(value: string): string {
  const [rendered, setRendered] = useState(value)
  // Timestamp of the most recent commit to `rendered`.
  const lastCommitRef = useRef(0)
  // Pending trailing-edge timer, if one is armed.
  const pendingRef = useRef<ReturnType<typeof setTimeout> | undefined>(undefined)
  useEffect(() => {
    const cancelPending = () => {
      if (pendingRef.current !== undefined) {
        clearTimeout(pendingRef.current)
        pendingRef.current = undefined
      }
    }
    const now = Date.now()
    const waitMs = TEXT_RENDER_THROTTLE_MS - (now - lastCommitRef.current)
    if (waitMs <= 0) {
      // Throttle window already elapsed: commit synchronously.
      cancelPending()
      lastCommitRef.current = now
      setRendered(value)
    } else {
      // Still inside the window: (re)arm a trailing timer so the latest
      // value lands exactly when the window closes.
      cancelPending()
      pendingRef.current = setTimeout(() => {
        lastCommitRef.current = Date.now()
        setRendered(value)
        pendingRef.current = undefined
      }, waitMs)
    }
    // A newer value (or unmount) invalidates any armed timer; the next effect
    // run re-schedules with the freshest value.
    return cancelPending
  }, [value])
  return rendered
}

View File

@@ -199,6 +199,222 @@ function appendThinkingContent(context: ClientStreamingContext, text: string) {
context.currentTextBlock = null
}
/**
 * Drains `context.pendingContent`, routing each span of streamed text to the
 * correct sink:
 *
 * - plain assistant text        -> appendTextBlock
 * - <thinking>...</thinking>    -> appendThinkingContent / finalizeThinkingBlock
 * - <design_workflow>...</...>  -> accumulated into context.designWorkflowContent
 *                                  and mirrored to `streamingPlanContent` in the store
 * - <marktodo>id</marktodo> and <checkofftodo>id</checkofftodo>
 *                               -> stripped from the text and forwarded to
 *                                  get().updatePlanTodoStatus
 *
 * Because content arrives in arbitrary chunks, a tag may be split across SSE
 * events. Any trailing fragment that could be the start of a tag is left in
 * `context.pendingContent` for the next invocation instead of being rendered.
 * If anything was consumed, the streaming message is re-rendered at the end.
 */
function processContentBuffer(
  context: ClientStreamingContext,
  get: () => CopilotStore,
  set: StoreSet
) {
  let contentToProcess = context.pendingContent
  let hasProcessedContent = false
  const thinkingStartRegex = /<thinking>/
  const thinkingEndRegex = /<\/thinking>/
  const designWorkflowStartRegex = /<design_workflow>/
  const designWorkflowEndRegex = /<\/design_workflow>/
  // Splits off a trailing '<...' fragment that could still grow into one of
  // `tags` once more chunks arrive; `remaining` is re-buffered, `text` is safe
  // to emit now.
  const splitTrailingPartialTag = (
    text: string,
    tags: string[]
  ): { text: string; remaining: string } => {
    const partialIndex = text.lastIndexOf('<')
    if (partialIndex < 0) {
      return { text, remaining: '' }
    }
    const possibleTag = text.substring(partialIndex)
    const matchesTagStart = tags.some((tag) => tag.startsWith(possibleTag))
    if (!matchesTagStart) {
      return { text, remaining: '' }
    }
    return {
      text: text.substring(0, partialIndex),
      remaining: possibleTag,
    }
  }
  while (contentToProcess.length > 0) {
    // --- Inside <design_workflow>: everything goes to the plan buffer. ---
    if (context.isInDesignWorkflowBlock) {
      const endMatch = designWorkflowEndRegex.exec(contentToProcess)
      if (endMatch) {
        // Closing tag found: flush the remainder, publish the final plan
        // content, and resume normal parsing after the tag.
        const designContent = contentToProcess.substring(0, endMatch.index)
        context.designWorkflowContent += designContent
        context.isInDesignWorkflowBlock = false
        logger.info('[design_workflow] Tag complete, setting plan content', {
          contentLength: context.designWorkflowContent.length,
        })
        set({ streamingPlanContent: context.designWorkflowContent })
        contentToProcess = contentToProcess.substring(endMatch.index + endMatch[0].length)
        hasProcessedContent = true
      } else {
        // No closing tag yet: stream what we have into the plan, keeping any
        // trailing fragment that might be the start of '</design_workflow>'.
        const { text, remaining } = splitTrailingPartialTag(contentToProcess, [
          '</design_workflow>',
        ])
        context.designWorkflowContent += text
        set({ streamingPlanContent: context.designWorkflowContent })
        contentToProcess = remaining
        hasProcessedContent = true
        if (remaining) {
          break
        }
      }
      continue
    }
    // --- Normal text mode: look for design-workflow and todo tags first. ---
    if (!context.isInThinkingBlock && !context.isInDesignWorkflowBlock) {
      const designStartMatch = designWorkflowStartRegex.exec(contentToProcess)
      if (designStartMatch) {
        // Emit any plain text preceding the tag, then switch modes.
        const textBeforeDesign = contentToProcess.substring(0, designStartMatch.index)
        if (textBeforeDesign) {
          appendTextBlock(context, textBeforeDesign)
          hasProcessedContent = true
        }
        context.isInDesignWorkflowBlock = true
        context.designWorkflowContent = ''
        contentToProcess = contentToProcess.substring(
          designStartMatch.index + designStartMatch[0].length
        )
        hasProcessedContent = true
        continue
      }
      // Todo status tags: find whichever of the two appears first.
      const nextMarkIndex = contentToProcess.indexOf('<marktodo>')
      const nextCheckIndex = contentToProcess.indexOf('<checkofftodo>')
      const hasMark = nextMarkIndex >= 0
      const hasCheck = nextCheckIndex >= 0
      const nextTagIndex =
        hasMark && hasCheck
          ? Math.min(nextMarkIndex, nextCheckIndex)
          : hasMark
            ? nextMarkIndex
            : hasCheck
              ? nextCheckIndex
              : -1
      if (nextTagIndex >= 0) {
        const isMarkTodo = hasMark && nextMarkIndex === nextTagIndex
        const tagStart = isMarkTodo ? '<marktodo>' : '<checkofftodo>'
        const tagEnd = isMarkTodo ? '</marktodo>' : '</checkofftodo>'
        const closingIndex = contentToProcess.indexOf(tagEnd, nextTagIndex + tagStart.length)
        if (closingIndex === -1) {
          // Closing tag not streamed yet: wait for more data.
          break
        }
        const todoId = contentToProcess
          .substring(nextTagIndex + tagStart.length, closingIndex)
          .trim()
        logger.info(
          isMarkTodo ? '[TODO] Detected marktodo tag' : '[TODO] Detected checkofftodo tag',
          { todoId }
        )
        if (todoId) {
          // Status update is best-effort: a failure is logged but must not
          // abort text streaming.
          try {
            get().updatePlanTodoStatus(todoId, isMarkTodo ? 'executing' : 'completed')
            logger.info(
              isMarkTodo
                ? '[TODO] Successfully marked todo in progress'
                : '[TODO] Successfully checked off todo',
              { todoId }
            )
          } catch (e) {
            logger.error(
              isMarkTodo
                ? '[TODO] Failed to mark todo in progress'
                : '[TODO] Failed to checkoff todo',
              { todoId, error: e }
            )
          }
        } else {
          logger.warn('[TODO] Empty todoId extracted from todo tag', { tagType: tagStart })
        }
        // Splice the tag out of the visible text. If newlines surrounded the
        // tag on both sides, collapse them to a single '\n' so removing the
        // tag does not glue two paragraphs together.
        let beforeTag = contentToProcess.substring(0, nextTagIndex)
        let afterTag = contentToProcess.substring(closingIndex + tagEnd.length)
        const hadNewlineBefore = /(\r?\n)+$/.test(beforeTag)
        const hadNewlineAfter = /^(\r?\n)+/.test(afterTag)
        beforeTag = beforeTag.replace(/(\r?\n)+$/, '')
        afterTag = afterTag.replace(/^(\r?\n)+/, '')
        contentToProcess = beforeTag + (hadNewlineBefore && hadNewlineAfter ? '\n' : '') + afterTag
        // Force a fresh text block so the spliced text is re-scanned cleanly.
        context.currentTextBlock = null
        hasProcessedContent = true
        continue
      }
    }
    // --- Thinking-tag handling. ---
    if (context.isInThinkingBlock) {
      const endMatch = thinkingEndRegex.exec(contentToProcess)
      if (endMatch) {
        // Closing tag found: flush the remainder and close the thinking block.
        const thinkingContent = contentToProcess.substring(0, endMatch.index)
        appendThinkingContent(context, thinkingContent)
        finalizeThinkingBlock(context)
        contentToProcess = contentToProcess.substring(endMatch.index + endMatch[0].length)
        hasProcessedContent = true
      } else {
        // Stream partial thinking text, re-buffering a possible '</thinking>'
        // fragment at the tail.
        const { text, remaining } = splitTrailingPartialTag(contentToProcess, ['</thinking>'])
        if (text) {
          appendThinkingContent(context, text)
          hasProcessedContent = true
        }
        contentToProcess = remaining
        if (remaining) {
          break
        }
      }
    } else {
      const startMatch = thinkingStartRegex.exec(contentToProcess)
      if (startMatch) {
        // Opening tag found: emit preceding plain text, then enter thinking mode.
        const textBeforeThinking = contentToProcess.substring(0, startMatch.index)
        if (textBeforeThinking) {
          appendTextBlock(context, textBeforeThinking)
          hasProcessedContent = true
        }
        context.isInThinkingBlock = true
        context.currentTextBlock = null
        contentToProcess = contentToProcess.substring(startMatch.index + startMatch[0].length)
        hasProcessedContent = true
      } else {
        // Plain text with no recognized tag. Hold back a trailing fragment that
        // could be the start of any tag we care about, but only if it sits
        // within the last 50 characters (longer than any tag we parse) —
        // otherwise a stray '<' earlier in the text would stall streaming.
        let partialTagIndex = contentToProcess.lastIndexOf('<')
        const partialMarkTodo = contentToProcess.lastIndexOf('<marktodo')
        const partialCheckoffTodo = contentToProcess.lastIndexOf('<checkofftodo')
        if (partialMarkTodo > partialTagIndex) {
          partialTagIndex = partialMarkTodo
        }
        if (partialCheckoffTodo > partialTagIndex) {
          partialTagIndex = partialCheckoffTodo
        }
        let textToAdd = contentToProcess
        let remaining = ''
        if (partialTagIndex >= 0 && partialTagIndex > contentToProcess.length - 50) {
          textToAdd = contentToProcess.substring(0, partialTagIndex)
          remaining = contentToProcess.substring(partialTagIndex)
        }
        if (textToAdd) {
          appendTextBlock(context, textToAdd)
          hasProcessedContent = true
        }
        contentToProcess = remaining
        break
      }
    }
  }
  // Whatever could not be consumed (a possible partial tag) waits for the
  // next chunk.
  context.pendingContent = contentToProcess
  if (hasProcessedContent) {
    updateStreamingMessage(set, context)
  }
}
export const sseHandlers: Record<string, SSEHandler> = {
chat_id: async (data, context, get, set) => {
context.newChatId = data.chatId
@@ -704,217 +920,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
content: (data, context, get, set) => {
if (!data.data) return
context.pendingContent += data.data
let contentToProcess = context.pendingContent
let hasProcessedContent = false
const thinkingStartRegex = /<thinking>/
const thinkingEndRegex = /<\/thinking>/
const designWorkflowStartRegex = /<design_workflow>/
const designWorkflowEndRegex = /<\/design_workflow>/
const splitTrailingPartialTag = (
text: string,
tags: string[]
): { text: string; remaining: string } => {
const partialIndex = text.lastIndexOf('<')
if (partialIndex < 0) {
return { text, remaining: '' }
}
const possibleTag = text.substring(partialIndex)
const matchesTagStart = tags.some((tag) => tag.startsWith(possibleTag))
if (!matchesTagStart) {
return { text, remaining: '' }
}
return {
text: text.substring(0, partialIndex),
remaining: possibleTag,
}
}
while (contentToProcess.length > 0) {
if (context.isInDesignWorkflowBlock) {
const endMatch = designWorkflowEndRegex.exec(contentToProcess)
if (endMatch) {
const designContent = contentToProcess.substring(0, endMatch.index)
context.designWorkflowContent += designContent
context.isInDesignWorkflowBlock = false
logger.info('[design_workflow] Tag complete, setting plan content', {
contentLength: context.designWorkflowContent.length,
})
set({ streamingPlanContent: context.designWorkflowContent })
contentToProcess = contentToProcess.substring(endMatch.index + endMatch[0].length)
hasProcessedContent = true
} else {
const { text, remaining } = splitTrailingPartialTag(contentToProcess, [
'</design_workflow>',
])
context.designWorkflowContent += text
set({ streamingPlanContent: context.designWorkflowContent })
contentToProcess = remaining
hasProcessedContent = true
if (remaining) {
break
}
}
continue
}
if (!context.isInThinkingBlock && !context.isInDesignWorkflowBlock) {
const designStartMatch = designWorkflowStartRegex.exec(contentToProcess)
if (designStartMatch) {
const textBeforeDesign = contentToProcess.substring(0, designStartMatch.index)
if (textBeforeDesign) {
appendTextBlock(context, textBeforeDesign)
hasProcessedContent = true
}
context.isInDesignWorkflowBlock = true
context.designWorkflowContent = ''
contentToProcess = contentToProcess.substring(
designStartMatch.index + designStartMatch[0].length
)
hasProcessedContent = true
continue
}
const nextMarkIndex = contentToProcess.indexOf('<marktodo>')
const nextCheckIndex = contentToProcess.indexOf('<checkofftodo>')
const hasMark = nextMarkIndex >= 0
const hasCheck = nextCheckIndex >= 0
const nextTagIndex =
hasMark && hasCheck
? Math.min(nextMarkIndex, nextCheckIndex)
: hasMark
? nextMarkIndex
: hasCheck
? nextCheckIndex
: -1
if (nextTagIndex >= 0) {
const isMarkTodo = hasMark && nextMarkIndex === nextTagIndex
const tagStart = isMarkTodo ? '<marktodo>' : '<checkofftodo>'
const tagEnd = isMarkTodo ? '</marktodo>' : '</checkofftodo>'
const closingIndex = contentToProcess.indexOf(tagEnd, nextTagIndex + tagStart.length)
if (closingIndex === -1) {
break
}
const todoId = contentToProcess
.substring(nextTagIndex + tagStart.length, closingIndex)
.trim()
logger.info(
isMarkTodo ? '[TODO] Detected marktodo tag' : '[TODO] Detected checkofftodo tag',
{ todoId }
)
if (todoId) {
try {
get().updatePlanTodoStatus(todoId, isMarkTodo ? 'executing' : 'completed')
logger.info(
isMarkTodo
? '[TODO] Successfully marked todo in progress'
: '[TODO] Successfully checked off todo',
{ todoId }
)
} catch (e) {
logger.error(
isMarkTodo
? '[TODO] Failed to mark todo in progress'
: '[TODO] Failed to checkoff todo',
{ todoId, error: e }
)
}
} else {
logger.warn('[TODO] Empty todoId extracted from todo tag', { tagType: tagStart })
}
let beforeTag = contentToProcess.substring(0, nextTagIndex)
let afterTag = contentToProcess.substring(closingIndex + tagEnd.length)
const hadNewlineBefore = /(\r?\n)+$/.test(beforeTag)
const hadNewlineAfter = /^(\r?\n)+/.test(afterTag)
beforeTag = beforeTag.replace(/(\r?\n)+$/, '')
afterTag = afterTag.replace(/^(\r?\n)+/, '')
contentToProcess =
beforeTag + (hadNewlineBefore && hadNewlineAfter ? '\n' : '') + afterTag
context.currentTextBlock = null
hasProcessedContent = true
continue
}
}
if (context.isInThinkingBlock) {
const endMatch = thinkingEndRegex.exec(contentToProcess)
if (endMatch) {
const thinkingContent = contentToProcess.substring(0, endMatch.index)
appendThinkingContent(context, thinkingContent)
finalizeThinkingBlock(context)
contentToProcess = contentToProcess.substring(endMatch.index + endMatch[0].length)
hasProcessedContent = true
} else {
const { text, remaining } = splitTrailingPartialTag(contentToProcess, ['</thinking>'])
if (text) {
appendThinkingContent(context, text)
hasProcessedContent = true
}
contentToProcess = remaining
if (remaining) {
break
}
}
} else {
const startMatch = thinkingStartRegex.exec(contentToProcess)
if (startMatch) {
const textBeforeThinking = contentToProcess.substring(0, startMatch.index)
if (textBeforeThinking) {
appendTextBlock(context, textBeforeThinking)
hasProcessedContent = true
}
context.isInThinkingBlock = true
context.currentTextBlock = null
contentToProcess = contentToProcess.substring(startMatch.index + startMatch[0].length)
hasProcessedContent = true
} else {
let partialTagIndex = contentToProcess.lastIndexOf('<')
const partialMarkTodo = contentToProcess.lastIndexOf('<marktodo')
const partialCheckoffTodo = contentToProcess.lastIndexOf('<checkofftodo')
if (partialMarkTodo > partialTagIndex) {
partialTagIndex = partialMarkTodo
}
if (partialCheckoffTodo > partialTagIndex) {
partialTagIndex = partialCheckoffTodo
}
let textToAdd = contentToProcess
let remaining = ''
if (partialTagIndex >= 0 && partialTagIndex > contentToProcess.length - 50) {
textToAdd = contentToProcess.substring(0, partialTagIndex)
remaining = contentToProcess.substring(partialTagIndex)
}
if (textToAdd) {
appendTextBlock(context, textToAdd)
hasProcessedContent = true
}
contentToProcess = remaining
break
}
}
}
context.pendingContent = contentToProcess
if (hasProcessedContent) {
updateStreamingMessage(set, context)
}
processContentBuffer(context, get, set)
},
done: (_data, context) => {
logger.info('[SSE] DONE EVENT RECEIVED', {

View File

@@ -462,7 +462,7 @@ function prepareSendContext(
if (revertState) {
const currentMessages = get().messages
newMessages = [...currentMessages, userMessage, streamingMessage]
set({ revertState: null, inputValue: '' })
set({ revertState: null })
} else {
const currentMessages = get().messages
const existingIndex = messageId ? currentMessages.findIndex((m) => m.id === messageId) : -1
@@ -1037,7 +1037,6 @@ const initialState = {
chatsLastLoadedAt: null as Date | null,
chatsLoadedForWorkflow: null as string | null,
revertState: null as { messageId: string; messageContent: string } | null,
inputValue: '',
planTodos: [] as Array<{ id: string; content: string; completed?: boolean; executing?: boolean }>,
showPlanTodos: false,
streamingPlanContent: '',
@@ -2222,8 +2221,6 @@ export const useCopilotStore = create<CopilotStore>()(
set(initialState)
},
// Input controls
setInputValue: (value: string) => set({ inputValue: value }),
clearRevertState: () => set({ revertState: null }),
// Todo list (UI only)

View File

@@ -155,7 +155,6 @@ export interface CopilotState {
chatsLoadedForWorkflow: string | null
revertState: { messageId: string; messageContent: string } | null
inputValue: string
planTodos: Array<{ id: string; content: string; completed?: boolean; executing?: boolean }>
showPlanTodos: boolean
@@ -235,7 +234,6 @@ export interface CopilotActions {
cleanup: () => void
reset: () => void
setInputValue: (value: string) => void
clearRevertState: () => void
setPlanTodos: (