Merge branch 'dev' into claude/fix-block-search-vector-error-QMbO4

This commit is contained in:
Nicholas Tindle
2026-01-27 10:50:47 -06:00
committed by GitHub
50 changed files with 2135 additions and 1406 deletions

View File

@@ -31,6 +31,7 @@ class ResponseType(str, Enum):
# Other
ERROR = "error"
USAGE = "usage"
HEARTBEAT = "heartbeat"
class StreamBaseResponse(BaseModel):
@@ -142,3 +143,20 @@ class StreamError(StreamBaseResponse):
details: dict[str, Any] | None = Field(
default=None, description="Additional error details"
)
class StreamHeartbeat(StreamBaseResponse):
"""Heartbeat to keep SSE connection alive during long-running operations.
Uses SSE comment format (: comment) which is ignored by clients but keeps
the connection alive through proxies and load balancers.
"""
type: ResponseType = ResponseType.HEARTBEAT
toolCallId: str | None = Field(
default=None, description="Tool call ID if heartbeat is for a specific tool"
)
def to_sse(self) -> str:
"""Convert to SSE comment format to keep connection alive."""
return ": heartbeat\n\n"

File diff suppressed because it is too large Load Diff

View File

@@ -3,8 +3,6 @@
import logging
from typing import Any
from langfuse import observe
from backend.api.features.chat.model import ChatSession
from backend.data.understanding import (
BusinessUnderstandingInput,
@@ -61,7 +59,6 @@ and automations for the user's specific needs."""
"""Requires authentication to store user-specific data."""
return True
@observe(as_type="tool", name="add_understanding")
async def _execute(
self,
user_id: str | None,

View File

@@ -5,7 +5,6 @@ import re
from datetime import datetime, timedelta, timezone
from typing import Any
from langfuse import observe
from pydantic import BaseModel, field_validator
from backend.api.features.chat.model import ChatSession
@@ -329,7 +328,6 @@ class AgentOutputTool(BaseTool):
total_executions=len(available_executions) if available_executions else 1,
)
@observe(as_type="tool", name="view_agent_output")
async def _execute(
self,
user_id: str | None,

View File

@@ -3,8 +3,6 @@
import logging
from typing import Any
from langfuse import observe
from backend.api.features.chat.model import ChatSession
from .agent_generator import (
@@ -75,7 +73,6 @@ class CreateAgentTool(BaseTool):
"required": ["description"],
}
@observe(as_type="tool", name="create_agent")
async def _execute(
self,
user_id: str | None,
@@ -116,8 +113,11 @@ class CreateAgentTool(BaseTool):
if decomposition_result is None:
return ErrorResponse(
message="Failed to analyze the goal. Please try rephrasing.",
error="Decomposition failed",
message="Failed to analyze the goal. The agent generation service may be unavailable or timed out. Please try again.",
error="decomposition_failed",
details={
"description": description[:100]
}, # Include context for debugging
session_id=session_id,
)
@@ -182,8 +182,11 @@ class CreateAgentTool(BaseTool):
if agent_json is None:
return ErrorResponse(
message="Failed to generate the agent. Please try again.",
error="Generation failed",
message="Failed to generate the agent. The agent generation service may be unavailable or timed out. Please try again.",
error="generation_failed",
details={
"description": description[:100]
}, # Include context for debugging
session_id=session_id,
)

View File

@@ -3,8 +3,6 @@
import logging
from typing import Any
from langfuse import observe
from backend.api.features.chat.model import ChatSession
from .agent_generator import (
@@ -81,7 +79,6 @@ class EditAgentTool(BaseTool):
"required": ["agent_id", "changes"],
}
@observe(as_type="tool", name="edit_agent")
async def _execute(
self,
user_id: str | None,
@@ -145,8 +142,9 @@ class EditAgentTool(BaseTool):
if result is None:
return ErrorResponse(
message="Failed to generate changes. Please try rephrasing.",
error="Update generation failed",
message="Failed to generate changes. The agent generation service may be unavailable or timed out. Please try again.",
error="update_generation_failed",
details={"agent_id": agent_id, "changes": changes[:100]},
session_id=session_id,
)

View File

@@ -2,8 +2,6 @@
from typing import Any
from langfuse import observe
from backend.api.features.chat.model import ChatSession
from .agent_search import search_agents
@@ -37,7 +35,6 @@ class FindAgentTool(BaseTool):
"required": ["query"],
}
@observe(as_type="tool", name="find_agent")
async def _execute(
self, user_id: str | None, session: ChatSession, **kwargs
) -> ToolResponseBase:

View File

@@ -1,7 +1,6 @@
import logging
from typing import Any
from langfuse import observe
from prisma.enums import ContentType
from backend.api.features.chat.model import ChatSession
@@ -56,7 +55,6 @@ class FindBlockTool(BaseTool):
def requires_auth(self) -> bool:
return True
@observe(as_type="tool", name="find_block")
async def _execute(
self,
user_id: str | None,
@@ -109,7 +107,8 @@ class FindBlockTool(BaseTool):
block_id = result["content_id"]
block = get_block(block_id)
if block:
# Skip disabled blocks
if block and not block.disabled:
# Get input/output schemas
input_schema = {}
output_schema = {}

View File

@@ -2,8 +2,6 @@
from typing import Any
from langfuse import observe
from backend.api.features.chat.model import ChatSession
from .agent_search import search_agents
@@ -43,7 +41,6 @@ class FindLibraryAgentTool(BaseTool):
def requires_auth(self) -> bool:
return True
@observe(as_type="tool", name="find_library_agent")
async def _execute(
self, user_id: str | None, session: ChatSession, **kwargs
) -> ToolResponseBase:

View File

@@ -4,8 +4,6 @@ import logging
from pathlib import Path
from typing import Any
from langfuse import observe
from backend.api.features.chat.model import ChatSession
from backend.api.features.chat.tools.base import BaseTool
from backend.api.features.chat.tools.models import (
@@ -73,7 +71,6 @@ class GetDocPageTool(BaseTool):
url_path = path.rsplit(".", 1)[0] if "." in path else path
return f"{DOCS_BASE_URL}/{url_path}"
@observe(as_type="tool", name="get_doc_page")
async def _execute(
self,
user_id: str | None,

View File

@@ -3,7 +3,6 @@
import logging
from typing import Any
from langfuse import observe
from pydantic import BaseModel, Field, field_validator
from backend.api.features.chat.config import ChatConfig
@@ -159,7 +158,6 @@ class RunAgentTool(BaseTool):
"""All operations require authentication."""
return True
@observe(as_type="tool", name="run_agent")
async def _execute(
self,
user_id: str | None,

View File

@@ -4,8 +4,6 @@ import logging
from collections import defaultdict
from typing import Any
from langfuse import observe
from backend.api.features.chat.model import ChatSession
from backend.data.block import get_block
from backend.data.execution import ExecutionContext
@@ -130,7 +128,6 @@ class RunBlockTool(BaseTool):
return matched_credentials, missing_credentials
@observe(as_type="tool", name="run_block")
async def _execute(
self,
user_id: str | None,

View File

@@ -3,7 +3,6 @@
import logging
from typing import Any
from langfuse import observe
from prisma.enums import ContentType
from backend.api.features.chat.model import ChatSession
@@ -88,7 +87,6 @@ class SearchDocsTool(BaseTool):
url_path = path.rsplit(".", 1)[0] if "." in path else path
return f"{DOCS_BASE_URL}/{url_path}"
@observe(as_type="tool", name="search_docs")
async def _execute(
self,
user_id: str | None,

View File

@@ -164,9 +164,9 @@ async def test_process_review_action_approve_success(
"""Test successful review approval"""
# Mock the route functions
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
mock_get_reviews_for_user = mocker.patch(
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
)
mock_get_reviews_for_user.return_value = {"test_node_123": sample_pending_review}
@@ -244,9 +244,9 @@ async def test_process_review_action_reject_success(
"""Test successful review rejection"""
# Mock the route functions
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
mock_get_reviews_for_user = mocker.patch(
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
)
mock_get_reviews_for_user.return_value = {"test_node_123": sample_pending_review}
@@ -339,9 +339,9 @@ async def test_process_review_action_mixed_success(
# Mock the route functions
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
mock_get_reviews_for_user = mocker.patch(
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
)
mock_get_reviews_for_user.return_value = {
"test_node_123": sample_pending_review,
@@ -463,9 +463,9 @@ async def test_process_review_action_review_not_found(
test_user_id: str,
) -> None:
"""Test error when review is not found"""
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
mock_get_reviews_for_user = mocker.patch(
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
)
# Return empty dict to simulate review not found
mock_get_reviews_for_user.return_value = {}
@@ -506,7 +506,7 @@ async def test_process_review_action_review_not_found(
response = await client.post("/api/review/action", json=request_data)
assert response.status_code == 404
assert "No pending review found" in response.json()["detail"]
assert "Review(s) not found" in response.json()["detail"]
@pytest.mark.asyncio(loop_scope="session")
@@ -517,9 +517,9 @@ async def test_process_review_action_partial_failure(
test_user_id: str,
) -> None:
"""Test handling of partial failures in review processing"""
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
mock_get_reviews_for_user = mocker.patch(
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
)
mock_get_reviews_for_user.return_value = {"test_node_123": sample_pending_review}
@@ -567,9 +567,9 @@ async def test_process_review_action_invalid_node_exec_id(
test_user_id: str,
) -> None:
"""Test failure when trying to process review with invalid node execution ID"""
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
mock_get_reviews_for_user = mocker.patch(
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
)
# Return empty dict to simulate review not found
mock_get_reviews_for_user.return_value = {}
@@ -596,7 +596,7 @@ async def test_process_review_action_invalid_node_exec_id(
# Returns 404 when review is not found
assert response.status_code == 404
assert "No pending review found" in response.json()["detail"]
assert "Review(s) not found" in response.json()["detail"]
@pytest.mark.asyncio(loop_scope="session")
@@ -607,9 +607,9 @@ async def test_process_review_action_auto_approve_creates_auto_approval_records(
test_user_id: str,
) -> None:
"""Test that auto_approve_future_actions flag creates auto-approval records"""
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
mock_get_reviews_for_user = mocker.patch(
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
)
mock_get_reviews_for_user.return_value = {"test_node_123": sample_pending_review}
@@ -737,9 +737,9 @@ async def test_process_review_action_without_auto_approve_still_loads_settings(
test_user_id: str,
) -> None:
"""Test that execution context is created with settings even without auto-approve"""
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
mock_get_reviews_for_user = mocker.patch(
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
)
mock_get_reviews_for_user.return_value = {"test_node_123": sample_pending_review}
@@ -885,9 +885,9 @@ async def test_process_review_action_auto_approve_only_applies_to_approved_revie
reviewed_at=FIXED_NOW,
)
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
mock_get_reviews_for_user = mocker.patch(
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
)
# Need to return both reviews in WAITING state (before processing)
approved_review_waiting = PendingHumanReviewModel(
@@ -1031,9 +1031,9 @@ async def test_process_review_action_per_review_auto_approve_granularity(
test_user_id: str,
) -> None:
"""Test that auto-approval can be set per-review (granular control)"""
# Mock get_pending_reviews_by_node_exec_ids - return different reviews based on node_exec_id
# Mock get_reviews_by_node_exec_ids - return different reviews based on node_exec_id
mock_get_reviews_for_user = mocker.patch(
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
)
# Create a mapping of node_exec_id to review

View File

@@ -14,9 +14,9 @@ from backend.data.execution import (
from backend.data.graph import get_graph_settings
from backend.data.human_review import (
create_auto_approval_record,
get_pending_reviews_by_node_exec_ids,
get_pending_reviews_for_execution,
get_pending_reviews_for_user,
get_reviews_by_node_exec_ids,
has_pending_reviews_for_graph_exec,
process_all_reviews_for_execution,
)
@@ -137,17 +137,17 @@ async def process_review_action(
detail="At least one review must be provided",
)
# Batch fetch all requested reviews
reviews_map = await get_pending_reviews_by_node_exec_ids(
# Batch fetch all requested reviews (regardless of status for idempotent handling)
reviews_map = await get_reviews_by_node_exec_ids(
list(all_request_node_ids), user_id
)
# Validate all reviews were found
# Validate all reviews were found (must exist, any status is OK for now)
missing_ids = all_request_node_ids - set(reviews_map.keys())
if missing_ids:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"No pending review found for node execution(s): {', '.join(missing_ids)}",
detail=f"Review(s) not found: {', '.join(missing_ids)}",
)
# Validate all reviews belong to the same execution

View File

@@ -188,6 +188,10 @@ class BlockHandler(ContentHandler):
try:
block_instance = block_cls()
# Skip disabled blocks - they shouldn't be indexed
if block_instance.disabled:
continue
# Build searchable text from block metadata
parts = []
if hasattr(block_instance, "name") and block_instance.name:
@@ -248,12 +252,19 @@ class BlockHandler(ContentHandler):
from backend.data.block import get_blocks
all_blocks = get_blocks()
total_blocks = len(all_blocks)
# Filter out disabled blocks - they're not indexed
enabled_block_ids = [
block_id
for block_id, block_cls in all_blocks.items()
if not block_cls().disabled
]
total_blocks = len(enabled_block_ids)
if total_blocks == 0:
return {"total": 0, "with_embeddings": 0, "without_embeddings": 0}
block_ids = list(all_blocks.keys())
block_ids = enabled_block_ids
placeholders = ",".join([f"${i+1}" for i in range(len(block_ids))])
embedded_result = await query_raw_with_schema(

View File

@@ -81,6 +81,7 @@ async def test_block_handler_get_missing_items(mocker):
mock_block_instance.name = "Calculator Block"
mock_block_instance.description = "Performs calculations"
mock_block_instance.categories = [MagicMock(value="MATH")]
mock_block_instance.disabled = False
mock_block_instance.input_schema.model_json_schema.return_value = {
"properties": {"expression": {"description": "Math expression to evaluate"}}
}
@@ -116,11 +117,18 @@ async def test_block_handler_get_stats(mocker):
"""Test BlockHandler returns correct stats."""
handler = BlockHandler()
# Mock get_blocks
# Mock get_blocks - each block class returns an instance with disabled=False
def make_mock_block_class():
mock_class = MagicMock()
mock_instance = MagicMock()
mock_instance.disabled = False
mock_class.return_value = mock_instance
return mock_class
mock_blocks = {
"block-1": MagicMock(),
"block-2": MagicMock(),
"block-3": MagicMock(),
"block-1": make_mock_block_class(),
"block-2": make_mock_block_class(),
"block-3": make_mock_block_class(),
}
# Mock embedded count query (2 blocks have embeddings)
@@ -309,6 +317,7 @@ async def test_block_handler_handles_missing_attributes():
mock_block_class = MagicMock()
mock_block_instance = MagicMock()
mock_block_instance.name = "Minimal Block"
mock_block_instance.disabled = False
# No description, categories, or schema
del mock_block_instance.description
del mock_block_instance.categories
@@ -342,6 +351,7 @@ async def test_block_handler_skips_failed_blocks():
good_instance.name = "Good Block"
good_instance.description = "Works fine"
good_instance.categories = []
good_instance.disabled = False
good_block.return_value = good_instance
bad_block = MagicMock()

View File

@@ -263,11 +263,14 @@ async def get_pending_review_by_node_exec_id(
return PendingHumanReviewModel.from_db(review, node_id=node_id)
async def get_pending_reviews_by_node_exec_ids(
async def get_reviews_by_node_exec_ids(
node_exec_ids: list[str], user_id: str
) -> dict[str, "PendingHumanReviewModel"]:
"""
Get multiple pending reviews by their node execution IDs in a single batch query.
Get multiple reviews by their node execution IDs regardless of status.
Unlike get_pending_reviews_by_node_exec_ids, this returns reviews in any status
(WAITING, APPROVED, REJECTED). Used for validation in idempotent operations.
Args:
node_exec_ids: List of node execution IDs to look up
@@ -283,7 +286,6 @@ async def get_pending_reviews_by_node_exec_ids(
where={
"nodeExecId": {"in": node_exec_ids},
"userId": user_id,
"status": ReviewStatus.WAITING,
}
)
@@ -407,38 +409,68 @@ async def process_all_reviews_for_execution(
) -> dict[str, PendingHumanReviewModel]:
"""Process all pending reviews for an execution with approve/reject decisions.
Handles race conditions gracefully: if a review was already processed with the
same decision by a concurrent request, it's treated as success rather than error.
Args:
user_id: User ID for ownership validation
review_decisions: Map of node_exec_id -> (status, reviewed_data, message)
Returns:
Dict of node_exec_id -> updated review model
Dict of node_exec_id -> updated review model (includes already-processed reviews)
"""
if not review_decisions:
return {}
node_exec_ids = list(review_decisions.keys())
# Get all reviews for validation
reviews = await PendingHumanReview.prisma().find_many(
# Get all reviews (both WAITING and already processed) for the user
all_reviews = await PendingHumanReview.prisma().find_many(
where={
"nodeExecId": {"in": node_exec_ids},
"userId": user_id,
"status": ReviewStatus.WAITING,
},
)
# Validate all reviews can be processed
if len(reviews) != len(node_exec_ids):
missing_ids = set(node_exec_ids) - {review.nodeExecId for review in reviews}
# Separate into pending and already-processed reviews
reviews_to_process = []
already_processed = []
for review in all_reviews:
if review.status == ReviewStatus.WAITING:
reviews_to_process.append(review)
else:
already_processed.append(review)
# Check for truly missing reviews (not found at all)
found_ids = {review.nodeExecId for review in all_reviews}
missing_ids = set(node_exec_ids) - found_ids
if missing_ids:
raise ValueError(
f"Reviews not found, access denied, or not in WAITING status: {', '.join(missing_ids)}"
f"Reviews not found or access denied: {', '.join(missing_ids)}"
)
# Create parallel update tasks
# Validate already-processed reviews have compatible status (same decision)
# This handles race conditions where another request processed the same reviews
for review in already_processed:
requested_status = review_decisions[review.nodeExecId][0]
if review.status != requested_status:
raise ValueError(
f"Review {review.nodeExecId} was already processed with status "
f"{review.status}, cannot change to {requested_status}"
)
# Log if we're handling a race condition (some reviews already processed)
if already_processed:
already_processed_ids = [r.nodeExecId for r in already_processed]
logger.info(
f"Race condition handled: {len(already_processed)} review(s) already "
f"processed by concurrent request: {already_processed_ids}"
)
# Create parallel update tasks for reviews that still need processing
update_tasks = []
for review in reviews:
for review in reviews_to_process:
new_status, reviewed_data, message = review_decisions[review.nodeExecId]
has_data_changes = reviewed_data is not None and reviewed_data != review.payload
@@ -463,7 +495,7 @@ async def process_all_reviews_for_execution(
update_tasks.append(task)
# Execute all updates in parallel and get updated reviews
updated_reviews = await asyncio.gather(*update_tasks)
updated_reviews = await asyncio.gather(*update_tasks) if update_tasks else []
# Note: Execution resumption is now handled at the API layer after ALL reviews
# for an execution are processed (both approved and rejected)
@@ -472,8 +504,11 @@ async def process_all_reviews_for_execution(
# Local import to avoid event loop conflicts in tests
from backend.data.execution import get_node_execution
# Combine updated reviews with already-processed ones (for idempotent response)
all_result_reviews = list(updated_reviews) + already_processed
result = {}
for review in updated_reviews:
for review in all_result_reviews:
node_exec = await get_node_execution(review.nodeExecId)
node_id = node_exec.node_id if node_exec else review.nodeExecId
result[review.nodeExecId] = PendingHumanReviewModel.from_db(

View File

@@ -1,41 +0,0 @@
"use client";
import { createContext, useContext, useRef, type ReactNode } from "react";
interface NewChatContextValue {
onNewChatClick: () => void;
setOnNewChatClick: (handler?: () => void) => void;
performNewChat?: () => void;
setPerformNewChat: (handler?: () => void) => void;
}
const NewChatContext = createContext<NewChatContextValue | null>(null);
export function NewChatProvider({ children }: { children: ReactNode }) {
const onNewChatRef = useRef<(() => void) | undefined>();
const performNewChatRef = useRef<(() => void) | undefined>();
const contextValueRef = useRef<NewChatContextValue>({
onNewChatClick() {
onNewChatRef.current?.();
},
setOnNewChatClick(handler?: () => void) {
onNewChatRef.current = handler;
},
performNewChat() {
performNewChatRef.current?.();
},
setPerformNewChat(handler?: () => void) {
performNewChatRef.current = handler;
},
});
return (
<NewChatContext.Provider value={contextValueRef.current}>
{children}
</NewChatContext.Provider>
);
}
export function useNewChat() {
return useContext(NewChatContext);
}

View File

@@ -4,7 +4,7 @@ import { ChatLoader } from "@/components/contextual/Chat/components/ChatLoader/C
import { NAVBAR_HEIGHT_PX } from "@/lib/constants";
import type { ReactNode } from "react";
import { useEffect } from "react";
import { useNewChat } from "../../NewChatContext";
import { useCopilotStore } from "../../copilot-page-store";
import { DesktopSidebar } from "./components/DesktopSidebar/DesktopSidebar";
import { LoadingState } from "./components/LoadingState/LoadingState";
import { MobileDrawer } from "./components/MobileDrawer/MobileDrawer";
@@ -35,21 +35,23 @@ export function CopilotShell({ children }: Props) {
isReadyToShowContent,
} = useCopilotShell();
const newChatContext = useNewChat();
const handleNewChatClickWrapper =
newChatContext?.onNewChatClick || handleNewChat;
const setNewChatHandler = useCopilotStore((s) => s.setNewChatHandler);
const requestNewChat = useCopilotStore((s) => s.requestNewChat);
useEffect(
function registerNewChatHandler() {
if (!newChatContext) return;
newChatContext.setPerformNewChat(handleNewChat);
setNewChatHandler(handleNewChat);
return function cleanup() {
newChatContext.setPerformNewChat(undefined);
setNewChatHandler(null);
};
},
[newChatContext, handleNewChat],
[handleNewChat],
);
function handleNewChatClick() {
requestNewChat();
}
if (!isLoggedIn) {
return (
<div className="flex h-full items-center justify-center">
@@ -72,7 +74,7 @@ export function CopilotShell({ children }: Props) {
isFetchingNextPage={isFetchingNextPage}
onSelectSession={handleSelectSession}
onFetchNextPage={fetchNextPage}
onNewChat={handleNewChatClickWrapper}
onNewChat={handleNewChatClick}
hasActiveSession={Boolean(hasActiveSession)}
/>
)}
@@ -94,7 +96,7 @@ export function CopilotShell({ children }: Props) {
isFetchingNextPage={isFetchingNextPage}
onSelectSession={handleSelectSession}
onFetchNextPage={fetchNextPage}
onNewChat={handleNewChatClickWrapper}
onNewChat={handleNewChatClick}
onClose={handleCloseDrawer}
onOpenChange={handleDrawerOpenChange}
hasActiveSession={Boolean(hasActiveSession)}

View File

@@ -1,7 +1,12 @@
import { useGetV2ListSessions } from "@/app/api/__generated__/endpoints/chat/chat";
import {
getGetV2ListSessionsQueryKey,
useGetV2ListSessions,
} from "@/app/api/__generated__/endpoints/chat/chat";
import type { SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse";
import { okData } from "@/app/api/helpers";
import { useEffect, useMemo, useState } from "react";
import { useChatStore } from "@/components/contextual/Chat/chat-store";
import { useQueryClient } from "@tanstack/react-query";
import { useEffect, useState } from "react";
const PAGE_SIZE = 50;
@@ -15,6 +20,8 @@ export function useSessionsPagination({ enabled }: UseSessionsPaginationArgs) {
SessionSummaryResponse[]
>([]);
const [totalCount, setTotalCount] = useState<number | null>(null);
const queryClient = useQueryClient();
const onStreamComplete = useChatStore((state) => state.onStreamComplete);
const { data, isLoading, isFetching, isError } = useGetV2ListSessions(
{ limit: PAGE_SIZE, offset },
@@ -25,35 +32,47 @@ export function useSessionsPagination({ enabled }: UseSessionsPaginationArgs) {
},
);
useEffect(() => {
const responseData = okData(data);
if (responseData) {
const newSessions = responseData.sessions;
const total = responseData.total;
setTotalCount(total);
if (offset === 0) {
setAccumulatedSessions(newSessions);
} else {
setAccumulatedSessions((prev) => [...prev, ...newSessions]);
}
} else if (!enabled) {
useEffect(function refreshOnStreamComplete() {
const unsubscribe = onStreamComplete(function handleStreamComplete() {
setOffset(0);
setAccumulatedSessions([]);
setTotalCount(null);
}
}, [data, offset, enabled]);
queryClient.invalidateQueries({
queryKey: getGetV2ListSessionsQueryKey(),
});
});
return unsubscribe;
}, []);
const hasNextPage = useMemo(() => {
if (totalCount === null) return false;
return accumulatedSessions.length < totalCount;
}, [accumulatedSessions.length, totalCount]);
useEffect(
function updateSessionsFromResponse() {
const responseData = okData(data);
if (responseData) {
const newSessions = responseData.sessions;
const total = responseData.total;
setTotalCount(total);
const areAllSessionsLoaded = useMemo(() => {
if (totalCount === null) return false;
return (
accumulatedSessions.length >= totalCount && !isFetching && !isLoading
);
}, [accumulatedSessions.length, totalCount, isFetching, isLoading]);
if (offset === 0) {
setAccumulatedSessions(newSessions);
} else {
setAccumulatedSessions((prev) => [...prev, ...newSessions]);
}
} else if (!enabled) {
setAccumulatedSessions([]);
setTotalCount(null);
}
},
[data, offset, enabled],
);
const hasNextPage =
totalCount !== null && accumulatedSessions.length < totalCount;
const areAllSessionsLoaded =
totalCount !== null &&
accumulatedSessions.length >= totalCount &&
!isFetching &&
!isLoading;
useEffect(() => {
if (

View File

@@ -2,9 +2,7 @@ import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessi
import type { SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse";
import { format, formatDistanceToNow, isToday } from "date-fns";
export function convertSessionDetailToSummary(
session: SessionDetailResponse,
): SessionSummaryResponse {
export function convertSessionDetailToSummary(session: SessionDetailResponse) {
return {
id: session.id,
created_at: session.created_at,
@@ -13,17 +11,25 @@ export function convertSessionDetailToSummary(
};
}
export function filterVisibleSessions(
sessions: SessionSummaryResponse[],
): SessionSummaryResponse[] {
return sessions.filter(
(session) => session.updated_at !== session.created_at,
);
export function filterVisibleSessions(sessions: SessionSummaryResponse[]) {
const fiveMinutesAgo = Date.now() - 5 * 60 * 1000;
return sessions.filter((session) => {
const hasBeenUpdated = session.updated_at !== session.created_at;
if (hasBeenUpdated) return true;
const isRecentlyCreated =
new Date(session.created_at).getTime() > fiveMinutesAgo;
return isRecentlyCreated;
});
}
export function getSessionTitle(session: SessionSummaryResponse): string {
export function getSessionTitle(session: SessionSummaryResponse) {
if (session.title) return session.title;
const isNewSession = session.updated_at === session.created_at;
if (isNewSession) {
const createdDate = new Date(session.created_at);
if (isToday(createdDate)) {
@@ -31,12 +37,11 @@ export function getSessionTitle(session: SessionSummaryResponse): string {
}
return format(createdDate, "MMM d, yyyy");
}
return "Untitled Chat";
}
export function getSessionUpdatedLabel(
session: SessionSummaryResponse,
): string {
export function getSessionUpdatedLabel(session: SessionSummaryResponse) {
if (!session.updated_at) return "";
return formatDistanceToNow(new Date(session.updated_at), { addSuffix: true });
}
@@ -45,8 +50,10 @@ export function mergeCurrentSessionIntoList(
accumulatedSessions: SessionSummaryResponse[],
currentSessionId: string | null,
currentSessionData: SessionDetailResponse | null | undefined,
): SessionSummaryResponse[] {
recentlyCreatedSessions?: Map<string, SessionSummaryResponse>,
) {
const filteredSessions: SessionSummaryResponse[] = [];
const addedIds = new Set<string>();
if (accumulatedSessions.length > 0) {
const visibleSessions = filterVisibleSessions(accumulatedSessions);
@@ -61,29 +68,40 @@ export function mergeCurrentSessionIntoList(
);
if (!isInVisible) {
filteredSessions.push(currentInAll);
addedIds.add(currentInAll.id);
}
}
}
filteredSessions.push(...visibleSessions);
for (const session of visibleSessions) {
if (!addedIds.has(session.id)) {
filteredSessions.push(session);
addedIds.add(session.id);
}
}
}
if (currentSessionId && currentSessionData) {
const isCurrentInList = filteredSessions.some(
(s) => s.id === currentSessionId,
);
if (!isCurrentInList) {
if (!addedIds.has(currentSessionId)) {
const summarySession = convertSessionDetailToSummary(currentSessionData);
filteredSessions.unshift(summarySession);
addedIds.add(currentSessionId);
}
}
if (recentlyCreatedSessions) {
for (const [sessionId, sessionData] of recentlyCreatedSessions) {
if (!addedIds.has(sessionId)) {
filteredSessions.unshift(sessionData);
addedIds.add(sessionId);
}
}
}
return filteredSessions;
}
export function getCurrentSessionId(
searchParams: URLSearchParams,
): string | null {
export function getCurrentSessionId(searchParams: URLSearchParams) {
return searchParams.get("sessionId");
}
@@ -95,11 +113,7 @@ export function shouldAutoSelectSession(
accumulatedSessions: SessionSummaryResponse[],
isLoading: boolean,
totalCount: number | null,
): {
shouldSelect: boolean;
sessionIdToSelect: string | null;
shouldCreate: boolean;
} {
) {
if (!areAllSessionsLoaded || hasAutoSelectedSession) {
return {
shouldSelect: false,
@@ -146,7 +160,7 @@ export function checkReadyToShowContent(
isCurrentSessionLoading: boolean,
currentSessionData: SessionDetailResponse | null | undefined,
hasAutoSelectedSession: boolean,
): boolean {
) {
if (!areAllSessionsLoaded) return false;
if (paramSessionId) {

View File

@@ -4,23 +4,25 @@ import {
getGetV2ListSessionsQueryKey,
useGetV2GetSession,
} from "@/app/api/__generated__/endpoints/chat/chat";
import type { SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse";
import { okData } from "@/app/api/helpers";
import { useBreakpoint } from "@/lib/hooks/useBreakpoint";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { useQueryClient } from "@tanstack/react-query";
import { usePathname, useRouter, useSearchParams } from "next/navigation";
import { parseAsString, useQueryState } from "nuqs";
import { usePathname, useSearchParams } from "next/navigation";
import { useEffect, useRef, useState } from "react";
import { useMobileDrawer } from "./components/MobileDrawer/useMobileDrawer";
import { useSessionsPagination } from "./components/SessionsList/useSessionsPagination";
import {
checkReadyToShowContent,
convertSessionDetailToSummary,
filterVisibleSessions,
getCurrentSessionId,
mergeCurrentSessionIntoList,
} from "./helpers";
export function useCopilotShell() {
const router = useRouter();
const pathname = usePathname();
const searchParams = useSearchParams();
const queryClient = useQueryClient();
@@ -29,6 +31,8 @@ export function useCopilotShell() {
const isMobile =
breakpoint === "base" || breakpoint === "sm" || breakpoint === "md";
const [, setUrlSessionId] = useQueryState("sessionId", parseAsString);
const isOnHomepage = pathname === "/copilot";
const paramSessionId = searchParams.get("sessionId");
@@ -65,6 +69,9 @@ export function useCopilotShell() {
const [hasAutoSelectedSession, setHasAutoSelectedSession] = useState(false);
const hasAutoSelectedRef = useRef(false);
const recentlyCreatedSessionsRef = useRef<
Map<string, SessionSummaryResponse>
>(new Map());
// Mark as auto-selected when sessionId is in URL
useEffect(() => {
@@ -91,6 +98,30 @@ export function useCopilotShell() {
}
}, [isOnHomepage, paramSessionId, queryClient]);
// Track newly created sessions to ensure they stay visible even when switching away
useEffect(() => {
if (currentSessionId && currentSessionData) {
const isNewSession =
currentSessionData.updated_at === currentSessionData.created_at;
const isNotInAccumulated = !accumulatedSessions.some(
(s) => s.id === currentSessionId,
);
if (isNewSession || isNotInAccumulated) {
const summary = convertSessionDetailToSummary(currentSessionData);
recentlyCreatedSessionsRef.current.set(currentSessionId, summary);
}
}
}, [currentSessionId, currentSessionData, accumulatedSessions]);
// Clean up recently created sessions that are now in the accumulated list
useEffect(() => {
for (const sessionId of recentlyCreatedSessionsRef.current.keys()) {
if (accumulatedSessions.some((s) => s.id === sessionId)) {
recentlyCreatedSessionsRef.current.delete(sessionId);
}
}
}, [accumulatedSessions]);
// Reset pagination when query becomes disabled
const prevPaginationEnabledRef = useRef(paginationEnabled);
useEffect(() => {
@@ -105,6 +136,7 @@ export function useCopilotShell() {
accumulatedSessions,
currentSessionId,
currentSessionData,
recentlyCreatedSessionsRef.current,
);
const visibleSessions = filterVisibleSessions(sessions);
@@ -124,22 +156,17 @@ export function useCopilotShell() {
);
function handleSelectSession(sessionId: string) {
// Navigate using replaceState to avoid full page reload
window.history.replaceState(null, "", `/copilot?sessionId=${sessionId}`);
// Force a re-render by updating the URL through router
router.replace(`/copilot?sessionId=${sessionId}`);
setUrlSessionId(sessionId, { shallow: false });
if (isMobile) handleCloseDrawer();
}
function handleNewChat() {
resetAutoSelect();
resetPagination();
// Invalidate and refetch sessions list to ensure newly created sessions appear
queryClient.invalidateQueries({
queryKey: getGetV2ListSessionsQueryKey(),
});
window.history.replaceState(null, "", "/copilot");
router.replace("/copilot");
setUrlSessionId(null, { shallow: false });
if (isMobile) handleCloseDrawer();
}

View File

@@ -0,0 +1,54 @@
"use client";
import { create } from "zustand";
/** UI state shared between the copilot page and its shell (e.g. navbar). */
interface CopilotStoreState {
  // True while a chat response is currently streaming in.
  isStreaming: boolean;
  // Whether the "start a new chat?" confirmation modal is visible.
  isNewChatModalOpen: boolean;
  // Callback that actually starts a new chat; registered by the page,
  // null until one has been set.
  newChatHandler: (() => void) | null;
}

/** Actions exposed by the copilot store. */
interface CopilotStoreActions {
  setIsStreaming: (isStreaming: boolean) => void;
  setNewChatHandler: (handler: (() => void) | null) => void;
  // Request a new chat: opens the confirmation modal while streaming,
  // otherwise invokes the registered handler directly.
  requestNewChat: () => void;
  // Close the modal and proceed with the registered handler.
  confirmNewChat: () => void;
  // Close the modal without starting a new chat.
  cancelNewChat: () => void;
}

type CopilotStore = CopilotStoreState & CopilotStoreActions;
export const useCopilotStore = create<CopilotStore>((set, get) => ({
  // --- state ---
  isStreaming: false,
  isNewChatModalOpen: false,
  newChatHandler: null,

  // --- actions ---
  setIsStreaming: (isStreaming) => set({ isStreaming }),

  setNewChatHandler: (handler) => set({ newChatHandler: handler }),

  requestNewChat: () => {
    const { isStreaming, newChatHandler } = get();
    // While a response is streaming, ask for confirmation instead of
    // discarding the in-flight chat immediately.
    if (isStreaming) {
      set({ isNewChatModalOpen: true });
      return;
    }
    newChatHandler?.();
  },

  confirmNewChat: () => {
    const { newChatHandler } = get();
    set({ isNewChatModalOpen: false });
    newChatHandler?.();
  },

  cancelNewChat: () => set({ isNewChatModalOpen: false }),
}));

View File

@@ -1,11 +1,6 @@
import type { ReactNode } from "react";
import { NewChatProvider } from "./NewChatContext";
import { CopilotShell } from "./components/CopilotShell/CopilotShell";
export default function CopilotLayout({ children }: { children: ReactNode }) {
return (
<NewChatProvider>
<CopilotShell>{children}</CopilotShell>
</NewChatProvider>
);
return <CopilotShell>{children}</CopilotShell>;
}

View File

@@ -1,16 +1,19 @@
"use client";
import { Skeleton } from "@/components/__legacy__/ui/skeleton";
import { Button } from "@/components/atoms/Button/Button";
import { Skeleton } from "@/components/atoms/Skeleton/Skeleton";
import { Text } from "@/components/atoms/Text/Text";
import { Chat } from "@/components/contextual/Chat/Chat";
import { ChatInput } from "@/components/contextual/Chat/components/ChatInput/ChatInput";
import { ChatLoader } from "@/components/contextual/Chat/components/ChatLoader/ChatLoader";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { useCopilotStore } from "./copilot-page-store";
import { useCopilotPage } from "./useCopilotPage";
export default function CopilotPage() {
const { state, handlers } = useCopilotPage();
const confirmNewChat = useCopilotStore((s) => s.confirmNewChat);
const {
greetingName,
quickActions,
@@ -25,15 +28,11 @@ export default function CopilotPage() {
handleSessionNotFound,
handleStreamingChange,
handleCancelNewChat,
proceedWithNewChat,
handleNewChatModalOpen,
} = handlers;
if (!isReady) {
return null;
}
if (!isReady) return null;
// Show Chat when we have an active session
if (pageState.type === "chat") {
return (
<div className="flex h-full flex-col">
@@ -71,7 +70,7 @@ export default function CopilotPage() {
<Button
type="button"
variant="primary"
onClick={proceedWithNewChat}
onClick={confirmNewChat}
>
Start new chat
</Button>
@@ -83,7 +82,7 @@ export default function CopilotPage() {
);
}
if (pageState.type === "newChat") {
if (pageState.type === "newChat" || pageState.type === "creating") {
return (
<div className="flex h-full flex-1 flex-col items-center justify-center bg-[#f8f8f9]">
<div className="flex flex-col items-center gap-4">
@@ -96,21 +95,6 @@ export default function CopilotPage() {
);
}
// Show loading state while creating session and sending first message
if (pageState.type === "creating") {
return (
<div className="flex h-full flex-1 flex-col items-center justify-center bg-[#f8f8f9]">
<div className="flex flex-col items-center gap-4">
<ChatLoader />
<Text variant="body" className="text-zinc-500">
Loading your chats...
</Text>
</div>
</div>
);
}
// Show Welcome screen
return (
<div className="flex h-full flex-1 items-center justify-center overflow-y-auto bg-[#f8f8f9] px-6 py-10">
<div className="w-full text-center">

View File

@@ -1,4 +1,7 @@
import { postV2CreateSession } from "@/app/api/__generated__/endpoints/chat/chat";
import {
getGetV2ListSessionsQueryKey,
postV2CreateSession,
} from "@/app/api/__generated__/endpoints/chat/chat";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { getHomepageRoute } from "@/lib/constants";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
@@ -8,25 +11,22 @@ import {
useGetFlag,
} from "@/services/feature-flags/use-get-flag";
import * as Sentry from "@sentry/nextjs";
import { useQueryClient } from "@tanstack/react-query";
import { useFlags } from "launchdarkly-react-client-sdk";
import { useRouter } from "next/navigation";
import { useEffect, useReducer } from "react";
import { useNewChat } from "./NewChatContext";
import { useCopilotStore } from "./copilot-page-store";
import { getGreetingName, getQuickActions, type PageState } from "./helpers";
import { useCopilotURLState } from "./useCopilotURLState";
type CopilotState = {
pageState: PageState;
isStreaming: boolean;
isNewChatModalOpen: boolean;
initialPrompts: Record<string, string>;
previousSessionId: string | null;
};
type CopilotAction =
| { type: "setPageState"; pageState: PageState }
| { type: "setStreaming"; isStreaming: boolean }
| { type: "setNewChatModalOpen"; isOpen: boolean }
| { type: "setInitialPrompt"; sessionId: string; prompt: string }
| { type: "setPreviousSessionId"; sessionId: string | null };
@@ -52,14 +52,6 @@ function copilotReducer(
if (isSamePageState(action.pageState, state.pageState)) return state;
return { ...state, pageState: action.pageState };
}
if (action.type === "setStreaming") {
if (action.isStreaming === state.isStreaming) return state;
return { ...state, isStreaming: action.isStreaming };
}
if (action.type === "setNewChatModalOpen") {
if (action.isOpen === state.isNewChatModalOpen) return state;
return { ...state, isNewChatModalOpen: action.isOpen };
}
if (action.type === "setInitialPrompt") {
if (state.initialPrompts[action.sessionId] === action.prompt) return state;
return {
@@ -79,9 +71,14 @@ function copilotReducer(
export function useCopilotPage() {
const router = useRouter();
const queryClient = useQueryClient();
const { user, isLoggedIn, isUserLoading } = useSupabase();
const { toast } = useToast();
const isNewChatModalOpen = useCopilotStore((s) => s.isNewChatModalOpen);
const setIsStreaming = useCopilotStore((s) => s.setIsStreaming);
const cancelNewChat = useCopilotStore((s) => s.cancelNewChat);
const isChatEnabled = useGetFlag(Flag.CHAT);
const flags = useFlags<FlagValues>();
const homepageRoute = getHomepageRoute(isChatEnabled);
@@ -93,13 +90,10 @@ export function useCopilotPage() {
const [state, dispatch] = useReducer(copilotReducer, {
pageState: { type: "welcome" },
isStreaming: false,
isNewChatModalOpen: false,
initialPrompts: {},
previousSessionId: null,
});
const newChatContext = useNewChat();
const greetingName = getGreetingName(user);
const quickActions = getQuickActions();
@@ -124,17 +118,6 @@ export function useCopilotPage() {
setPreviousSessionId,
});
useEffect(
function registerNewChatHandler() {
if (!newChatContext) return;
newChatContext.setOnNewChatClick(handleNewChatClick);
return function cleanup() {
newChatContext.setOnNewChatClick(undefined);
};
},
[newChatContext, handleNewChatClick],
);
useEffect(
function transitionNewChatToWelcome() {
if (state.pageState.type === "newChat") {
@@ -189,6 +172,10 @@ export function useCopilotPage() {
prompt: trimmedPrompt,
});
await queryClient.invalidateQueries({
queryKey: getGetV2ListSessionsQueryKey(),
});
await setUrlSessionId(sessionId, { shallow: false });
dispatch({
type: "setPageState",
@@ -211,37 +198,15 @@ export function useCopilotPage() {
}
function handleStreamingChange(isStreamingValue: boolean) {
dispatch({ type: "setStreaming", isStreaming: isStreamingValue });
}
async function proceedWithNewChat() {
dispatch({ type: "setNewChatModalOpen", isOpen: false });
if (newChatContext?.performNewChat) {
newChatContext.performNewChat();
return;
}
try {
await setUrlSessionId(null, { shallow: false });
} catch (error) {
console.error("[CopilotPage] Failed to clear session:", error);
}
router.replace("/copilot");
setIsStreaming(isStreamingValue);
}
function handleCancelNewChat() {
dispatch({ type: "setNewChatModalOpen", isOpen: false });
cancelNewChat();
}
function handleNewChatModalOpen(isOpen: boolean) {
dispatch({ type: "setNewChatModalOpen", isOpen });
}
function handleNewChatClick() {
if (state.isStreaming) {
dispatch({ type: "setNewChatModalOpen", isOpen: true });
} else {
proceedWithNewChat();
}
if (!isOpen) cancelNewChat();
}
return {
@@ -250,7 +215,7 @@ export function useCopilotPage() {
quickActions,
isLoading: isUserLoading,
pageState: state.pageState,
isNewChatModalOpen: state.isNewChatModalOpen,
isNewChatModalOpen,
isReady: isFlagReady && isChatEnabled !== false && isLoggedIn,
},
handlers: {
@@ -259,7 +224,6 @@ export function useCopilotPage() {
handleSessionNotFound,
handleStreamingChange,
handleCancelNewChat,
proceedWithNewChat,
handleNewChatModalOpen,
},
};

View File

@@ -1,10 +1,12 @@
import { Navbar } from "@/components/layout/Navbar/Navbar";
import { NetworkStatusMonitor } from "@/services/network-status/NetworkStatusMonitor";
import { ReactNode } from "react";
import { AdminImpersonationBanner } from "./admin/components/AdminImpersonationBanner";
export default function PlatformLayout({ children }: { children: ReactNode }) {
return (
<main className="flex h-screen w-full flex-col">
<NetworkStatusMonitor />
<Navbar />
<AdminImpersonationBanner />
<section className="flex-1">{children}</section>

View File

@@ -0,0 +1,14 @@
import { cn } from "@/lib/utils";
interface Props extends React.HTMLAttributes<HTMLDivElement> {
  // Re-declared for clarity; already part of HTMLAttributes.
  className?: string;
}

/**
 * Pulsing placeholder shown while content is loading.
 * Renders a plain <div>; size and shape are controlled via `className`.
 */
export function Skeleton({ className, ...props }: Props) {
  return (
    <div
      className={cn("animate-pulse rounded-md bg-zinc-100", className)}
      {...props}
    />
  );
}

View File

@@ -1,4 +1,4 @@
import { Skeleton } from "@/components/__legacy__/ui/skeleton";
import { Skeleton } from "./Skeleton";
import type { Meta, StoryObj } from "@storybook/nextjs";
const meta: Meta<typeof Skeleton> = {

View File

@@ -0,0 +1,234 @@
"use client";
import { create } from "zustand";
import type {
ActiveStream,
StreamChunk,
StreamCompleteCallback,
StreamResult,
StreamStatus,
} from "./chat-types";
import { executeStream } from "./stream-executor";
const COMPLETED_STREAM_TTL = 5 * 60 * 1000; // 5 minutes
interface ChatStoreState {
activeStreams: Map<string, ActiveStream>;
completedStreams: Map<string, StreamResult>;
activeSessions: Set<string>;
streamCompleteCallbacks: Set<StreamCompleteCallback>;
}
interface ChatStoreActions {
startStream: (
sessionId: string,
message: string,
isUserMessage: boolean,
context?: { url: string; content: string },
onChunk?: (chunk: StreamChunk) => void,
) => Promise<void>;
stopStream: (sessionId: string) => void;
subscribeToStream: (
sessionId: string,
onChunk: (chunk: StreamChunk) => void,
skipReplay?: boolean,
) => () => void;
getStreamStatus: (sessionId: string) => StreamStatus;
getCompletedStream: (sessionId: string) => StreamResult | undefined;
clearCompletedStream: (sessionId: string) => void;
isStreaming: (sessionId: string) => boolean;
registerActiveSession: (sessionId: string) => void;
unregisterActiveSession: (sessionId: string) => void;
isSessionActive: (sessionId: string) => boolean;
onStreamComplete: (callback: StreamCompleteCallback) => () => void;
}
type ChatStore = ChatStoreState & ChatStoreActions;
/**
 * Invoke every registered stream-complete callback for a session.
 * A throwing callback is logged and does not stop the remaining callbacks.
 */
function notifyStreamComplete(
  callbacks: Set<StreamCompleteCallback>,
  sessionId: string,
) {
  callbacks.forEach((cb) => {
    try {
      cb(sessionId);
    } catch (err) {
      console.warn("[ChatStore] Stream complete callback error:", err);
    }
  });
}
/**
 * Evict archived stream results older than COMPLETED_STREAM_TTL.
 * Mutates the map in place.
 */
function cleanupCompletedStreams(completedStreams: Map<string, StreamResult>) {
  const cutoff = Date.now() - COMPLETED_STREAM_TTL;
  for (const [sessionId, result] of completedStreams) {
    if (result.completedAt < cutoff) {
      completedStreams.delete(sessionId);
    }
  }
}
/**
 * Archive an active stream into the completed-streams map, evict stale
 * results, and notify stream-complete listeners for terminal states.
 * No-op when the session has no active stream.
 */
function moveToCompleted(
  activeStreams: Map<string, ActiveStream>,
  completedStreams: Map<string, StreamResult>,
  streamCompleteCallbacks: Set<StreamCompleteCallback>,
  sessionId: string,
) {
  const stream = activeStreams.get(sessionId);
  if (!stream) return;

  // A stream can arrive here still marked "streaming" when it was aborted
  // and replaced by startStream. Record it as terminal ("completed",
  // matching stopStream's abort handling) so getStreamStatus and
  // getCompletedStream never report a dead stream as live.
  const finalStatus = stream.status === "streaming" ? "completed" : stream.status;

  const result: StreamResult = {
    sessionId,
    status: finalStatus,
    chunks: stream.chunks,
    completedAt: Date.now(),
    error: stream.error,
  };
  completedStreams.set(sessionId, result);
  activeStreams.delete(sessionId);

  // Opportunistic eviction keeps the archive bounded.
  cleanupCompletedStreams(completedStreams);

  if (finalStatus === "completed" || finalStatus === "error") {
    notifyStreamComplete(streamCompleteCallbacks, sessionId);
  }
}
/**
 * Global registry of chat streams, keyed by session id.
 *
 * NOTE(review): all state lives in mutable Map/Set instances that are
 * mutated in place — `set` is never called — so zustand will not notify
 * subscribers when these collections change. Consumers read through the
 * accessor actions (getStreamStatus, isStreaming, ...) and per-chunk
 * callbacks rather than selecting derived values.
 */
export const useChatStore = create<ChatStore>((set, get) => ({
  activeStreams: new Map(),
  completedStreams: new Map(),
  activeSessions: new Set(),
  streamCompleteCallbacks: new Set(),

  // Start (or restart) streaming for a session. Any existing stream for the
  // same session is aborted and archived first. Resolves when the stream
  // finishes or fails; chunks flow through `onChunk` and any subscribers.
  startStream: async function startStream(
    sessionId,
    message,
    isUserMessage,
    context,
    onChunk,
  ) {
    const { activeStreams, completedStreams, streamCompleteCallbacks } = get();

    const existingStream = activeStreams.get(sessionId);
    if (existingStream) {
      // Replace semantics: cancel the in-flight request before starting anew.
      existingStream.abortController.abort();
      moveToCompleted(
        activeStreams,
        completedStreams,
        streamCompleteCallbacks,
        sessionId,
      );
    }

    const abortController = new AbortController();
    const initialCallbacks = new Set<(chunk: StreamChunk) => void>();
    if (onChunk) initialCallbacks.add(onChunk);

    const stream: ActiveStream = {
      sessionId,
      abortController,
      status: "streaming",
      startedAt: Date.now(),
      chunks: [],
      onChunkCallbacks: initialCallbacks,
    };
    activeStreams.set(sessionId, stream);

    try {
      await executeStream(stream, message, isUserMessage, context);
    } finally {
      if (onChunk) stream.onChunkCallbacks.delete(onChunk);
      // Archive only when the stream reached a terminal state.
      // NOTE(review): presumably a still-"streaming" status here means the
      // stream was replaced (and already archived) by a newer startStream
      // call — confirm against executeStream's status updates.
      if (stream.status !== "streaming") {
        moveToCompleted(
          activeStreams,
          completedStreams,
          streamCompleteCallbacks,
          sessionId,
        );
      }
    }
  },

  // Abort a session's stream and archive it as a normal completion.
  stopStream: function stopStream(sessionId) {
    const { activeStreams, completedStreams, streamCompleteCallbacks } = get();
    const stream = activeStreams.get(sessionId);
    if (stream) {
      stream.abortController.abort();
      // User-initiated stop is recorded as "completed", not an error.
      stream.status = "completed";
      moveToCompleted(
        activeStreams,
        completedStreams,
        streamCompleteCallbacks,
        sessionId,
      );
    }
  },

  // Attach a per-chunk listener to a live stream. Unless skipReplay is set,
  // chunks received so far are replayed synchronously to the new listener.
  // Returns an unsubscribe function; a no-op when no stream is active.
  subscribeToStream: function subscribeToStream(
    sessionId,
    onChunk,
    skipReplay = false,
  ) {
    const { activeStreams } = get();
    const stream = activeStreams.get(sessionId);
    if (stream) {
      if (!skipReplay) {
        for (const chunk of stream.chunks) {
          onChunk(chunk);
        }
      }
      stream.onChunkCallbacks.add(onChunk);
      return function unsubscribe() {
        stream.onChunkCallbacks.delete(onChunk);
      };
    }
    return function noop() {};
  },

  // Lookup order: live stream status, then archived result, else "idle".
  getStreamStatus: function getStreamStatus(sessionId) {
    const { activeStreams, completedStreams } = get();
    const active = activeStreams.get(sessionId);
    if (active) return active.status;
    const completed = completedStreams.get(sessionId);
    if (completed) return completed.status;
    return "idle";
  },

  getCompletedStream: function getCompletedStream(sessionId) {
    return get().completedStreams.get(sessionId);
  },

  clearCompletedStream: function clearCompletedStream(sessionId) {
    get().completedStreams.delete(sessionId);
  },

  isStreaming: function isStreaming(sessionId) {
    const stream = get().activeStreams.get(sessionId);
    return stream?.status === "streaming";
  },

  // NOTE(review): presumably marks sessions whose chat UI is currently
  // mounted — confirm at call sites.
  registerActiveSession: function registerActiveSession(sessionId) {
    get().activeSessions.add(sessionId);
  },

  unregisterActiveSession: function unregisterActiveSession(sessionId) {
    get().activeSessions.delete(sessionId);
  },

  isSessionActive: function isSessionActive(sessionId) {
    return get().activeSessions.has(sessionId);
  },

  // Register a callback fired whenever any stream completes or errors
  // (see moveToCompleted). Returns an unsubscribe function.
  onStreamComplete: function onStreamComplete(callback) {
    const { streamCompleteCallbacks } = get();
    streamCompleteCallbacks.add(callback);
    return function unsubscribe() {
      streamCompleteCallbacks.delete(callback);
    };
  },
}));

View File

@@ -0,0 +1,94 @@
import type { ToolArguments, ToolResult } from "@/types/chat";
/** Lifecycle states of a per-session chat stream. */
export type StreamStatus = "idle" | "streaming" | "completed" | "error";

/**
 * One event received from the chat backend's stream.
 * Only `type` is always present; the remaining fields are populated
 * depending on the event type. The index signature passes through any
 * backend fields not modelled explicitly.
 */
export interface StreamChunk {
  type:
    | "text_chunk"
    | "text_ended"
    | "tool_call"
    | "tool_call_start"
    | "tool_response"
    | "login_needed"
    | "need_login"
    | "credentials_needed"
    | "error"
    | "usage"
    | "stream_end";
  timestamp?: string;
  content?: string;
  message?: string;
  code?: string;
  details?: Record<string, unknown>;
  tool_id?: string;
  tool_name?: string;
  arguments?: ToolArguments;
  result?: ToolResult;
  success?: boolean;
  idx?: number;
  session_id?: string;
  agent_info?: {
    graph_id: string;
    name: string;
    trigger_type: string;
  };
  provider?: string;
  provider_name?: string;
  credential_type?: string;
  scopes?: string[];
  title?: string;
  // Catch-all for additional backend-provided fields.
  [key: string]: unknown;
}

/**
 * Discriminated union (on `type`) of chunk shapes, presumably following the
 * Vercel AI SDK data-stream protocol — confirm against the stream producer.
 */
export type VercelStreamChunk =
  | { type: "start"; messageId: string }
  | { type: "finish" }
  | { type: "text-start"; id: string }
  | { type: "text-delta"; id: string; delta: string }
  | { type: "text-end"; id: string }
  | { type: "tool-input-start"; toolCallId: string; toolName: string }
  | {
      type: "tool-input-available";
      toolCallId: string;
      toolName: string;
      input: Record<string, unknown>;
    }
  | {
      type: "tool-output-available";
      toolCallId: string;
      toolName?: string;
      output: unknown;
      success?: boolean;
    }
  | {
      type: "usage";
      promptTokens: number;
      completionTokens: number;
      totalTokens: number;
    }
  | {
      type: "error";
      errorText: string;
      code?: string;
      details?: Record<string, unknown>;
    };

/** A stream currently in flight for one session. */
export interface ActiveStream {
  sessionId: string;
  // Cancels the underlying request when aborted.
  abortController: AbortController;
  status: StreamStatus;
  // Epoch milliseconds when the stream started.
  startedAt: number;
  // All chunks received so far (replayed to late subscribers).
  chunks: StreamChunk[];
  error?: Error;
  // Listeners invoked for each incoming chunk.
  onChunkCallbacks: Set<(chunk: StreamChunk) => void>;
}

/** Snapshot of a finished stream, retained for a limited time. */
export interface StreamResult {
  sessionId: string;
  status: StreamStatus;
  chunks: StreamChunk[];
  // Epoch milliseconds when the stream was archived.
  completedAt: number;
  error?: Error;
}

/** Invoked when a session's stream reaches a terminal state. */
export type StreamCompleteCallback = (sessionId: string) => void;

View File

@@ -4,6 +4,7 @@ import { Text } from "@/components/atoms/Text/Text";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { useBreakpoint } from "@/lib/hooks/useBreakpoint";
import { cn } from "@/lib/utils";
import { GlobeHemisphereEastIcon } from "@phosphor-icons/react";
import { useEffect } from "react";
import { ChatInput } from "../ChatInput/ChatInput";
import { MessageList } from "../MessageList/MessageList";
@@ -55,24 +56,37 @@ export function ChatContainer({
)}
>
<Dialog
title="Service unavailable"
title={
<div className="flex items-center gap-2">
<GlobeHemisphereEastIcon className="size-6" />
<Text
variant="body"
className="text-md font-poppins leading-none md:text-lg"
>
Service unavailable
</Text>
</div>
}
controlled={{
isOpen: isRegionBlockedModalOpen,
set: handleRegionModalOpenChange,
}}
onClose={handleRegionModalClose}
styling={{ maxWidth: 550, width: "100%", minWidth: "auto" }}
>
<Dialog.Content>
<div className="flex flex-col gap-4">
<div className="flex flex-col gap-8">
<Text variant="body">
This model is not available in your region. Please connect via VPN
and try again.
The Autogpt AI model is not available in your region or your
connection is blocking it. Please try again with a different
connection.
</Text>
<div className="flex justify-end">
<div className="flex justify-center">
<Button
type="button"
variant="primary"
onClick={handleRegionModalClose}
className="w-full"
>
Got it
</Button>

View File

@@ -1,5 +1,5 @@
import { toast } from "sonner";
import { StreamChunk } from "../../useChatStream";
import type { StreamChunk } from "../../chat-types";
import type { HandlerDependencies } from "./handlers";
import {
handleError,

View File

@@ -1,7 +1,118 @@
import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessionDetailResponse";
import { SessionKey, sessionStorage } from "@/services/storage/session-storage";
import type { ToolResult } from "@/types/chat";
import type { ChatMessageData } from "../ChatMessage/useChatMessage";
/**
 * Convert raw session messages from the backend into renderable
 * ChatMessageData entries.
 *
 * - user: page-context markup is stripped; empty messages are dropped.
 * - assistant: <thinking>/<internal_reasoning> blocks are removed; tool
 *   calls become individual "tool_call" entries (remembering id -> name so
 *   later tool results can be labelled), followed by any remaining text.
 * - tool: parsed into a tool-response entry via parseToolResponse.
 * - other roles: passed through as plain messages when non-empty.
 */
export function processInitialMessages(
  initialMessages: SessionDetailResponse["messages"],
): ChatMessageData[] {
  const processedMessages: ChatMessageData[] = [];
  // Maps tool-call id -> tool name so "tool" role results can be labelled.
  const toolCallMap = new Map<string, string>();

  for (const msg of initialMessages) {
    if (!isValidMessage(msg)) {
      console.warn("Invalid message structure from backend:", msg);
      continue;
    }

    let content = String(msg.content || "");
    const role = String(msg.role || "assistant").toLowerCase();
    const toolCalls = msg.tool_calls;
    const timestamp = msg.timestamp
      ? new Date(msg.timestamp as string)
      : undefined;

    if (role === "user") {
      content = removePageContext(content);
      if (!content.trim()) continue;
      processedMessages.push({
        type: "message",
        role: "user",
        content,
        timestamp,
      });
      continue;
    }

    if (role === "assistant") {
      // Strip internal reasoning the model may have emitted inline.
      content = content
        .replace(/<thinking>[\s\S]*?<\/thinking>/gi, "")
        .replace(/<internal_reasoning>[\s\S]*?<\/internal_reasoning>/gi, "")
        .trim();

      if (toolCalls && isToolCallArray(toolCalls) && toolCalls.length > 0) {
        for (const toolCall of toolCalls) {
          const toolName = toolCall.function.name;
          const toolId = toolCall.id;
          toolCallMap.set(toolId, toolName);
          // Arguments arrive JSON-encoded; fall back to {} when unparsable.
          let args = {};
          try {
            args = JSON.parse(toolCall.function.arguments || "{}");
          } catch (err) {
            console.warn("Failed to parse tool call arguments:", err);
          }
          processedMessages.push({
            type: "tool_call",
            toolId,
            toolName,
            arguments: args,
            timestamp,
          });
        }
      }

      // Emit remaining assistant text once, whether or not tool calls were
      // present (previously this push was duplicated in both branches).
      if (content.trim()) {
        processedMessages.push({
          type: "message",
          role: "assistant",
          content,
          timestamp,
        });
      }
      continue;
    }

    if (role === "tool") {
      const toolCallId = (msg.tool_call_id as string) || "";
      const toolName = toolCallMap.get(toolCallId) || "unknown";
      const toolResponse = parseToolResponse(
        content,
        toolCallId,
        toolName,
        timestamp,
      );
      if (toolResponse) {
        processedMessages.push(toolResponse);
      }
      continue;
    }

    // Unknown roles (e.g. system) pass through as plain messages.
    if (content.trim()) {
      processedMessages.push({
        type: "message",
        role: role as "user" | "assistant" | "system",
        content,
        timestamp,
      });
    }
  }

  return processedMessages;
}
export function hasSentInitialPrompt(sessionId: string): boolean {
try {
const sent = JSON.parse(

View File

@@ -1,5 +1,6 @@
import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessionDetailResponse";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { useEffect, useMemo, useRef, useState } from "react";
import { useChatStore } from "../../chat-store";
import { toast } from "sonner";
import { useChatStream } from "../../useChatStream";
import { usePageContext } from "../../usePageContext";
@@ -9,11 +10,8 @@ import {
createUserMessage,
filterAuthMessages,
hasSentInitialPrompt,
isToolCallArray,
isValidMessage,
markInitialPromptSent,
parseToolResponse,
removePageContext,
processInitialMessages,
} from "./helpers";
interface Args {
@@ -41,11 +39,18 @@ export function useChatContainer({
sendMessage: sendStreamMessage,
stopStreaming,
} = useChatStream();
const activeStreams = useChatStore((s) => s.activeStreams);
const subscribeToStream = useChatStore((s) => s.subscribeToStream);
const isStreaming = isStreamingInitiated || hasTextChunks;
useEffect(() => {
if (sessionId !== previousSessionIdRef.current) {
stopStreaming(previousSessionIdRef.current ?? undefined, true);
useEffect(
function handleSessionChange() {
if (sessionId === previousSessionIdRef.current) return;
const prevSession = previousSessionIdRef.current;
if (prevSession) {
stopStreaming(prevSession);
}
previousSessionIdRef.current = sessionId;
setMessages([]);
setStreamingChunks([]);
@@ -53,138 +58,11 @@ export function useChatContainer({
setHasTextChunks(false);
setIsStreamingInitiated(false);
hasResponseRef.current = false;
}
}, [sessionId, stopStreaming]);
const allMessages = useMemo(() => {
const processedInitialMessages: ChatMessageData[] = [];
const toolCallMap = new Map<string, string>();
if (!sessionId) return;
for (const msg of initialMessages) {
if (!isValidMessage(msg)) {
console.warn("Invalid message structure from backend:", msg);
continue;
}
let content = String(msg.content || "");
const role = String(msg.role || "assistant").toLowerCase();
const toolCalls = msg.tool_calls;
const timestamp = msg.timestamp
? new Date(msg.timestamp as string)
: undefined;
if (role === "user") {
content = removePageContext(content);
if (!content.trim()) continue;
processedInitialMessages.push({
type: "message",
role: "user",
content,
timestamp,
});
continue;
}
if (role === "assistant") {
content = content
.replace(/<thinking>[\s\S]*?<\/thinking>/gi, "")
.trim();
if (toolCalls && isToolCallArray(toolCalls) && toolCalls.length > 0) {
for (const toolCall of toolCalls) {
const toolName = toolCall.function.name;
const toolId = toolCall.id;
toolCallMap.set(toolId, toolName);
try {
const args = JSON.parse(toolCall.function.arguments || "{}");
processedInitialMessages.push({
type: "tool_call",
toolId,
toolName,
arguments: args,
timestamp,
});
} catch (err) {
console.warn("Failed to parse tool call arguments:", err);
processedInitialMessages.push({
type: "tool_call",
toolId,
toolName,
arguments: {},
timestamp,
});
}
}
if (content.trim()) {
processedInitialMessages.push({
type: "message",
role: "assistant",
content,
timestamp,
});
}
} else if (content.trim()) {
processedInitialMessages.push({
type: "message",
role: "assistant",
content,
timestamp,
});
}
continue;
}
if (role === "tool") {
const toolCallId = (msg.tool_call_id as string) || "";
const toolName = toolCallMap.get(toolCallId) || "unknown";
const toolResponse = parseToolResponse(
content,
toolCallId,
toolName,
timestamp,
);
if (toolResponse) {
processedInitialMessages.push(toolResponse);
}
continue;
}
if (content.trim()) {
processedInitialMessages.push({
type: "message",
role: role as "user" | "assistant" | "system",
content,
timestamp,
});
}
}
return [...processedInitialMessages, ...messages];
}, [initialMessages, messages]);
const sendMessage = useCallback(
async function sendMessage(
content: string,
isUserMessage: boolean = true,
context?: { url: string; content: string },
) {
if (!sessionId) {
console.error("[useChatContainer] Cannot send message: no session ID");
return;
}
setIsRegionBlockedModalOpen(false);
if (isUserMessage) {
const userMessage = createUserMessage(content);
setMessages((prev) => [...filterAuthMessages(prev), userMessage]);
} else {
setMessages((prev) => filterAuthMessages(prev));
}
setStreamingChunks([]);
streamingChunksRef.current = [];
setHasTextChunks(false);
setIsStreamingInitiated(true);
hasResponseRef.current = false;
const activeStream = activeStreams.get(sessionId);
if (!activeStream || activeStream.status !== "streaming") return;
const dispatcher = createStreamEventDispatcher({
setHasTextChunks,
@@ -197,42 +75,85 @@ export function useChatContainer({
setIsStreamingInitiated,
});
try {
await sendStreamMessage(
sessionId,
content,
dispatcher,
isUserMessage,
context,
);
} catch (err) {
console.error("[useChatContainer] Failed to send message:", err);
setIsStreamingInitiated(false);
// Don't show error toast for AbortError (expected during cleanup)
if (err instanceof Error && err.name === "AbortError") return;
const errorMessage =
err instanceof Error ? err.message : "Failed to send message";
toast.error("Failed to send message", {
description: errorMessage,
});
}
setIsStreamingInitiated(true);
const skipReplay = initialMessages.length > 0;
return subscribeToStream(sessionId, dispatcher, skipReplay);
},
[sessionId, sendStreamMessage],
[sessionId, stopStreaming, activeStreams, subscribeToStream],
);
const handleStopStreaming = useCallback(() => {
const allMessages = useMemo(
() => [...processInitialMessages(initialMessages), ...messages],
[initialMessages, messages],
);
async function sendMessage(
content: string,
isUserMessage: boolean = true,
context?: { url: string; content: string },
) {
if (!sessionId) {
console.error("[useChatContainer] Cannot send message: no session ID");
return;
}
setIsRegionBlockedModalOpen(false);
if (isUserMessage) {
const userMessage = createUserMessage(content);
setMessages((prev) => [...filterAuthMessages(prev), userMessage]);
} else {
setMessages((prev) => filterAuthMessages(prev));
}
setStreamingChunks([]);
streamingChunksRef.current = [];
setHasTextChunks(false);
setIsStreamingInitiated(true);
hasResponseRef.current = false;
const dispatcher = createStreamEventDispatcher({
setHasTextChunks,
setStreamingChunks,
streamingChunksRef,
hasResponseRef,
setMessages,
setIsRegionBlockedModalOpen,
sessionId,
setIsStreamingInitiated,
});
try {
await sendStreamMessage(
sessionId,
content,
dispatcher,
isUserMessage,
context,
);
} catch (err) {
console.error("[useChatContainer] Failed to send message:", err);
setIsStreamingInitiated(false);
if (err instanceof Error && err.name === "AbortError") return;
const errorMessage =
err instanceof Error ? err.message : "Failed to send message";
toast.error("Failed to send message", {
description: errorMessage,
});
}
}
function handleStopStreaming() {
stopStreaming();
setStreamingChunks([]);
streamingChunksRef.current = [];
setHasTextChunks(false);
setIsStreamingInitiated(false);
}, [stopStreaming]);
}
const { capturePageContext } = usePageContext();
const sendMessageRef = useRef(sendMessage);
sendMessageRef.current = sendMessage;
// Send initial prompt if provided (for new sessions from homepage)
useEffect(
function handleInitialPrompt() {
if (!initialPrompt || !sessionId) return;
@@ -241,15 +162,9 @@ export function useChatContainer({
markInitialPromptSent(sessionId);
const context = capturePageContext();
sendMessage(initialPrompt, true, context);
sendMessageRef.current(initialPrompt, true, context);
},
[
initialPrompt,
sessionId,
initialMessages.length,
sendMessage,
capturePageContext,
],
[initialPrompt, sessionId, initialMessages.length, capturePageContext],
);
async function sendMessageWithContext(

View File

@@ -21,7 +21,7 @@ export function ChatInput({
className,
}: Props) {
const inputId = "chat-input";
const { value, setValue, handleKeyDown, handleSend, hasMultipleLines } =
const { value, handleKeyDown, handleSubmit, handleChange, hasMultipleLines } =
useChatInput({
onSend,
disabled: disabled || isStreaming,
@@ -29,15 +29,6 @@ export function ChatInput({
inputId,
});
function handleSubmit(e: React.FormEvent<HTMLFormElement>) {
e.preventDefault();
handleSend();
}
function handleChange(e: React.ChangeEvent<HTMLTextAreaElement>) {
setValue(e.target.value);
}
return (
<form onSubmit={handleSubmit} className={cn("relative flex-1", className)}>
<div className="relative">

View File

@@ -1,4 +1,10 @@
import { KeyboardEvent, useCallback, useEffect, useState } from "react";
import {
ChangeEvent,
FormEvent,
KeyboardEvent,
useEffect,
useState,
} from "react";
interface UseChatInputArgs {
onSend: (message: string) => void;
@@ -16,6 +22,23 @@ export function useChatInput({
const [value, setValue] = useState("");
const [hasMultipleLines, setHasMultipleLines] = useState(false);
useEffect(
function focusOnMount() {
const textarea = document.getElementById(inputId) as HTMLTextAreaElement;
if (textarea) textarea.focus();
},
[inputId],
);
useEffect(
function focusWhenEnabled() {
if (disabled) return;
const textarea = document.getElementById(inputId) as HTMLTextAreaElement;
if (textarea) textarea.focus();
},
[disabled, inputId],
);
useEffect(() => {
const textarea = document.getElementById(inputId) as HTMLTextAreaElement;
const wrapper = document.getElementById(
@@ -77,7 +100,7 @@ export function useChatInput({
}
}, [value, maxRows, inputId]);
const handleSend = useCallback(() => {
const handleSend = () => {
if (disabled || !value.trim()) return;
onSend(value.trim());
setValue("");
@@ -93,23 +116,31 @@ export function useChatInput({
wrapper.style.height = "";
wrapper.style.maxHeight = "";
}
}, [value, onSend, disabled, inputId]);
};
const handleKeyDown = useCallback(
(event: KeyboardEvent<HTMLTextAreaElement>) => {
if (event.key === "Enter" && !event.shiftKey) {
event.preventDefault();
handleSend();
}
},
[handleSend],
);
function handleKeyDown(event: KeyboardEvent<HTMLTextAreaElement>) {
if (event.key === "Enter" && !event.shiftKey) {
event.preventDefault();
handleSend();
}
}
function handleSubmit(e: FormEvent<HTMLFormElement>) {
e.preventDefault();
handleSend();
}
function handleChange(e: ChangeEvent<HTMLTextAreaElement>) {
setValue(e.target.value);
}
return {
value,
setValue,
handleKeyDown,
handleSend,
handleSubmit,
handleChange,
hasMultipleLines,
};
}

View File

@@ -1,7 +1,5 @@
import { AIChatBubble } from "../../../AIChatBubble/AIChatBubble";
import type { ChatMessageData } from "../../../ChatMessage/useChatMessage";
import { MarkdownContent } from "../../../MarkdownContent/MarkdownContent";
import { formatToolResponse } from "../../../ToolResponseMessage/helpers";
import { ToolResponseMessage } from "../../../ToolResponseMessage/ToolResponseMessage";
import { shouldSkipAgentOutput } from "../../helpers";
export interface LastToolResponseProps {
@@ -15,16 +13,15 @@ export function LastToolResponse({
}: LastToolResponseProps) {
if (message.type !== "tool_response") return null;
// Skip if this is an agent_output that should be rendered inside assistant message
if (shouldSkipAgentOutput(message, prevMessage)) return null;
const formattedText = formatToolResponse(message.result, message.toolName);
return (
<div className="min-w-0 overflow-x-hidden hyphens-auto break-words px-4 py-2">
<AIChatBubble>
<MarkdownContent content={formattedText} />
</AIChatBubble>
<ToolResponseMessage
toolId={message.toolId}
toolName={message.toolName}
result={message.result}
/>
</div>
);
}

View File

@@ -1,7 +1,14 @@
import { Text } from "@/components/atoms/Text/Text";
import { cn } from "@/lib/utils";
import type { ToolResult } from "@/types/chat";
import { WarningCircleIcon } from "@phosphor-icons/react";
import { AIChatBubble } from "../AIChatBubble/AIChatBubble";
import { MarkdownContent } from "../MarkdownContent/MarkdownContent";
import { formatToolResponse } from "./helpers";
import {
formatToolResponse,
getErrorMessage,
isErrorResponse,
} from "./helpers";
export interface ToolResponseMessageProps {
toolId?: string;
@@ -18,6 +25,24 @@ export function ToolResponseMessage({
success: _success,
className,
}: ToolResponseMessageProps) {
if (isErrorResponse(result)) {
const errorMessage = getErrorMessage(result);
return (
<AIChatBubble className={className}>
<div className="flex items-center gap-2">
<WarningCircleIcon
size={14}
weight="regular"
className="shrink-0 text-neutral-400"
/>
<Text variant="small" className={cn("text-xs text-neutral-500")}>
{errorMessage}
</Text>
</div>
</AIChatBubble>
);
}
const formattedText = formatToolResponse(result, toolName);
return (

View File

@@ -1,3 +1,42 @@
/**
 * Remove model "thinking" markup from a response string.
 *
 * Strips `<internal_reasoning>…</internal_reasoning>` and
 * `<thinking>…</thinking>` blocks (case-insensitive), collapses any run of
 * three-or-more newlines left behind down to a single blank line, and trims
 * surrounding whitespace.
 */
function stripInternalReasoning(content: string): string {
  const withoutReasoning = content.replace(
    /<internal_reasoning>[\s\S]*?<\/internal_reasoning>/gi,
    "",
  );
  const withoutThinking = withoutReasoning.replace(
    /<thinking>[\s\S]*?<\/thinking>/gi,
    "",
  );
  return withoutThinking.replace(/\n{3,}/g, "\n\n").trim();
}
/**
 * Heuristically decide whether a tool result represents an error.
 *
 * Strings are matched against common failure phrasing; objects are treated as
 * errors when they carry `type: "error"` or any `error` field. Everything
 * else (numbers, null, undefined) is considered a success.
 */
export function isErrorResponse(result: unknown): boolean {
  if (typeof result === "string") {
    const lower = result.toLowerCase();
    if (lower.startsWith("error:")) return true;
    const failurePhrases = [
      "not found",
      "does not exist",
      "failed to",
      "unable to",
    ];
    return failurePhrases.some((phrase) => lower.includes(phrase));
  }
  if (result !== null && typeof result === "object") {
    const { type, error } = result as Record<string, unknown>;
    return type === "error" || error !== undefined;
  }
  return false;
}
/**
 * Extract a human-readable error message from a tool result.
 *
 * Strings get their leading "error:" prefix removed; objects are probed for
 * a truthy `error` or `message` field. All extracted text is passed through
 * stripInternalReasoning. Falls back to a generic message when nothing
 * usable is found.
 */
export function getErrorMessage(result: unknown): string {
  if (typeof result === "string") {
    const withoutPrefix = result.replace(/^error:\s*/i, "");
    return stripInternalReasoning(withoutPrefix);
  }
  if (result !== null && typeof result === "object") {
    const { error, message } = result as Record<string, unknown>;
    if (error) return stripInternalReasoning(String(error));
    if (message) return stripInternalReasoning(String(message));
  }
  return "An error occurred";
}
function getToolCompletionPhrase(toolName: string): string {
const toolCompletionPhrases: Record<string, string> = {
add_understanding: "Updated your business information",
@@ -28,10 +67,10 @@ export function formatToolResponse(result: unknown, toolName: string): string {
const parsed = JSON.parse(trimmed);
return formatToolResponse(parsed, toolName);
} catch {
return trimmed;
return stripInternalReasoning(trimmed);
}
}
return result;
return stripInternalReasoning(result);
}
if (typeof result !== "object" || result === null) {

View File

@@ -0,0 +1,142 @@
import type {
ActiveStream,
StreamChunk,
VercelStreamChunk,
} from "./chat-types";
import {
INITIAL_RETRY_DELAY,
MAX_RETRIES,
normalizeStreamChunk,
parseSSELine,
} from "./stream-utils";
/**
 * Record `chunk` in the stream's replay buffer and fan it out to every
 * registered subscriber. A throwing subscriber is isolated: its error is
 * logged and the remaining callbacks still run.
 */
function notifySubscribers(stream: ActiveStream, chunk: StreamChunk) {
  stream.chunks.push(chunk);
  stream.onChunkCallbacks.forEach((callback) => {
    try {
      callback(chunk);
    } catch (err) {
      console.warn("[StreamExecutor] Subscriber callback error:", err);
    }
  });
}
/**
 * POST `message` to the session's SSE endpoint and pump the response stream,
 * pushing each normalized chunk to the stream's subscribers via
 * notifySubscribers.
 *
 * Terminates when the HTTP body ends, a "[DONE]" sentinel or a
 * `stream_end`/`error` chunk arrives, or the stream's AbortController fires.
 * On any other failure it retries itself with exponential backoff up to
 * MAX_RETRIES times before marking the stream as errored.
 *
 * Mutates `stream.status` (and `stream.error` on failure) as a side effect.
 */
export async function executeStream(
  stream: ActiveStream,
  message: string,
  isUserMessage: boolean,
  context?: { url: string; content: string },
  retryCount: number = 0,
): Promise<void> {
  const { sessionId, abortController } = stream;
  try {
    const url = `/api/chat/sessions/${sessionId}/stream`;
    const body = JSON.stringify({
      message,
      is_user_message: isUserMessage,
      context: context || null,
    });
    const response = await fetch(url, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Accept: "text/event-stream",
      },
      body,
      // Aborting the controller cancels the fetch and the body read below.
      signal: abortController.signal,
    });
    if (!response.ok) {
      const errorText = await response.text();
      throw new Error(errorText || `HTTP ${response.status}`);
    }
    if (!response.body) {
      throw new Error("Response body is null");
    }
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    // Holds the trailing partial line between reads; SSE events are
    // newline-delimited, so only complete lines are parsed each iteration.
    let buffer = "";
    while (true) {
      const { done, value } = await reader.read();
      if (done) {
        // Server closed the connection without an explicit end marker:
        // treat as a normal completion so subscribers can finalize.
        notifySubscribers(stream, { type: "stream_end" });
        stream.status = "completed";
        return;
      }
      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split("\n");
      // The last element may be an incomplete line; keep it for next read.
      buffer = lines.pop() || "";
      for (const line of lines) {
        const data = parseSSELine(line);
        if (data !== null) {
          if (data === "[DONE]") {
            notifySubscribers(stream, { type: "stream_end" });
            stream.status = "completed";
            return;
          }
          try {
            const rawChunk = JSON.parse(data) as
              | StreamChunk
              | VercelStreamChunk;
            const chunk = normalizeStreamChunk(rawChunk);
            // Null means a lifecycle-only chunk with no payload; skip it.
            if (!chunk) continue;
            notifySubscribers(stream, chunk);
            if (chunk.type === "stream_end") {
              stream.status = "completed";
              return;
            }
            if (chunk.type === "error") {
              stream.status = "error";
              stream.error = new Error(
                chunk.message || chunk.content || "Stream error",
              );
              return;
            }
          } catch (err) {
            // Malformed JSON in one event must not kill the whole stream.
            console.warn("[StreamExecutor] Failed to parse SSE chunk:", err);
          }
        }
      }
    }
  } catch (err) {
    if (err instanceof Error && err.name === "AbortError") {
      // User-initiated stop: surface a clean end, not an error.
      notifySubscribers(stream, { type: "stream_end" });
      stream.status = "completed";
      return;
    }
    if (retryCount < MAX_RETRIES) {
      // Exponential backoff: INITIAL_RETRY_DELAY * 2^retryCount.
      const retryDelay = INITIAL_RETRY_DELAY * Math.pow(2, retryCount);
      console.log(
        `[StreamExecutor] Retrying in ${retryDelay}ms (attempt ${retryCount + 1}/${MAX_RETRIES})`,
      );
      await new Promise((resolve) => setTimeout(resolve, retryDelay));
      return executeStream(
        stream,
        message,
        isUserMessage,
        context,
        retryCount + 1,
      );
    }
    // Retries exhausted: record the failure and notify subscribers once.
    stream.status = "error";
    stream.error = err instanceof Error ? err : new Error("Stream failed");
    notifySubscribers(stream, {
      type: "error",
      message: stream.error.message,
    });
  }
}

View File

@@ -0,0 +1,84 @@
import type { ToolArguments, ToolResult } from "@/types/chat";
import type { StreamChunk, VercelStreamChunk } from "./chat-types";
// Chunk `type` values emitted by the original (pre-Vercel) streaming
// protocol; used to distinguish legacy chunks from Vercel AI SDK chunks.
const LEGACY_STREAM_TYPES = new Set<StreamChunk["type"]>([
  "text_chunk",
  "text_ended",
  "tool_call",
  "tool_call_start",
  "tool_response",
  "login_needed",
  "need_login",
  "credentials_needed",
  "error",
  "usage",
  "stream_end",
]);

/**
 * Type guard: true when `chunk` already uses the legacy StreamChunk shape.
 *
 * NOTE(review): "error" and "usage" appear in BOTH protocols, so a Vercel
 * chunk with one of those types is classified as legacy here — confirm
 * callers handle that ambiguity before relying on this guard alone.
 */
export function isLegacyStreamChunk(
  chunk: StreamChunk | VercelStreamChunk,
): chunk is StreamChunk {
  return LEGACY_STREAM_TYPES.has(chunk.type as StreamChunk["type"]);
}
/**
 * Normalize a raw SSE chunk (legacy or Vercel AI SDK format) into the
 * internal legacy StreamChunk shape.
 *
 * Returns null for lifecycle-only chunks with no renderable payload
 * ("start", "text-start") and for chunk types this client does not know;
 * callers skip null results.
 *
 * Fix: "error" exists in both protocols, so the legacy passthrough below
 * would otherwise swallow Vercel error chunks and drop their `errorText`
 * (leaving `message` unset). Map those explicitly first. ("usage" also
 * overlaps, but its field names are identical in both shapes, so the
 * passthrough is already equivalent for it.)
 */
export function normalizeStreamChunk(
  chunk: StreamChunk | VercelStreamChunk,
): StreamChunk | null {
  if (chunk.type === "error" && "errorText" in chunk) {
    const vercelError = chunk as Extract<VercelStreamChunk, { type: "error" }>;
    return {
      type: "error",
      message: vercelError.errorText,
      code: vercelError.code,
      details: vercelError.details,
    };
  }
  // Legacy chunks already use the internal shape; pass them through as-is.
  if (isLegacyStreamChunk(chunk)) return chunk;
  switch (chunk.type) {
    case "text-delta":
      return { type: "text_chunk", content: chunk.delta };
    case "text-end":
      return { type: "text_ended" };
    case "tool-input-available":
      return {
        type: "tool_call_start",
        tool_id: chunk.toolCallId,
        tool_name: chunk.toolName,
        arguments: chunk.input as ToolArguments,
      };
    case "tool-output-available":
      return {
        type: "tool_response",
        tool_id: chunk.toolCallId,
        tool_name: chunk.toolName,
        result: chunk.output as ToolResult,
        // Vercel chunks may omit `success`; treat missing as success.
        success: chunk.success ?? true,
      };
    case "usage":
      return {
        type: "usage",
        promptTokens: chunk.promptTokens,
        completionTokens: chunk.completionTokens,
        totalTokens: chunk.totalTokens,
      };
    case "error":
      return {
        type: "error",
        message: chunk.errorText,
        code: chunk.code,
        details: chunk.details,
      };
    case "finish":
      return { type: "stream_end" };
    case "start":
    case "text-start":
      // Lifecycle markers with no payload; callers skip null chunks.
      return null;
    case "tool-input-start":
      return {
        type: "tool_call_start",
        tool_id: chunk.toolCallId,
        tool_name: chunk.toolName,
        arguments: {},
      };
    default:
      // Defensive: the server may emit chunk types newer than this client's
      // union (e.g. keep-alive variants). Drop them instead of falling off
      // the switch and returning `undefined`, which would violate the
      // declared `StreamChunk | null` return type at runtime.
      return null;
  }
}
// Retry policy for the SSE stream: up to 3 attempts, backoff starting at 1s.
export const MAX_RETRIES = 3;
export const INITIAL_RETRY_DELAY = 1000;

/**
 * Extract the payload of a `data:` SSE line.
 *
 * Returns null for every other line kind (comments like ": heartbeat",
 * `event:`/`id:` fields, blank separators), which callers simply ignore.
 */
export function parseSSELine(line: string): string | null {
  const prefix = "data: ";
  return line.startsWith(prefix) ? line.slice(prefix.length) : null;
}

View File

@@ -2,7 +2,6 @@
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { useEffect, useRef, useState } from "react";
import { toast } from "sonner";
import { useChatSession } from "./useChatSession";
import { useChatStream } from "./useChatStream";
@@ -67,38 +66,16 @@ export function useChat({ urlSessionId }: UseChatArgs = {}) {
],
);
useEffect(() => {
if (isLoading || isCreating) {
const timer = setTimeout(() => {
setShowLoader(true);
}, 300);
return () => clearTimeout(timer);
} else {
useEffect(
function showLoaderWithDelay() {
if (isLoading || isCreating) {
const timer = setTimeout(() => setShowLoader(true), 300);
return () => clearTimeout(timer);
}
setShowLoader(false);
}
}, [isLoading, isCreating]);
useEffect(function monitorNetworkStatus() {
function handleOnline() {
toast.success("Connection restored", {
description: "You're back online",
});
}
function handleOffline() {
toast.error("You're offline", {
description: "Check your internet connection",
});
}
window.addEventListener("online", handleOnline);
window.addEventListener("offline", handleOffline);
return () => {
window.removeEventListener("online", handleOnline);
window.removeEventListener("offline", handleOffline);
};
}, []);
},
[isLoading, isCreating],
);
function clearSession() {
clearSessionBase();

View File

@@ -1,17 +0,0 @@
"use client";
import { create } from "zustand";
// Shape of the chat-drawer UI store: a single open/closed flag plus the
// three actions that mutate it.
interface ChatDrawerState {
  isOpen: boolean;
  open: () => void;
  close: () => void;
  toggle: () => void;
}

/** Global zustand store controlling chat drawer visibility (closed by default). */
export const useChatDrawer = create<ChatDrawerState>(function buildStore(set) {
  return {
    isOpen: false,
    open() {
      set({ isOpen: true });
    },
    close() {
      set({ isOpen: false });
    },
    toggle() {
      set((prev) => ({ isOpen: !prev.isOpen }));
    },
  };
});

View File

@@ -1,6 +1,7 @@
import {
getGetV2GetSessionQueryKey,
getGetV2GetSessionQueryOptions,
getGetV2ListSessionsQueryKey,
postV2CreateSession,
useGetV2GetSession,
usePatchV2SessionAssignUser,
@@ -101,6 +102,17 @@ export function useChatSession({
}
}, [createError, loadError]);
useEffect(
function refreshSessionsListOnLoad() {
if (sessionId && sessionData && !isLoadingSession) {
queryClient.invalidateQueries({
queryKey: getGetV2ListSessionsQueryKey(),
});
}
},
[sessionId, sessionData, isLoadingSession, queryClient],
);
async function createSession() {
try {
setError(null);

View File

@@ -1,543 +1,110 @@
import type { ToolArguments, ToolResult } from "@/types/chat";
import { useCallback, useEffect, useRef, useState } from "react";
"use client";
import { useEffect, useRef, useState } from "react";
import { toast } from "sonner";
import { useChatStore } from "./chat-store";
import type { StreamChunk } from "./chat-types";
const MAX_RETRIES = 3;
const INITIAL_RETRY_DELAY = 1000;
export interface StreamChunk {
type:
| "text_chunk"
| "text_ended"
| "tool_call"
| "tool_call_start"
| "tool_response"
| "login_needed"
| "need_login"
| "credentials_needed"
| "error"
| "usage"
| "stream_end";
timestamp?: string;
content?: string;
message?: string;
code?: string;
details?: Record<string, unknown>;
tool_id?: string;
tool_name?: string;
arguments?: ToolArguments;
result?: ToolResult;
success?: boolean;
idx?: number;
session_id?: string;
agent_info?: {
graph_id: string;
name: string;
trigger_type: string;
};
provider?: string;
provider_name?: string;
credential_type?: string;
scopes?: string[];
title?: string;
[key: string]: unknown;
}
type VercelStreamChunk =
| { type: "start"; messageId: string }
| { type: "finish" }
| { type: "text-start"; id: string }
| { type: "text-delta"; id: string; delta: string }
| { type: "text-end"; id: string }
| { type: "tool-input-start"; toolCallId: string; toolName: string }
| {
type: "tool-input-available";
toolCallId: string;
toolName: string;
input: ToolArguments;
}
| {
type: "tool-output-available";
toolCallId: string;
toolName?: string;
output: ToolResult;
success?: boolean;
}
| {
type: "usage";
promptTokens: number;
completionTokens: number;
totalTokens: number;
}
| {
type: "error";
errorText: string;
code?: string;
details?: Record<string, unknown>;
};
const LEGACY_STREAM_TYPES = new Set<StreamChunk["type"]>([
"text_chunk",
"text_ended",
"tool_call",
"tool_call_start",
"tool_response",
"login_needed",
"need_login",
"credentials_needed",
"error",
"usage",
"stream_end",
]);
function isLegacyStreamChunk(
chunk: StreamChunk | VercelStreamChunk,
): chunk is StreamChunk {
return LEGACY_STREAM_TYPES.has(chunk.type as StreamChunk["type"]);
}
function normalizeStreamChunk(
chunk: StreamChunk | VercelStreamChunk,
): StreamChunk | null {
if (isLegacyStreamChunk(chunk)) {
return chunk;
}
switch (chunk.type) {
case "text-delta":
return { type: "text_chunk", content: chunk.delta };
case "text-end":
return { type: "text_ended" };
case "tool-input-available":
return {
type: "tool_call_start",
tool_id: chunk.toolCallId,
tool_name: chunk.toolName,
arguments: chunk.input,
};
case "tool-output-available":
return {
type: "tool_response",
tool_id: chunk.toolCallId,
tool_name: chunk.toolName,
result: chunk.output,
success: chunk.success ?? true,
};
case "usage":
return {
type: "usage",
promptTokens: chunk.promptTokens,
completionTokens: chunk.completionTokens,
totalTokens: chunk.totalTokens,
};
case "error":
return {
type: "error",
message: chunk.errorText,
code: chunk.code,
details: chunk.details,
};
case "finish":
return { type: "stream_end" };
case "start":
case "text-start":
return null;
case "tool-input-start":
const toolInputStart = chunk as Extract<
VercelStreamChunk,
{ type: "tool-input-start" }
>;
return {
type: "tool_call_start",
tool_id: toolInputStart.toolCallId,
tool_name: toolInputStart.toolName,
arguments: {},
};
}
}
export type { StreamChunk } from "./chat-types";
export function useChatStream() {
const [isStreaming, setIsStreaming] = useState(false);
const [error, setError] = useState<Error | null>(null);
const retryCountRef = useRef<number>(0);
const retryTimeoutRef = useRef<NodeJS.Timeout | null>(null);
const abortControllerRef = useRef<AbortController | null>(null);
const currentSessionIdRef = useRef<string | null>(null);
const requestStartTimeRef = useRef<number | null>(null);
const stopStreaming = useCallback(
(sessionId?: string, force: boolean = false) => {
console.log("[useChatStream] stopStreaming called", {
hasAbortController: !!abortControllerRef.current,
isAborted: abortControllerRef.current?.signal.aborted,
currentSessionId: currentSessionIdRef.current,
requestedSessionId: sessionId,
requestStartTime: requestStartTimeRef.current,
timeSinceStart: requestStartTimeRef.current
? Date.now() - requestStartTimeRef.current
: null,
force,
stack: new Error().stack,
});
if (
sessionId &&
currentSessionIdRef.current &&
currentSessionIdRef.current !== sessionId
) {
console.log(
"[useChatStream] Session changed, aborting previous stream",
{
oldSessionId: currentSessionIdRef.current,
newSessionId: sessionId,
},
);
}
const controller = abortControllerRef.current;
if (controller) {
const timeSinceStart = requestStartTimeRef.current
? Date.now() - requestStartTimeRef.current
: null;
if (!force && timeSinceStart !== null && timeSinceStart < 100) {
console.log(
"[useChatStream] Request just started (<100ms), skipping abort to prevent race condition",
{
timeSinceStart,
},
);
return;
}
try {
const signal = controller.signal;
if (
signal &&
typeof signal.aborted === "boolean" &&
!signal.aborted
) {
console.log("[useChatStream] Aborting stream");
controller.abort();
} else {
console.log(
"[useChatStream] Stream already aborted or signal invalid",
);
}
} catch (error) {
if (error instanceof Error && error.name === "AbortError") {
console.log(
"[useChatStream] AbortError caught (expected during cleanup)",
);
} else {
console.warn("[useChatStream] Error aborting stream:", error);
}
} finally {
abortControllerRef.current = null;
requestStartTimeRef.current = null;
}
}
if (retryTimeoutRef.current) {
clearTimeout(retryTimeoutRef.current);
retryTimeoutRef.current = null;
}
setIsStreaming(false);
},
[],
const onChunkCallbackRef = useRef<((chunk: StreamChunk) => void) | null>(
null,
);
const stopStream = useChatStore((s) => s.stopStream);
const unregisterActiveSession = useChatStore(
(s) => s.unregisterActiveSession,
);
const isSessionActive = useChatStore((s) => s.isSessionActive);
const onStreamComplete = useChatStore((s) => s.onStreamComplete);
const getCompletedStream = useChatStore((s) => s.getCompletedStream);
const registerActiveSession = useChatStore((s) => s.registerActiveSession);
const startStream = useChatStore((s) => s.startStream);
const getStreamStatus = useChatStore((s) => s.getStreamStatus);
function stopStreaming(sessionId?: string) {
const targetSession = sessionId || currentSessionIdRef.current;
if (targetSession) {
stopStream(targetSession);
unregisterActiveSession(targetSession);
}
setIsStreaming(false);
}
useEffect(() => {
console.log("[useChatStream] Component mounted");
return () => {
const sessionIdAtUnmount = currentSessionIdRef.current;
console.log(
"[useChatStream] Component unmounting, calling stopStreaming",
{
sessionIdAtUnmount,
},
);
stopStreaming(undefined, false);
return function cleanup() {
const sessionId = currentSessionIdRef.current;
if (sessionId && !isSessionActive(sessionId)) {
stopStream(sessionId);
}
currentSessionIdRef.current = null;
onChunkCallbackRef.current = null;
};
}, [stopStreaming]);
}, []);
const sendMessage = useCallback(
async (
sessionId: string,
message: string,
onChunk: (chunk: StreamChunk) => void,
isUserMessage: boolean = true,
context?: { url: string; content: string },
isRetry: boolean = false,
) => {
console.log("[useChatStream] sendMessage called", {
sessionId,
message: message.substring(0, 50),
isUserMessage,
isRetry,
stack: new Error().stack,
});
useEffect(() => {
const unsubscribe = onStreamComplete(
function handleStreamComplete(completedSessionId) {
if (completedSessionId !== currentSessionIdRef.current) return;
const previousSessionId = currentSessionIdRef.current;
stopStreaming(sessionId, true);
currentSessionIdRef.current = sessionId;
const abortController = new AbortController();
abortControllerRef.current = abortController;
requestStartTimeRef.current = Date.now();
console.log("[useChatStream] Created new AbortController", {
sessionId,
previousSessionId,
requestStartTime: requestStartTimeRef.current,
});
if (abortController.signal.aborted) {
console.warn(
"[useChatStream] AbortController was aborted before request started",
);
requestStartTimeRef.current = null;
return Promise.reject(new Error("Request aborted"));
}
if (!isRetry) {
retryCountRef.current = 0;
}
setIsStreaming(true);
setError(null);
try {
const url = `/api/chat/sessions/${sessionId}/stream`;
const body = JSON.stringify({
message,
is_user_message: isUserMessage,
context: context || null,
});
const response = await fetch(url, {
method: "POST",
headers: {
"Content-Type": "application/json",
Accept: "text/event-stream",
},
body,
signal: abortController.signal,
});
console.info("[useChatStream] Stream response", {
sessionId,
status: response.status,
ok: response.ok,
contentType: response.headers.get("content-type"),
});
if (!response.ok) {
const errorText = await response.text();
console.warn("[useChatStream] Stream response error", {
sessionId,
status: response.status,
errorText,
});
throw new Error(errorText || `HTTP ${response.status}`);
}
if (!response.body) {
console.warn("[useChatStream] Response body is null", { sessionId });
throw new Error("Response body is null");
}
const reader = response.body.getReader();
const decoder = new TextDecoder();
let buffer = "";
let receivedChunkCount = 0;
let firstChunkAt: number | null = null;
let loggedLineCount = 0;
return new Promise<void>((resolve, reject) => {
let didDispatchStreamEnd = false;
function dispatchStreamEnd() {
if (didDispatchStreamEnd) return;
didDispatchStreamEnd = true;
onChunk({ type: "stream_end" });
}
const cleanup = () => {
reader.cancel().catch(() => {
// Ignore cancel errors
});
};
async function readStream() {
try {
while (true) {
const { done, value } = await reader.read();
if (done) {
cleanup();
console.info("[useChatStream] Stream closed", {
sessionId,
receivedChunkCount,
timeSinceStart: requestStartTimeRef.current
? Date.now() - requestStartTimeRef.current
: null,
});
dispatchStreamEnd();
retryCountRef.current = 0;
stopStreaming();
resolve();
return;
}
buffer += decoder.decode(value, { stream: true });
const lines = buffer.split("\n");
buffer = lines.pop() || "";
for (const line of lines) {
if (line.startsWith("data: ")) {
const data = line.slice(6);
if (loggedLineCount < 3) {
console.info("[useChatStream] Raw stream line", {
sessionId,
data:
data.length > 300 ? `${data.slice(0, 300)}...` : data,
});
loggedLineCount += 1;
}
if (data === "[DONE]") {
cleanup();
console.info("[useChatStream] Stream done marker", {
sessionId,
receivedChunkCount,
timeSinceStart: requestStartTimeRef.current
? Date.now() - requestStartTimeRef.current
: null,
});
dispatchStreamEnd();
retryCountRef.current = 0;
stopStreaming();
resolve();
return;
}
try {
const rawChunk = JSON.parse(data) as
| StreamChunk
| VercelStreamChunk;
const chunk = normalizeStreamChunk(rawChunk);
if (!chunk) {
continue;
}
if (!firstChunkAt) {
firstChunkAt = Date.now();
console.info("[useChatStream] First stream chunk", {
sessionId,
chunkType: chunk.type,
timeSinceStart: requestStartTimeRef.current
? firstChunkAt - requestStartTimeRef.current
: null,
});
}
receivedChunkCount += 1;
// Call the chunk handler
onChunk(chunk);
// Handle stream lifecycle
if (chunk.type === "stream_end") {
didDispatchStreamEnd = true;
cleanup();
console.info("[useChatStream] Stream end chunk", {
sessionId,
receivedChunkCount,
timeSinceStart: requestStartTimeRef.current
? Date.now() - requestStartTimeRef.current
: null,
});
retryCountRef.current = 0;
stopStreaming();
resolve();
return;
} else if (chunk.type === "error") {
cleanup();
reject(
new Error(
chunk.message || chunk.content || "Stream error",
),
);
return;
}
} catch (err) {
// Skip invalid JSON lines
console.warn("Failed to parse SSE chunk:", err, data);
}
}
}
}
} catch (err) {
if (err instanceof Error && err.name === "AbortError") {
cleanup();
dispatchStreamEnd();
stopStreaming();
resolve();
return;
}
const streamError =
err instanceof Error ? err : new Error("Failed to read stream");
if (retryCountRef.current < MAX_RETRIES) {
retryCountRef.current += 1;
const retryDelay =
INITIAL_RETRY_DELAY * Math.pow(2, retryCountRef.current - 1);
toast.info("Connection interrupted", {
description: `Retrying in ${retryDelay / 1000} seconds...`,
});
retryTimeoutRef.current = setTimeout(() => {
sendMessage(
sessionId,
message,
onChunk,
isUserMessage,
context,
true,
).catch((_err) => {
// Retry failed
});
}, retryDelay);
} else {
setError(streamError);
toast.error("Connection Failed", {
description:
"Unable to connect to chat service. Please try again.",
});
cleanup();
dispatchStreamEnd();
retryCountRef.current = 0;
stopStreaming();
reject(streamError);
}
}
}
readStream();
});
} catch (err) {
if (err instanceof Error && err.name === "AbortError") {
setIsStreaming(false);
return Promise.resolve();
}
const streamError =
err instanceof Error ? err : new Error("Failed to start stream");
setError(streamError);
setIsStreaming(false);
throw streamError;
const completed = getCompletedStream(completedSessionId);
if (completed?.error) {
setError(completed.error);
}
unregisterActiveSession(completedSessionId);
},
);
return unsubscribe;
}, []);
async function sendMessage(
sessionId: string,
message: string,
onChunk: (chunk: StreamChunk) => void,
isUserMessage: boolean = true,
context?: { url: string; content: string },
) {
const previousSessionId = currentSessionIdRef.current;
if (previousSessionId && previousSessionId !== sessionId) {
stopStreaming(previousSessionId);
}
currentSessionIdRef.current = sessionId;
onChunkCallbackRef.current = onChunk;
setIsStreaming(true);
setError(null);
registerActiveSession(sessionId);
try {
await startStream(sessionId, message, isUserMessage, context, onChunk);
const status = getStreamStatus(sessionId);
if (status === "error") {
const completed = getCompletedStream(sessionId);
if (completed?.error) {
setError(completed.error);
toast.error("Connection Failed", {
description: "Unable to connect to chat service. Please try again.",
});
throw completed.error;
}
}
},
[stopStreaming],
);
} catch (err) {
const streamError =
err instanceof Error ? err : new Error("Failed to start stream");
setError(streamError);
throw streamError;
} finally {
setIsStreaming(false);
}
}
return {
isStreaming,

View File

@@ -9,11 +9,12 @@ import { ReactNode, useEffect, useRef } from "react";
export function PostHogProvider({ children }: { children: ReactNode }) {
const isPostHogEnabled = environment.isPostHogEnabled();
const postHogCredentials = environment.getPostHogCredentials();
useEffect(() => {
if (process.env.NEXT_PUBLIC_POSTHOG_KEY) {
posthog.init(process.env.NEXT_PUBLIC_POSTHOG_KEY, {
api_host: process.env.NEXT_PUBLIC_POSTHOG_HOST,
if (postHogCredentials.key) {
posthog.init(postHogCredentials.key, {
api_host: postHogCredentials.host,
defaults: "2025-11-30",
capture_pageview: false,
capture_pageleave: true,

View File

@@ -0,0 +1,8 @@
"use client";
import { useNetworkStatus } from "./useNetworkStatus";
/**
 * Renderless component that mounts the useNetworkStatus hook so online/offline
 * toasts are shown app-wide. Renders nothing.
 */
export function NetworkStatusMonitor() {
  useNetworkStatus();
  return null;
}

View File

@@ -0,0 +1,28 @@
"use client";
import { useEffect } from "react";
import { toast } from "sonner";
/**
 * Hook that watches the browser's online/offline events for the lifetime of
 * the component and surfaces a toast on each transition. Listeners are
 * removed on unmount.
 */
export function useNetworkStatus() {
  useEffect(function monitorNetworkStatus() {
    const notifyOnline = () => {
      toast.success("Connection restored", {
        description: "You're back online",
      });
    };
    const notifyOffline = () => {
      toast.error("You're offline", {
        description: "Check your internet connection",
      });
    };
    window.addEventListener("online", notifyOnline);
    window.addEventListener("offline", notifyOffline);
    return function cleanup() {
      window.removeEventListener("online", notifyOnline);
      window.removeEventListener("offline", notifyOffline);
    };
  }, []);
}