chore: more changes

This commit is contained in:
Lluis Agusti
2026-01-21 20:38:15 +07:00
parent de29445a3d
commit d335f9d9d1
9 changed files with 144 additions and 17 deletions

View File

@@ -35,7 +35,7 @@ See `/frontend/CONTRIBUTING.md` for complete patterns. Quick reference:
6. **Code conventions**: Function declarations (not arrow functions) for components/handlers
- Component props should be `interface Props { ... }` (not exported) unless the interface needs to be used outside the component
- Separate render logic from business logic (component.tsx + useComponent.ts + helpers.ts)
- Colocate state when possible and avoid create large components, use sub-components ( local `/components` folder next to the parent component ) when sensible
- Colocate state when possible and avoid creating large components, use sub-components ( local `/components` folder next to the parent component ) when sensible
- Avoid large hooks, abstract logic into `helpers.ts` files when sensible
- Use function declarations for components, arrow functions only for callbacks
- No barrel files or `index.ts` re-exports

View File

@@ -219,7 +219,7 @@ See `/frontend/CONTRIBUTING.md` for complete patterns. Quick reference:
6. **Code conventions**: Function declarations (not arrow functions) for components/handlers
- Component props should be `interface Props { ... }` (not exported) unless the interface needs to be used outside the component
- Separate render logic from business logic (component.tsx + useComponent.ts + helpers.ts)
- Colocate state when possible and avoid create large components, use sub-components ( local `/components` folder next to the parent component ) when sensible
- Colocate state when possible and avoid creating large components, use sub-components ( local `/components` folder next to the parent component ) when sensible
- Avoid large hooks, abstract logic into `helpers.ts` files when sensible
- Use function declarations for components, arrow functions only for callbacks
- No barrel files or `index.ts` re-exports

View File

@@ -290,6 +290,11 @@ async def _cache_session(session: ChatSession) -> None:
await async_redis.setex(redis_key, config.session_ttl, session.model_dump_json())
async def cache_chat_session(session: ChatSession) -> None:
    """Write *session* to the Redis cache only; the database is not touched.

    Thin public wrapper over the private ``_cache_session`` helper so callers
    (e.g. streaming code persisting partial responses) can cache without
    triggering a DB upsert.
    """
    await _cache_session(session)
async def _get_session_from_db(session_id: str) -> ChatSession | None:
"""Get a chat session from the database."""
prisma_session = await chat_db.get_chat_session(session_id)

View File

@@ -161,7 +161,7 @@ async def create_session(
async def get_session(
session_id: str,
user_id: Annotated[str | None, Depends(auth.get_user_id)],
) -> SessionDetailResponse | None:
) -> SessionDetailResponse:
"""
Retrieve the details of a specific chat session.
@@ -177,7 +177,7 @@ async def get_session(
"""
session = await get_chat_session(session_id, user_id)
if not session:
return None
raise NotFoundError(f"Session {session_id} not found.")
messages = [message.model_dump() for message in session.messages]
logger.info(

View File

@@ -1,5 +1,6 @@
import asyncio
import logging
import time
from collections.abc import AsyncGenerator
from typing import Any
@@ -27,6 +28,7 @@ from .model import (
ChatMessage,
ChatSession,
Usage,
cache_chat_session,
get_chat_session,
update_session_title,
upsert_chat_session,
@@ -303,6 +305,9 @@ async def stream_chat_completion(
)
accumulated_tool_calls: list[dict[str, Any]] = []
has_saved_assistant_message = False
has_appended_streaming_message = False
last_cache_time = 0.0
last_cache_content_len = 0
# Wrap main logic in try/finally to ensure Langfuse observations are always ended
has_yielded_end = False
@@ -339,6 +344,23 @@ async def stream_chat_completion(
assert assistant_response.content is not None
assistant_response.content += delta
has_received_text = True
if not has_appended_streaming_message:
session.messages.append(assistant_response)
has_appended_streaming_message = True
current_time = time.monotonic()
content_len = len(assistant_response.content)
if (
current_time - last_cache_time >= 1.0
and content_len > last_cache_content_len
):
try:
await cache_chat_session(session)
except Exception as e:
logger.warning(
f"Failed to cache partial session {session.session_id}: {e}"
)
last_cache_time = current_time
last_cache_content_len = content_len
yield chunk
elif isinstance(chunk, StreamTextEnd):
# Emit text-end after text completes
@@ -397,17 +419,25 @@ async def stream_chat_completion(
if has_received_text and not text_streaming_ended:
yield StreamTextEnd(id=text_block_id)
text_streaming_ended = True
# Save assistant message before yielding finish to ensure it's persisted
# even if client disconnects immediately after receiving StreamFinish
if not has_saved_assistant_message:
messages_to_save_early: list[ChatMessage] = []
if accumulated_tool_calls:
assistant_response.tool_calls = accumulated_tool_calls
if assistant_response.content or assistant_response.tool_calls:
assistant_response.tool_calls = (
accumulated_tool_calls
)
if (
not has_appended_streaming_message
and (
assistant_response.content
or assistant_response.tool_calls
)
):
messages_to_save_early.append(assistant_response)
messages_to_save_early.extend(tool_response_messages)
if messages_to_save_early:
session.messages.extend(messages_to_save_early)
logger.info(
@@ -416,9 +446,10 @@ async def stream_chat_completion(
f"tool_calls={len(assistant_response.tool_calls or [])}, "
f"tool_responses={len(tool_response_messages)}"
)
if messages_to_save_early or has_appended_streaming_message:
await upsert_chat_session(session)
has_saved_assistant_message = True
has_yielded_end = True
yield chunk
elif isinstance(chunk, StreamError):
@@ -443,6 +474,27 @@ async def stream_chat_completion(
langfuse.update_current_trace(output=str(tool_response_messages))
langfuse.update_current_span(output=str(tool_response_messages))
except asyncio.CancelledError:
if not has_saved_assistant_message:
if accumulated_tool_calls:
assistant_response.tool_calls = accumulated_tool_calls
if assistant_response.content:
assistant_response.content = (
f"{assistant_response.content}\n\n[interrupted]"
)
else:
assistant_response.content = "[interrupted]"
if not has_appended_streaming_message:
session.messages.append(assistant_response)
if tool_response_messages:
session.messages.extend(tool_response_messages)
try:
await upsert_chat_session(session)
except Exception as e:
logger.warning(
f"Failed to save interrupted session {session.session_id}: {e}"
)
raise
except Exception as e:
logger.error(f"Error during stream: {e!s}", exc_info=True)
@@ -464,15 +516,20 @@ async def stream_chat_completion(
# Add assistant message if it has content or tool calls
if accumulated_tool_calls:
assistant_response.tool_calls = accumulated_tool_calls
if assistant_response.content or assistant_response.tool_calls:
if (
not has_appended_streaming_message
and (assistant_response.content or assistant_response.tool_calls)
):
messages_to_save.append(assistant_response)
# Add tool response messages after assistant message
messages_to_save.extend(tool_response_messages)
if not has_saved_assistant_message:
session.messages.extend(messages_to_save)
await upsert_chat_session(session)
if messages_to_save:
session.messages.extend(messages_to_save)
if messages_to_save or has_appended_streaming_message:
await upsert_chat_session(session)
if not has_yielded_error:
error_message = str(e)
@@ -519,7 +576,10 @@ async def stream_chat_completion(
logger.info(
f"Added {len(accumulated_tool_calls)} tool calls to assistant message"
)
if assistant_response.content or assistant_response.tool_calls:
if (
not has_appended_streaming_message
and (assistant_response.content or assistant_response.tool_calls)
):
messages_to_save.append(assistant_response)
logger.info(
f"Saving assistant message with content_len={len(assistant_response.content or '')}, tool_calls={len(assistant_response.tool_calls or [])}"
@@ -537,6 +597,7 @@ async def stream_chat_completion(
logger.info(
f"Extended session messages, new message_count={len(session.messages)}"
)
if messages_to_save or has_appended_streaming_message:
await upsert_chat_session(session)
else:
logger.info(

View File

@@ -132,6 +132,10 @@ export default function CopilotPage() {
startChatWithPrompt(action);
}
  // Invoked when the session id in the URL cannot be loaded (404):
  // drop the stale id and return to the bare /copilot route so the
  // page can start a fresh session. `replace` (rather than `push`)
  // presumably avoids leaving the dead session URL in history — confirm
  // against the router's semantics.
  function handleSessionNotFound() {
    router.replace("/copilot");
  }
if (!isFlagReady || isChatEnabled === false || !isLoggedIn) {
return null;
}
@@ -145,6 +149,7 @@ export default function CopilotPage() {
className="flex-1"
urlSessionId={pageState.sessionId}
initialPrompt={pageState.initialPrompt}
onSessionNotFound={handleSessionNotFound}
/>
</div>
);

View File

@@ -2,6 +2,7 @@
import { Text } from "@/components/atoms/Text/Text";
import { cn } from "@/lib/utils";
import { useEffect, useRef } from "react";
import { ChatContainer } from "./components/ChatContainer/ChatContainer";
import { ChatErrorState } from "./components/ChatErrorState/ChatErrorState";
import { ChatLoader } from "./components/ChatLoader/ChatLoader";
@@ -11,19 +12,39 @@ export interface ChatProps {
className?: string;
urlSessionId?: string | null;
initialPrompt?: string;
onSessionNotFound?: () => void;
}
export function Chat({ className, urlSessionId, initialPrompt }: ChatProps) {
export function Chat({
className,
urlSessionId,
initialPrompt,
onSessionNotFound,
}: ChatProps) {
const hasHandledNotFoundRef = useRef(false);
const {
messages,
isLoading,
isCreating,
error,
isSessionNotFound,
sessionId,
createSession,
showLoader,
} = useChat({ urlSessionId });
useEffect(
function handleMissingSession() {
if (!onSessionNotFound) return;
if (!urlSessionId) return;
if (!isSessionNotFound || isLoading || isCreating) return;
if (hasHandledNotFoundRef.current) return;
hasHandledNotFoundRef.current = true;
onSessionNotFound();
},
[onSessionNotFound, urlSessionId, isSessionNotFound, isLoading, isCreating],
);
return (
<div className={cn("flex h-full flex-col", className)}>
{/* Main Content */}

View File

@@ -22,6 +22,7 @@ export function useChat({ urlSessionId }: UseChatArgs = {}) {
isLoading,
isCreating,
error,
isSessionNotFound,
createSession,
claimSession,
clearSession: clearSessionBase,
@@ -47,9 +48,9 @@ export function useChat({ urlSessionId }: UseChatArgs = {}) {
sendStreamMessage(
sessionIdFromHook,
"User has successfully logged in.",
() => {},
() => { },
false,
).catch(() => {});
).catch(() => { });
})
.catch(() => {
hasClaimedSessionRef.current = false;
@@ -110,6 +111,7 @@ export function useChat({ urlSessionId }: UseChatArgs = {}) {
isLoading,
isCreating,
error,
isSessionNotFound,
createSession,
clearSession,
loadSession,

View File

@@ -58,6 +58,8 @@ export function useChatSession({
query: {
enabled: !!sessionId,
select: okData,
retry: shouldRetrySessionLoad,
retryDelay: getSessionRetryDelay,
},
});
@@ -141,7 +143,8 @@ export function useChatSession({
const queryOptions = getGetV2GetSessionQueryOptions(id, {
query: {
staleTime: 0, // Force fresh fetch
retry: 1,
retry: shouldRetrySessionLoad,
retryDelay: getSessionRetryDelay,
},
});
const result = await queryClient.fetchQuery(queryOptions);
@@ -223,6 +226,7 @@ export function useChatSession({
isLoading,
isCreating,
error,
isSessionNotFound: isNotFoundError(loadError),
createSession,
loadSession,
refreshSession,
@@ -230,3 +234,32 @@ export function useChatSession({
clearSession,
};
}
/**
 * Returns true when `error` represents an HTTP 404, either as a flat
 * `{ status: 404 }` shape or nested under `{ response: { status: 404 } }`.
 * Any non-object (including null/undefined) is never a not-found error.
 */
function isNotFoundError(error: unknown): boolean {
  if (typeof error !== "object" || error === null) return false;

  const candidate = error as { status?: unknown; response?: unknown };
  if (candidate.status === 404) return true;

  const nested = candidate.response;
  if (typeof nested === "object" && nested !== null) {
    return (nested as { status?: unknown }).status === 404;
  }
  return false;
}
/**
 * Retry predicate for session-load queries: retry ONLY when the failure
 * is a 404 (the session may not be readable yet right after creation),
 * and give up after the failure count exceeds 2.
 */
function shouldRetrySessionLoad(
  failureCount: number,
  error: unknown,
): boolean {
  const retryableNotFound = isNotFoundError(error);
  return retryableNotFound && failureCount <= 2;
}
/**
 * Backoff schedule for session-load retries: 3s before the first retry,
 * 5s before the second, and no delay for any attempt after that.
 */
function getSessionRetryDelay(attemptIndex: number): number {
  const delaysMs = [3000, 5000];
  return delaysMs[attemptIndex] ?? 0;
}