refactor: bot review suggestions

This commit is contained in:
Lluis Agusti
2026-01-21 19:28:13 +07:00
parent 89e87de031
commit 81c59c4ed3
24 changed files with 471 additions and 183 deletions

View File

@@ -16,19 +16,31 @@ See `docs/content/platform/getting-started.md` for setup instructions.
- Format Python code with `poetry run format`.
- Format frontend code using `pnpm format`.
## Frontend-specific guidelines
**When working on files in `autogpt_platform/frontend/`, always read and follow the conventions in `autogpt_platform/frontend/CONTRIBUTING.md`.**
## Frontend guidelines:
Key frontend conventions:
See `/frontend/CONTRIBUTING.md` for complete patterns. Quick reference:
1. **Pages**: Create in `src/app/(platform)/feature-name/page.tsx`
- Add `usePageName.ts` hook for logic
- Put sub-components in local `components/` folder
2. **Components**: Structure as `ComponentName/ComponentName.tsx` + `useComponentName.ts` + `helpers.ts`
- Use design system components from `src/components/` (atoms, molecules, organisms)
- Never use `src/components/__legacy__/*`
3. **Data fetching**: Use generated API hooks from `@/app/api/__generated__/endpoints/`
- Regenerate with `pnpm generate:api`
- Pattern: `use{Method}{Version}{OperationName}`
4. **Styling**: Tailwind CSS only, use design tokens, Phosphor Icons only
5. **Testing**: Add Storybook stories for new components, Playwright for E2E
6. **Code conventions**: Function declarations (not arrow functions) for components/handlers
- Component props should be `interface Props { ... }` (not exported) unless the interface needs to be used outside the component
- Separate render logic from business logic (component.tsx + useComponent.ts + helpers.ts)
- Colocate state when possible and avoid creating large components; use sub-components (local `/components` folder next to the parent component) when sensible
- Avoid large hooks, abstract logic into `helpers.ts` files when sensible
- Use function declarations for components and handlers, arrow functions only for small inline callbacks
- Use function declarations for components, arrow functions only for callbacks
- No barrel files or `index.ts` re-exports
See `autogpt_platform/frontend/CONTRIBUTING.md` for complete frontend architecture, patterns, and conventions.
- Do not use `useCallback` or `useMemo` unless strictly needed
- Avoid comments at all times unless the code is very complex
## Testing

View File

@@ -201,7 +201,7 @@ If you get any pushback or hit complex block conditions check the new_blocks gui
3. Write tests alongside the route file
4. Run `poetry run test` to verify
**Frontend feature development:**
### Frontend guidelines:
See `/frontend/CONTRIBUTING.md` for complete patterns. Quick reference:
@@ -217,6 +217,14 @@ See `/frontend/CONTRIBUTING.md` for complete patterns. Quick reference:
4. **Styling**: Tailwind CSS only, use design tokens, Phosphor Icons only
5. **Testing**: Add Storybook stories for new components, Playwright for E2E
6. **Code conventions**: Function declarations (not arrow functions) for components/handlers
- Component props should be `interface Props { ... }` (not exported) unless the interface needs to be used outside the component
- Separate render logic from business logic (component.tsx + useComponent.ts + helpers.ts)
- Colocate state when possible and avoid creating large components; use sub-components (local `/components` folder next to the parent component) when sensible
- Avoid large hooks, abstract logic into `helpers.ts` files when sensible
- Use function declarations for components, arrow functions only for callbacks
- No barrel files or `index.ts` re-exports
- Do not use `useCallback` or `useMemo` unless strictly needed
- Avoid comments at all times unless the code is very complex
### Security Implementation

View File

@@ -222,6 +222,8 @@ async def stream_chat_post(
session = await _validate_and_get_session(session_id, user_id)
async def event_generator() -> AsyncGenerator[str, None]:
chunk_count = 0
first_chunk_type: str | None = None
async for chunk in chat_service.stream_chat_completion(
session_id,
request.message,
@@ -230,7 +232,26 @@ async def stream_chat_post(
session=session, # Pass pre-fetched session to avoid double-fetch
context=request.context,
):
if chunk_count < 3:
logger.info(
"Chat stream chunk",
extra={
"session_id": session_id,
"chunk_type": str(chunk.type),
},
)
if not first_chunk_type:
first_chunk_type = str(chunk.type)
chunk_count += 1
yield chunk.to_sse()
logger.info(
"Chat stream completed",
extra={
"session_id": session_id,
"chunk_count": chunk_count,
"first_chunk_type": first_chunk_type,
},
)
# AI SDK protocol termination
yield "data: [DONE]\n\n"
@@ -275,6 +296,8 @@ async def stream_chat_get(
session = await _validate_and_get_session(session_id, user_id)
async def event_generator() -> AsyncGenerator[str, None]:
chunk_count = 0
first_chunk_type: str | None = None
async for chunk in chat_service.stream_chat_completion(
session_id,
message,
@@ -282,7 +305,26 @@ async def stream_chat_get(
user_id=user_id,
session=session, # Pass pre-fetched session to avoid double-fetch
):
if chunk_count < 3:
logger.info(
"Chat stream chunk",
extra={
"session_id": session_id,
"chunk_type": str(chunk.type),
},
)
if not first_chunk_type:
first_chunk_type = str(chunk.type)
chunk_count += 1
yield chunk.to_sse()
logger.info(
"Chat stream completed",
extra={
"session_id": session_id,
"chunk_count": chunk_count,
"first_chunk_type": first_chunk_type,
},
)
# AI SDK protocol termination
yield "data: [DONE]\n\n"

View File

@@ -6,7 +6,13 @@ from typing import Any
import orjson
from langfuse import get_client, propagate_attributes
from langfuse.openai import openai # type: ignore
from openai import APIConnectionError, APIError, APIStatusError, RateLimitError
from openai import (
APIConnectionError,
APIError,
APIStatusError,
PermissionDeniedError,
RateLimitError,
)
from openai.types.chat import ChatCompletionChunk, ChatCompletionToolParam
from backend.data.understanding import (
@@ -417,6 +423,7 @@ async def stream_chat_completion(
yield chunk
elif isinstance(chunk, StreamError):
has_yielded_error = True
yield chunk
elif isinstance(chunk, StreamUsage):
session.usage.append(
Usage(
@@ -463,8 +470,9 @@ async def stream_chat_completion(
# Add tool response messages after assistant message
messages_to_save.extend(tool_response_messages)
session.messages.extend(messages_to_save)
await upsert_chat_session(session)
if not has_saved_assistant_message:
session.messages.extend(messages_to_save)
await upsert_chat_session(session)
if not has_yielded_error:
error_message = str(e)
@@ -576,6 +584,12 @@ def _is_retryable_error(error: Exception) -> bool:
return False
def _is_region_blocked_error(error: Exception) -> bool:
if isinstance(error, PermissionDeniedError):
return "not available in your region" in str(error).lower()
return "not available in your region" in str(error).lower()
async def _stream_chat_chunks(
session: ChatSession,
tools: list[ChatCompletionToolParam],
@@ -768,7 +782,18 @@ async def _stream_chat_chunks(
f"Error in stream (not retrying): {e!s}",
exc_info=True,
)
error_response = StreamError(errorText=str(e))
error_code = None
error_text = str(e)
if _is_region_blocked_error(e):
error_code = "MODEL_NOT_AVAILABLE_REGION"
error_text = (
"This model is not available in your region. "
"Please connect via VPN and try again."
)
error_response = StreamError(
errorText=error_text,
code=error_code,
)
yield error_response
yield StreamFinish()
return

View File

@@ -2,6 +2,7 @@
import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { Text } from "@/components/atoms/Text/Text";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { useRouter } from "next/navigation";
import { useEffect, useRef } from "react";
@@ -16,6 +17,7 @@ function wait(ms: number): Promise<void> {
export default function LogoutPage() {
const { logOut } = useSupabase();
const { toast } = useToast();
const router = useRouter();
const hasStartedRef = useRef(false);
@@ -24,13 +26,21 @@ export default function LogoutPage() {
hasStartedRef.current = true;
async function runLogout() {
await logOut();
await wait(LOGOUT_REDIRECT_DELAY_MS);
router.replace("/login");
try {
await logOut();
} catch {
toast({
title: "Failed to log out. Redirecting to login.",
variant: "destructive",
});
} finally {
await wait(LOGOUT_REDIRECT_DELAY_MS);
router.replace("/login");
}
}
void runLogout();
}, []);
}, [logOut, router, toast]);
return (
<div className="flex min-h-screen items-center justify-center px-4">

View File

@@ -43,7 +43,7 @@ export function MobileDrawer({
<div className="shrink-0 border-b border-zinc-200 p-4">
<div className="flex items-center justify-between">
<Drawer.Title className="text-lg font-semibold text-zinc-800">
Your tasks
Your chats
</Drawer.Title>
<Button
variant="icon"

View File

@@ -12,6 +12,7 @@ export async function signup(
password: string,
confirmPassword: string,
agreeToTerms: boolean,
isChatEnabled: boolean,
) {
try {
const parsed = signupFormSchema.safeParse({
@@ -59,7 +60,9 @@ export async function signup(
}
const isOnboardingEnabled = await shouldShowOnboarding();
const next = isOnboardingEnabled ? "/onboarding" : getHomepageRoute();
const next = isOnboardingEnabled
? "/onboarding"
: getHomepageRoute(isChatEnabled);
return { success: true, next };
} catch (err) {

View File

@@ -108,6 +108,7 @@ export function useSignupPage() {
data.password,
data.confirmPassword,
data.agreeToTerms,
isChatEnabled === true,
);
setIsLoading(false);

View File

@@ -141,51 +141,6 @@
}
}
@keyframes shimmer {
0% {
background-position: -200% 0;
}
100% {
background-position: 200% 0;
}
}
@keyframes l3 {
25% {
background-position:
0 0,
100% 100%,
100% calc(100% - 5px);
}
50% {
background-position:
0 100%,
100% 100%,
0 calc(100% - 5px);
}
75% {
background-position:
0 100%,
100% 0,
100% 5px;
}
}
.loader {
width: 80px;
height: 70px;
border: 5px solid transparent;
padding: 0 8px;
box-sizing: border-box;
background:
linear-gradient(rgb(15 23 42) 0 0) 0 0/8px 20px,
linear-gradient(rgb(15 23 42) 0 0) 100% 0/8px 20px,
radial-gradient(farthest-side, rgb(15 23 42) 90%, #0000) 0 5px/8px 8px
content-box,
transparent;
background-repeat: no-repeat;
animation: l3 2s infinite linear;
}
input[type="number"]::-webkit-outer-spin-button,
input[type="number"]::-webkit-inner-spin-button {

View File

@@ -1,25 +1,19 @@
"use client";
import { getHomepageRoute } from "@/lib/constants";
import {
Flag,
type FlagValues,
useGetFlag,
} from "@/services/feature-flags/use-get-flag";
import { useFlags } from "launchdarkly-react-client-sdk";
import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
import { useRouter } from "next/navigation";
import { useEffect } from "react";
export default function Page() {
const isChatEnabled = useGetFlag(Flag.CHAT);
const flags = useFlags<FlagValues>();
const router = useRouter();
const homepageRoute = getHomepageRoute(isChatEnabled);
const envEnabled = process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true";
const clientId = process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID;
const isLaunchDarklyConfigured = envEnabled && Boolean(clientId);
const isFlagReady =
!isLaunchDarklyConfigured || flags[Flag.CHAT] !== undefined;
!isLaunchDarklyConfigured || typeof isChatEnabled === "boolean";
useEffect(
function redirectToHomepage() {

View File

@@ -1,8 +1,9 @@
import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessionDetailResponse";
import { Button } from "@/components/atoms/Button/Button";
import { Text } from "@/components/atoms/Text/Text";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { useBreakpoint } from "@/lib/hooks/useBreakpoint";
import { cn } from "@/lib/utils";
import { useCallback } from "react";
import { usePageContext } from "../../usePageContext";
import { ChatInput } from "../ChatInput/ChatInput";
import { MessageList } from "../MessageList/MessageList";
import { useChatContainer } from "./useChatContainer";
@@ -20,26 +21,25 @@ export function ChatContainer({
initialPrompt,
className,
}: ChatContainerProps) {
const { messages, streamingChunks, isStreaming, sendMessage, stopStreaming } =
useChatContainer({
sessionId,
initialMessages,
initialPrompt,
});
const {
messages,
streamingChunks,
isStreaming,
stopStreaming,
isRegionBlockedModalOpen,
sendMessageWithContext,
handleRegionModalOpenChange,
handleRegionModalClose,
} = useChatContainer({
sessionId,
initialMessages,
initialPrompt,
});
const { capturePageContext } = usePageContext();
const breakpoint = useBreakpoint();
const isMobile =
breakpoint === "base" || breakpoint === "sm" || breakpoint === "md";
const sendMessageWithContext = useCallback(
async (content: string, isUserMessage: boolean = true) => {
const context = capturePageContext();
await sendMessage(content, isUserMessage, context);
},
[sendMessage, capturePageContext],
);
return (
<div
className={cn(
@@ -47,6 +47,32 @@ export function ChatContainer({
className,
)}
>
<Dialog
title="Service unavailable"
controlled={{
isOpen: isRegionBlockedModalOpen,
set: handleRegionModalOpenChange,
}}
onClose={handleRegionModalClose}
>
<Dialog.Content>
<div className="flex flex-col gap-4">
<Text variant="body">
This model is not available in your region. Please connect via VPN
and try again.
</Text>
<div className="flex justify-end">
<Button
type="button"
variant="primary"
onClick={handleRegionModalClose}
>
Got it
</Button>
</div>
</div>
</Dialog.Content>
</Dialog>
{/* Messages - Scrollable */}
<div className="relative flex min-h-0 flex-1 flex-col">
<div className="flex min-h-full flex-col justify-end">

View File

@@ -1,6 +1,6 @@
import { toast } from "sonner";
import { StreamChunk } from "../../useChatStream";
import type { HandlerDependencies } from "./useChatContainer.handlers";
import type { HandlerDependencies } from "./handlers";
import {
handleError,
handleLoginNeeded,
@@ -9,12 +9,30 @@ import {
handleTextEnded,
handleToolCallStart,
handleToolResponse,
} from "./useChatContainer.handlers";
isRegionBlockedError,
} from "./handlers";
export function createStreamEventDispatcher(
deps: HandlerDependencies,
): (chunk: StreamChunk) => void {
return function dispatchStreamEvent(chunk: StreamChunk): void {
if (
chunk.type === "text_chunk" ||
chunk.type === "tool_call_start" ||
chunk.type === "tool_response" ||
chunk.type === "login_needed" ||
chunk.type === "need_login" ||
chunk.type === "error"
) {
if (!deps.hasResponseRef.current) {
console.info("[ChatStream] First response chunk:", {
type: chunk.type,
sessionId: deps.sessionId,
});
}
deps.hasResponseRef.current = true;
}
switch (chunk.type) {
case "text_chunk":
handleTextChunk(chunk, deps);
@@ -38,15 +56,23 @@ export function createStreamEventDispatcher(
break;
case "stream_end":
console.info("[ChatStream] Stream ended:", {
sessionId: deps.sessionId,
hasResponse: deps.hasResponseRef.current,
chunkCount: deps.streamingChunksRef.current.length,
});
handleStreamEnd(chunk, deps);
break;
case "error":
const isRegionBlocked = isRegionBlockedError(chunk);
handleError(chunk, deps);
// Show toast at dispatcher level to avoid circular dependencies
toast.error("Chat Error", {
description: chunk.message || chunk.content || "An error occurred",
});
if (!isRegionBlocked) {
toast.error("Chat Error", {
description: chunk.message || chunk.content || "An error occurred",
});
}
break;
case "usage":

View File

@@ -7,15 +7,30 @@ import {
parseToolResponse,
} from "./helpers";
// Type guard: narrows a ChatMessageData union member to the "tool_call"
// variant so callers can safely read toolId/toolName/arguments on it.
function isToolCallMessage(
  message: ChatMessageData,
): message is Extract<ChatMessageData, { type: "tool_call" }> {
  return message.type === "tool_call";
}
// State setters and refs injected into the stream-event handlers so they can
// update chat UI state without importing the owning hook directly.
export interface HandlerDependencies {
  // Marks that at least one text chunk arrived for the current stream.
  setHasTextChunks: Dispatch<SetStateAction<boolean>>;
  // In-progress streamed text chunks rendered while the stream is live.
  setStreamingChunks: Dispatch<SetStateAction<string[]>>;
  // Ref mirror of the streaming chunks, readable synchronously by handlers.
  streamingChunksRef: MutableRefObject<string[]>;
  // True once any meaningful response chunk has been received.
  hasResponseRef: MutableRefObject<boolean>;
  // Full message list for the session.
  setMessages: Dispatch<SetStateAction<ChatMessageData[]>>;
  // Tracks whether a stream has been initiated (drives loading UI).
  setIsStreamingInitiated: Dispatch<SetStateAction<boolean>>;
  // Opens the "model not available in your region" modal.
  setIsRegionBlockedModalOpen: Dispatch<SetStateAction<boolean>>;
  // Chat session the handlers are operating on.
  sessionId: string;
}
// Detects a "model not available in your region" error, either via the
// explicit error code or by matching the error text itself.
export function isRegionBlockedError(chunk: StreamChunk): boolean {
  const hasRegionCode = chunk.code === "MODEL_NOT_AVAILABLE_REGION";
  const text = chunk.message || chunk.content;
  const mentionsRegionBlock =
    typeof text === "string" &&
    text.toLowerCase().includes("not available in your region");
  return hasRegionCode || mentionsRegionBlock;
}
export function handleTextChunk(chunk: StreamChunk, deps: HandlerDependencies) {
if (!chunk.content) return;
deps.setHasTextChunks(true);
@@ -51,14 +66,41 @@ export function handleToolCallStart(
chunk: StreamChunk,
deps: HandlerDependencies,
) {
const toolCallMessage: ChatMessageData = {
const toolCallMessage: Extract<ChatMessageData, { type: "tool_call" }> = {
type: "tool_call",
toolId: chunk.tool_id || `tool-${Date.now()}-${chunk.idx || 0}`,
toolName: chunk.tool_name || "Executing...",
toolName: chunk.tool_name || "Executing",
arguments: chunk.arguments || {},
timestamp: new Date(),
};
deps.setMessages((prev) => [...prev, toolCallMessage]);
function updateToolCallMessages(prev: ChatMessageData[]) {
const existingIndex = prev.findIndex(
function findToolCallIndex(msg) {
return isToolCallMessage(msg) && msg.toolId === toolCallMessage.toolId;
},
);
if (existingIndex === -1) {
return [...prev, toolCallMessage];
}
const nextMessages = [...prev];
const existing = nextMessages[existingIndex];
if (!isToolCallMessage(existing)) return prev;
const nextArguments =
toolCallMessage.arguments &&
Object.keys(toolCallMessage.arguments).length > 0
? toolCallMessage.arguments
: existing.arguments;
nextMessages[existingIndex] = {
...existing,
toolName: toolCallMessage.toolName || existing.toolName,
arguments: nextArguments,
timestamp: toolCallMessage.timestamp,
};
return nextMessages;
}
deps.setMessages(updateToolCallMessages);
}
export function handleToolResponse(
@@ -118,9 +160,13 @@ export function handleToolResponse(
const toolCallIndex = prev.findIndex(
(msg) => msg.type === "tool_call" && msg.toolId === chunk.tool_id,
);
const hasResponse = prev.some(
(msg) => msg.type === "tool_response" && msg.toolId === chunk.tool_id,
);
if (hasResponse) return prev;
if (toolCallIndex !== -1) {
const newMessages = [...prev];
newMessages[toolCallIndex] = responseMessage;
newMessages.splice(toolCallIndex + 1, 0, responseMessage);
return newMessages;
}
return [...prev, responseMessage];
@@ -147,6 +193,17 @@ export function handleStreamEnd(
deps: HandlerDependencies,
) {
const completedContent = deps.streamingChunksRef.current.join("");
if (!completedContent.trim() && !deps.hasResponseRef.current) {
deps.setMessages((prev) => [
...prev,
{
type: "message",
role: "assistant",
content: "No response received. Please try again.",
timestamp: new Date(),
},
]);
}
if (completedContent.trim()) {
const assistantMessage: ChatMessageData = {
type: "message",
@@ -165,6 +222,9 @@ export function handleStreamEnd(
export function handleError(chunk: StreamChunk, deps: HandlerDependencies) {
const errorMessage = chunk.message || chunk.content || "An error occurred";
console.error("Stream error:", errorMessage);
if (isRegionBlockedError(chunk)) {
deps.setIsRegionBlockedModalOpen(true);
}
deps.setIsStreamingInitiated(false);
deps.setHasTextChunks(false);
deps.setStreamingChunks([]);

View File

@@ -31,6 +31,9 @@ export function useChatContainer({
const [streamingChunks, setStreamingChunks] = useState<string[]>([]);
const [hasTextChunks, setHasTextChunks] = useState(false);
const [isStreamingInitiated, setIsStreamingInitiated] = useState(false);
const [isRegionBlockedModalOpen, setIsRegionBlockedModalOpen] =
useState(false);
const hasResponseRef = useRef(false);
const streamingChunksRef = useRef<string[]>([]);
const previousSessionIdRef = useRef<string | null>(null);
const {
@@ -42,14 +45,16 @@ export function useChatContainer({
useEffect(() => {
if (sessionId !== previousSessionIdRef.current) {
stopStreaming(previousSessionIdRef.current ?? undefined, true);
previousSessionIdRef.current = sessionId;
setMessages([]);
setStreamingChunks([]);
streamingChunksRef.current = [];
setHasTextChunks(false);
setIsStreamingInitiated(false);
hasResponseRef.current = false;
}
}, [sessionId]);
}, [sessionId, stopStreaming]);
const allMessages = useMemo(() => {
const processedInitialMessages: ChatMessageData[] = [];
@@ -168,6 +173,7 @@ export function useChatContainer({
console.error("[useChatContainer] Cannot send message: no session ID");
return;
}
setIsRegionBlockedModalOpen(false);
if (isUserMessage) {
const userMessage = createUserMessage(content);
setMessages((prev) => [...filterAuthMessages(prev), userMessage]);
@@ -178,12 +184,15 @@ export function useChatContainer({
streamingChunksRef.current = [];
setHasTextChunks(false);
setIsStreamingInitiated(true);
hasResponseRef.current = false;
const dispatcher = createStreamEventDispatcher({
setHasTextChunks,
setStreamingChunks,
streamingChunksRef,
hasResponseRef,
setMessages,
setIsRegionBlockedModalOpen,
sessionId,
setIsStreamingInitiated,
});
@@ -243,11 +252,32 @@ export function useChatContainer({
],
);
async function sendMessageWithContext(
content: string,
isUserMessage: boolean = true,
) {
const context = capturePageContext();
await sendMessage(content, isUserMessage, context);
}
function handleRegionModalOpenChange(open: boolean) {
setIsRegionBlockedModalOpen(open);
}
function handleRegionModalClose() {
setIsRegionBlockedModalOpen(false);
}
return {
messages: allMessages,
streamingChunks,
isStreaming,
error,
isRegionBlockedModalOpen,
setIsRegionBlockedModalOpen,
sendMessageWithContext,
handleRegionModalOpenChange,
handleRegionModalClose,
sendMessage,
stopStreaming: handleStopStreaming,
};

View File

@@ -1,13 +0,0 @@
.loader {
width: 20px;
aspect-ratio: 1;
border-radius: 50%;
background: #000;
box-shadow: 0 0 0 0 #0004;
animation: l1 1s infinite;
}
@keyframes l1 {
100% {
box-shadow: 0 0 0 30px #0000;
}
}

View File

@@ -1,5 +1,12 @@
import styles from "./ChatLoader.module.css";
import { Text } from "@/components/atoms/Text/Text";
// Slow-response indicator shown while the assistant is still thinking.
// The merged diff had two consecutive `return` statements; the first
// referenced `styles.loader` from the CSS module deleted in this commit,
// so only the new shimmer-text variant is kept.
export function ChatLoader() {
  return (
    <Text
      variant="small"
      className="text-xs bg-gradient-to-r from-neutral-600 via-neutral-500 to-neutral-600 bg-[length:200%_100%] bg-clip-text text-transparent [animation:shimmer_2s_ease-in-out_infinite]"
    >
      Taking a bit more time...
    </Text>
  );
}

View File

@@ -22,6 +22,21 @@ import { ToolCallMessage } from "../ToolCallMessage/ToolCallMessage";
import { ToolResponseMessage } from "../ToolResponseMessage/ToolResponseMessage";
import { UserChatBubble } from "../UserChatBubble/UserChatBubble";
import { useChatMessage, type ChatMessageData } from "./useChatMessage";
// Removes <internal_reasoning>…</internal_reasoning> spans from assistant
// output, then collapses runs of 3+ newlines left behind and trims the edges.
function stripInternalReasoning(content: string): string {
  return content
    .replace(/<internal_reasoning>[\s\S]*?<\/internal_reasoning>/gi, "")
    .replace(/\n{3,}/g, "\n\n")
    .trim();
}
// Resolves the text to render for a message: non-"message" entries render
// nothing; user content is shown verbatim; assistant content has internal
// reasoning stripped first.
function getDisplayContent(message: ChatMessageData, isUser: boolean): string {
  if (message.type !== "message") return "";
  return isUser ? message.content : stripInternalReasoning(message.content);
}
export interface ChatMessageProps {
message: ChatMessageData;
messages?: ChatMessageData[];
@@ -56,6 +71,7 @@ export function ChatMessage({
isLoginNeeded,
isCredentialsNeeded,
} = useChatMessage(message);
const displayContent = getDisplayContent(message, isUser);
const handleAllCredentialsComplete = useCallback(
function handleAllCredentialsComplete() {
@@ -81,17 +97,22 @@ export function ChatMessage({
}
}
const handleCopy = useCallback(async () => {
const handleCopy = useCallback(async function handleCopy() {
if (message.type !== "message") return;
if (!displayContent) return;
try {
await navigator.clipboard.writeText(message.content);
await navigator.clipboard.writeText(displayContent);
setCopied(true);
setTimeout(() => setCopied(false), 2000);
} catch (error) {
console.error("Failed to copy:", error);
}
}, [message]);
}, [displayContent, message]);
function isLongResponse(content: string): boolean {
return content.split("\n").length > 5;
}
const handleTryAgain = useCallback(() => {
if (message.type !== "message" || !onSendMessage) return;
@@ -278,11 +299,11 @@ export function ChatMessage({
>
{isUser ? (
<UserChatBubble>
<MarkdownContent content={message.content} />
<MarkdownContent content={displayContent} />
</UserChatBubble>
) : (
<AIChatBubble>
<MarkdownContent content={message.content} />
<MarkdownContent content={displayContent} />
{agentOutput && agentOutput.type === "tool_response" && (
<div className="mt-4">
<ToolResponseMessage
@@ -310,20 +331,22 @@ export function ChatMessage({
<ArrowsClockwiseIcon className="size-4 text-zinc-600" />
</Button>
)}
{(isUser || isFinalMessage) && (
<Button
variant="ghost"
size="icon"
onClick={handleCopy}
aria-label="Copy message"
>
{copied ? (
<CheckIcon className="size-4 text-green-600" />
) : (
<CopyIcon className="size-4 text-zinc-600" />
)}
</Button>
)}
{!isUser &&
isFinalMessage &&
isLongResponse(displayContent) && (
<Button
variant="ghost"
size="icon"
onClick={handleCopy}
aria-label="Copy message"
>
{copied ? (
<CheckIcon className="size-4 text-green-600" />
) : (
<CopyIcon className="size-4 text-zinc-600" />
)}
</Button>
)}
</div>
</div>
</div>

View File

@@ -1,6 +1,7 @@
import { cn } from "@/lib/utils";
import { useEffect, useRef, useState } from "react";
import { AIChatBubble } from "../AIChatBubble/AIChatBubble";
import { ChatLoader } from "../ChatLoader/ChatLoader";
export interface ThinkingMessageProps {
className?: string;
@@ -37,12 +38,7 @@ export function ThinkingMessage({ className }: ThinkingMessageProps) {
<AIChatBubble>
<div className="transition-all duration-500 ease-in-out">
{showSlowLoader ? (
<div className="flex flex-col items-center gap-3 py-2">
<div className="loader" style={{ flexShrink: 0 }} />
<p className="text-sm text-slate-700">
Taking a bit longer to think, wait a moment please
</p>
</div>
<ChatLoader />
) : (
<span
className="inline-block bg-gradient-to-r from-neutral-400 via-neutral-600 to-neutral-400 bg-clip-text text-transparent"

View File

@@ -21,6 +21,12 @@ import {
* @returns A human-friendly action phrase in present continuous tense
*/
export function getToolActionPhrase(toolName: string): string {
const normalizedName = toolName.trim();
if (!normalizedName) return "Executing";
if (normalizedName.toLowerCase().startsWith("executing")) {
return normalizedName;
}
if (normalizedName.toLowerCase() === "unknown") return "Executing";
const toolActionPhrases: Record<string, string> = {
add_understanding: "Updating your business information",
create_agent: "Creating a new agent",

View File

@@ -53,7 +53,10 @@ export function formatToolResponse(result: unknown, toolName: string): string {
switch (responseType) {
case "agents_found":
const agents = (response.agents as Array<{ name: string }>) || [];
const count = (response.count as number) || 0;
const count =
typeof response.count === "number" && !isNaN(response.count)
? response.count
: agents.length;
if (count === 0) {
return "No agents found matching your search.";
}
@@ -71,7 +74,10 @@ export function formatToolResponse(result: unknown, toolName: string): string {
case "block_list":
const blocks = (response.blocks as Array<{ name: string }>) || [];
const blockCount = (response.count as number) || 0;
const blockCount =
typeof response.count === "number" && !isNaN(response.count)
? response.count
: blocks.length;
if (blockCount === 0) {
return "No blocks found matching your search.";
}
@@ -91,7 +97,10 @@ export function formatToolResponse(result: unknown, toolName: string): string {
case "doc_search_results":
const docResults = (response.results as Array<{ title: string }>) || [];
const docCount = (response.count as number) || 0;
const docCount =
typeof response.count === "number" && !isNaN(response.count)
? response.count
: docResults.length;
if (docCount === 0) {
return "No documentation found matching your search.";
}
@@ -194,9 +203,9 @@ export function formatToolResponse(result: unknown, toolName: string): string {
const outputAgentName = (response.agent_name as string) || "Agent";
const execution = response.execution as
| {
status?: string;
outputs?: Record<string, unknown>;
}
status?: string;
outputs?: Record<string, unknown>;
}
| undefined;
if (execution) {
const status = execution.status || "completed";

View File

@@ -7,20 +7,22 @@ const INITIAL_RETRY_DELAY = 1000;
export interface StreamChunk {
type:
| "text_chunk"
| "text_ended"
| "tool_call"
| "tool_call_start"
| "tool_response"
| "login_needed"
| "need_login"
| "credentials_needed"
| "error"
| "usage"
| "stream_end";
| "text_chunk"
| "text_ended"
| "tool_call"
| "tool_call_start"
| "tool_response"
| "login_needed"
| "need_login"
| "credentials_needed"
| "error"
| "usage"
| "stream_end";
timestamp?: string;
content?: string;
message?: string;
code?: string;
details?: Record<string, unknown>;
tool_id?: string;
tool_name?: string;
arguments?: ToolArguments;
@@ -49,30 +51,30 @@ type VercelStreamChunk =
| { type: "text-end"; id: string }
| { type: "tool-input-start"; toolCallId: string; toolName: string }
| {
type: "tool-input-available";
toolCallId: string;
toolName: string;
input: ToolArguments;
}
type: "tool-input-available";
toolCallId: string;
toolName: string;
input: ToolArguments;
}
| {
type: "tool-output-available";
toolCallId: string;
toolName?: string;
output: ToolResult;
success?: boolean;
}
type: "tool-output-available";
toolCallId: string;
toolName?: string;
output: ToolResult;
success?: boolean;
}
| {
type: "usage";
promptTokens: number;
completionTokens: number;
totalTokens: number;
}
type: "usage";
promptTokens: number;
completionTokens: number;
totalTokens: number;
}
| {
type: "error";
errorText: string;
code?: string;
details?: Record<string, unknown>;
};
type: "error";
errorText: string;
code?: string;
details?: Record<string, unknown>;
};
const LEGACY_STREAM_TYPES = new Set<StreamChunk["type"]>([
"text_chunk",
@@ -138,8 +140,18 @@ function normalizeStreamChunk(
return { type: "stream_end" };
case "start":
case "text-start":
case "tool-input-start":
return null;
case "tool-input-start":
const toolInputStart = chunk as Extract<
VercelStreamChunk,
{ type: "tool-input-start" }
>;
return {
type: "tool_call_start",
tool_id: toolInputStart.toolCallId,
tool_name: toolInputStart.toolName,
arguments: {},
};
}
}
@@ -311,18 +323,34 @@ export function useChatStream() {
signal: abortController.signal,
});
console.info("[useChatStream] Stream response", {
sessionId,
status: response.status,
ok: response.ok,
contentType: response.headers.get("content-type"),
});
if (!response.ok) {
const errorText = await response.text();
console.warn("[useChatStream] Stream response error", {
sessionId,
status: response.status,
errorText,
});
throw new Error(errorText || `HTTP ${response.status}`);
}
if (!response.body) {
console.warn("[useChatStream] Response body is null", { sessionId });
throw new Error("Response body is null");
}
const reader = response.body.getReader();
const decoder = new TextDecoder();
let buffer = "";
let receivedChunkCount = 0;
let firstChunkAt: number | null = null;
let loggedLineCount = 0;
return new Promise<void>((resolve, reject) => {
let didDispatchStreamEnd = false;
@@ -346,6 +374,13 @@ export function useChatStream() {
if (done) {
cleanup();
console.info("[useChatStream] Stream closed", {
sessionId,
receivedChunkCount,
timeSinceStart: requestStartTimeRef.current
? Date.now() - requestStartTimeRef.current
: null,
});
dispatchStreamEnd();
retryCountRef.current = 0;
stopStreaming();
@@ -360,8 +395,22 @@ export function useChatStream() {
for (const line of lines) {
if (line.startsWith("data: ")) {
const data = line.slice(6);
if (loggedLineCount < 3) {
console.info("[useChatStream] Raw stream line", {
sessionId,
data: data.length > 300 ? `${data.slice(0, 300)}...` : data,
});
loggedLineCount += 1;
}
if (data === "[DONE]") {
cleanup();
console.info("[useChatStream] Stream done marker", {
sessionId,
receivedChunkCount,
timeSinceStart: requestStartTimeRef.current
? Date.now() - requestStartTimeRef.current
: null,
});
dispatchStreamEnd();
retryCountRef.current = 0;
stopStreaming();
@@ -378,6 +427,18 @@ export function useChatStream() {
continue;
}
if (!firstChunkAt) {
firstChunkAt = Date.now();
console.info("[useChatStream] First stream chunk", {
sessionId,
chunkType: chunk.type,
timeSinceStart: requestStartTimeRef.current
? firstChunkAt - requestStartTimeRef.current
: null,
});
}
receivedChunkCount += 1;
// Call the chunk handler
onChunk(chunk);
@@ -385,6 +446,13 @@ export function useChatStream() {
if (chunk.type === "stream_end") {
didDispatchStreamEnd = true;
cleanup();
console.info("[useChatStream] Stream end chunk", {
sessionId,
receivedChunkCount,
timeSinceStart: requestStartTimeRef.current
? Date.now() - requestStartTimeRef.current
: null,
});
retryCountRef.current = 0;
stopStreaming();
resolve();

View File

@@ -10,11 +10,10 @@ export function AccountLogoutOption() {
}
return (
<div
<button
className="inline-flex w-full items-center justify-start gap-2.5"
onClick={handleLogout}
role="button"
tabIndex={0}
type="button"
>
<div className="relative h-4 w-4">
<IconLogOut />
@@ -22,6 +21,6 @@ export function AccountLogoutOption() {
<div className="font-sans text-base font-medium leading-normal text-neutral-800 dark:text-neutral-200">
Log out
</div>
</div>
</button>
);
}

View File

@@ -17,7 +17,7 @@ export function MobileNavbarLogoutItem({ icon, text }: Props) {
}
return (
<div className="w-full" onClick={handleLogout}>
<button className="w-full" onClick={handleLogout} type="button">
<div className="inline-flex w-full items-center justify-start gap-4 py-2 hover:rounded hover:bg-[#e0e0e0]">
{getAccountMenuOptionIcon(icon)}
<div className="relative">
@@ -26,6 +26,6 @@ export function MobileNavbarLogoutItem({ icon, text }: Props) {
</div>
</div>
</div>
</div>
</button>
);
}

View File

@@ -15,5 +15,6 @@ export const NAVBAR_HEIGHT_PX = 60;
// Routes
// Resolves the homepage route from the CHAT feature flag. The merged diff
// had left a stale early `return "/library";` that made the later branches
// unreachable; it is removed here.
export function getHomepageRoute(isChatEnabled?: boolean | null): string {
  if (isChatEnabled === true) return "/copilot";
  if (isChatEnabled === false) return "/library";
  // Flag not yet resolved (undefined/null): fall back to the root route.
  return "/";
}