+
diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/helpers.ts b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/helpers.ts
index cd05563369..32cdc3ea7e 100644
--- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/helpers.ts
+++ b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/helpers.ts
@@ -1,6 +1,33 @@
+import {
+ SessionKey,
+ sessionStorage,
+} from "@/services/storage/session-storage";
import type { ToolResult } from "@/types/chat";
import type { ChatMessageData } from "../ChatMessage/useChatMessage";
+export function hasSentInitialPrompt(sessionId: string): boolean {
+ try {
+ const sent = JSON.parse(
+ sessionStorage.get(SessionKey.CHAT_SENT_INITIAL_PROMPTS) || "{}",
+ );
+ return sent[sessionId] === true;
+ } catch {
+ return false;
+ }
+}
+
+export function markInitialPromptSent(sessionId: string): void {
+ try {
+ const sent = JSON.parse(
+ sessionStorage.get(SessionKey.CHAT_SENT_INITIAL_PROMPTS) || "{}",
+ );
+ sent[sessionId] = true;
+ sessionStorage.set(SessionKey.CHAT_SENT_INITIAL_PROMPTS, JSON.stringify(sent));
+ } catch {
+ // Ignore storage errors
+ }
+}
+
export function removePageContext(content: string): string {
// Remove "Page URL: ..." pattern at start of line (case insensitive, handles various formats)
let cleaned = content.replace(/^\s*Page URL:\s*[^\n\r]*/gim, "");
diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/useChatContainer.handlers.ts b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/useChatContainer.handlers.ts
index 6d4b97ea89..74b622ac1f 100644
--- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/useChatContainer.handlers.ts
+++ b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/useChatContainer.handlers.ts
@@ -30,18 +30,9 @@ export function handleTextEnded(
_chunk: StreamChunk,
deps: HandlerDependencies,
) {
- console.log("[Text Ended] Saving streamed text as assistant message");
const completedText = deps.streamingChunksRef.current.join("");
if (completedText.trim()) {
deps.setMessages((prev) => {
- const lastMessage = prev[prev.length - 1];
- console.log("[Text Ended] Previous message:", {
- type: lastMessage?.type,
- toolName:
- lastMessage?.type === "tool_call" ? lastMessage.toolName : undefined,
- content: completedText.substring(0, 200),
- });
-
const assistantMessage: ChatMessageData = {
type: "message",
role: "assistant",
@@ -68,22 +59,12 @@ export function handleToolCallStart(
timestamp: new Date(),
};
deps.setMessages((prev) => [...prev, toolCallMessage]);
- console.log("[Tool Call Start]", {
- toolId: toolCallMessage.toolId,
- toolName: toolCallMessage.toolName,
- timestamp: new Date().toISOString(),
- });
}
export function handleToolResponse(
chunk: StreamChunk,
deps: HandlerDependencies,
) {
- console.log("[Tool Response] Received:", {
- toolId: chunk.tool_id,
- toolName: chunk.tool_name,
- timestamp: new Date().toISOString(),
- });
let toolName = chunk.tool_name || "unknown";
if (!chunk.tool_name || chunk.tool_name === "unknown") {
deps.setMessages((prev) => {
@@ -140,19 +121,8 @@ export function handleToolResponse(
if (toolCallIndex !== -1) {
const newMessages = [...prev];
newMessages[toolCallIndex] = responseMessage;
- console.log(
- "[Tool Response] Replaced tool_call with matching tool_id:",
- chunk.tool_id,
- "at index:",
- toolCallIndex,
- );
return newMessages;
}
- console.warn(
- "[Tool Response] No tool_call found with tool_id:",
- chunk.tool_id,
- "appending instead",
- );
return [...prev, responseMessage];
});
}
@@ -177,50 +147,19 @@ export function handleStreamEnd(
deps: HandlerDependencies,
) {
const completedContent = deps.streamingChunksRef.current.join("");
- // Only save message if there are uncommitted chunks
- // (text_ended already saved if there were tool calls)
if (completedContent.trim()) {
- console.log(
- "[Stream End] Saving remaining streamed text as assistant message",
- );
const assistantMessage: ChatMessageData = {
type: "message",
role: "assistant",
content: completedContent,
timestamp: new Date(),
};
- deps.setMessages((prev) => {
- const updated = [...prev, assistantMessage];
- console.log("[Stream End] Final state:", {
- localMessages: updated.map((m) => ({
- type: m.type,
- ...(m.type === "message" && {
- role: m.role,
- contentLength: m.content.length,
- }),
- ...(m.type === "tool_call" && {
- toolId: m.toolId,
- toolName: m.toolName,
- }),
- ...(m.type === "tool_response" && {
- toolId: m.toolId,
- toolName: m.toolName,
- success: m.success,
- }),
- })),
- streamingChunks: deps.streamingChunksRef.current,
- timestamp: new Date().toISOString(),
- });
- return updated;
- });
- } else {
- console.log("[Stream End] No uncommitted chunks, message already saved");
+ deps.setMessages((prev) => [...prev, assistantMessage]);
}
deps.setStreamingChunks([]);
deps.streamingChunksRef.current = [];
deps.setHasTextChunks(false);
deps.setIsStreamingInitiated(false);
- console.log("[Stream End] Stream complete, messages in local state");
}
export function handleError(chunk: StreamChunk, deps: HandlerDependencies) {
diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/useChatContainer.ts b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/useChatContainer.ts
index 8e7dee7718..c9c9c51a29 100644
--- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/useChatContainer.ts
+++ b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/useChatContainer.ts
@@ -1,14 +1,17 @@
import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessionDetailResponse";
-import { useCallback, useMemo, useRef, useState } from "react";
+import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { toast } from "sonner";
import { useChatStream } from "../../useChatStream";
+import { usePageContext } from "../../usePageContext";
import type { ChatMessageData } from "../ChatMessage/useChatMessage";
import { createStreamEventDispatcher } from "./createStreamEventDispatcher";
import {
createUserMessage,
filterAuthMessages,
+ hasSentInitialPrompt,
isToolCallArray,
isValidMessage,
+ markInitialPromptSent,
parseToolResponse,
removePageContext,
} from "./helpers";
@@ -16,9 +19,10 @@ import {
interface Args {
sessionId: string | null;
initialMessages: SessionDetailResponse["messages"];
+ initialPrompt?: string;
}
-export function useChatContainer({ sessionId, initialMessages }: Args) {
+export function useChatContainer({ sessionId, initialMessages, initialPrompt }: Args) {
const [messages, setMessages] = useState
([]);
const [streamingChunks, setStreamingChunks] = useState([]);
const [hasTextChunks, setHasTextChunks] = useState(false);
@@ -29,7 +33,6 @@ export function useChatContainer({ sessionId, initialMessages }: Args) {
const allMessages = useMemo(() => {
const processedInitialMessages: ChatMessageData[] = [];
- // Map to track tool calls by their ID so we can look up tool names for tool responses
const toolCallMap = new Map();
for (const msg of initialMessages) {
@@ -45,13 +48,9 @@ export function useChatContainer({ sessionId, initialMessages }: Args) {
? new Date(msg.timestamp as string)
: undefined;
- // Remove page context from user messages when loading existing sessions
if (role === "user") {
content = removePageContext(content);
- // Skip user messages that become empty after removing page context
- if (!content.trim()) {
- continue;
- }
+ if (!content.trim()) continue;
processedInitialMessages.push({
type: "message",
role: "user",
@@ -61,19 +60,15 @@ export function useChatContainer({ sessionId, initialMessages }: Args) {
continue;
}
- // Handle assistant messages first (before tool messages) to build tool call map
if (role === "assistant") {
- // Strip tags from content
content = content
.replace(/[\s\S]*?<\/thinking>/gi, "")
.trim();
- // If assistant has tool calls, create tool_call messages for each
if (toolCalls && isToolCallArray(toolCalls) && toolCalls.length > 0) {
for (const toolCall of toolCalls) {
const toolName = toolCall.function.name;
const toolId = toolCall.id;
- // Store tool name for later lookup
toolCallMap.set(toolId, toolName);
try {
@@ -96,7 +91,6 @@ export function useChatContainer({ sessionId, initialMessages }: Args) {
});
}
}
- // Only add assistant message if there's content after stripping thinking tags
if (content.trim()) {
processedInitialMessages.push({
type: "message",
@@ -106,7 +100,6 @@ export function useChatContainer({ sessionId, initialMessages }: Args) {
});
}
} else if (content.trim()) {
- // Assistant message without tool calls, but with content
processedInitialMessages.push({
type: "message",
role: "assistant",
@@ -117,7 +110,6 @@ export function useChatContainer({ sessionId, initialMessages }: Args) {
continue;
}
- // Handle tool messages - look up tool name from tool call map
if (role === "tool") {
const toolCallId = (msg.tool_call_id as string) || "";
const toolName = toolCallMap.get(toolCallId) || "unknown";
@@ -133,7 +125,6 @@ export function useChatContainer({ sessionId, initialMessages }: Args) {
continue;
}
- // Handle other message types (system, etc.)
if (content.trim()) {
processedInitialMessages.push({
type: "message",
@@ -154,7 +145,7 @@ export function useChatContainer({ sessionId, initialMessages }: Args) {
context?: { url: string; content: string },
) {
if (!sessionId) {
- console.error("Cannot send message: no session ID");
+ console.error("[useChatContainer] Cannot send message: no session ID");
return;
}
if (isUserMessage) {
@@ -167,6 +158,7 @@ export function useChatContainer({ sessionId, initialMessages }: Args) {
streamingChunksRef.current = [];
setHasTextChunks(false);
setIsStreamingInitiated(true);
+
const dispatcher = createStreamEventDispatcher({
setHasTextChunks,
setStreamingChunks,
@@ -175,6 +167,7 @@ export function useChatContainer({ sessionId, initialMessages }: Args) {
sessionId,
setIsStreamingInitiated,
});
+
try {
await sendStreamMessage(
sessionId,
@@ -184,8 +177,12 @@ export function useChatContainer({ sessionId, initialMessages }: Args) {
context,
);
} catch (err) {
- console.error("Failed to send message:", err);
+ console.error("[useChatContainer] Failed to send message:", err);
setIsStreamingInitiated(false);
+
+ // Don't show error toast for AbortError (expected during cleanup)
+ if (err instanceof Error && err.name === "AbortError") return;
+
const errorMessage =
err instanceof Error ? err.message : "Failed to send message";
toast.error("Failed to send message", {
@@ -196,6 +193,22 @@ export function useChatContainer({ sessionId, initialMessages }: Args) {
[sessionId, sendStreamMessage],
);
+ const { capturePageContext } = usePageContext();
+
+ // Send initial prompt if provided (for new sessions from homepage)
+ useEffect(
+ function handleInitialPrompt() {
+ if (!initialPrompt || !sessionId) return;
+ if (initialMessages.length > 0) return;
+ if (hasSentInitialPrompt(sessionId)) return;
+
+ markInitialPromptSent(sessionId);
+ const context = capturePageContext();
+ sendMessage(initialPrompt, true, context);
+ },
+ [initialPrompt, sessionId, initialMessages.length, sendMessage, capturePageContext],
+ );
+
return {
messages: allMessages,
streamingChunks,
diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatInput/ChatInput.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatInput/ChatInput.tsx
index 390c8335a6..f81a8a8025 100644
--- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatInput/ChatInput.tsx
+++ b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatInput/ChatInput.tsx
@@ -1,3 +1,4 @@
+import { Button } from "@/components/atoms/Button/Button";
import { Input } from "@/components/atoms/Input/Input";
import { cn } from "@/lib/utils";
import { ArrowUpIcon } from "@phosphor-icons/react";
@@ -24,41 +25,43 @@ export function ChatInput({
inputId,
});
- return (
-
-
setValue(e.target.value)}
- onKeyDown={handleKeyDown}
- placeholder={placeholder}
- disabled={disabled}
- rows={1}
- wrapperClassName="mb-0 relative"
- className="pr-12"
- />
-
- Press Enter to send, Shift+Enter for new line
-
+ function handleSubmit(e: React.FormEvent
) {
+ e.preventDefault();
+ handleSend();
+ }
-
-
+ return (
+
);
}
diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/MessageList/components/MessageItem/useMessageItem.ts b/autogpt_platform/frontend/src/components/contextual/Chat/components/MessageList/components/MessageItem/useMessageItem.ts
index 9606d1d9e3..65c2e02cc8 100644
--- a/autogpt_platform/frontend/src/components/contextual/Chat/components/MessageList/components/MessageItem/useMessageItem.ts
+++ b/autogpt_platform/frontend/src/components/contextual/Chat/components/MessageList/components/MessageItem/useMessageItem.ts
@@ -45,29 +45,8 @@ export function useMessageItem({
success: true,
timestamp: message.timestamp,
} as ChatMessageData;
-
- console.log(
- "[MessageItem] Converting assistant message to tool output:",
- {
- content: message.content.substring(0, 100),
- prevToolName: prevMessage.toolName,
- },
- );
}
}
-
- // Log for debugging
- if (message.type === "message" && message.role === "assistant") {
- const prevMessageToolName =
- prevMessage?.type === "tool_call" ? prevMessage.toolName : undefined;
- console.log("[MessageItem] Assistant message:", {
- index,
- content: message.content.substring(0, 200),
- fullContent: message.content,
- prevMessageType: prevMessage?.type,
- prevMessageToolName,
- });
- }
}
const isFinalMessage =
diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/useChatSession.ts b/autogpt_platform/frontend/src/components/contextual/Chat/useChatSession.ts
index a54dc9e32a..091ca84938 100644
--- a/autogpt_platform/frontend/src/components/contextual/Chat/useChatSession.ts
+++ b/autogpt_platform/frontend/src/components/contextual/Chat/useChatSession.ts
@@ -189,10 +189,7 @@ export function useChatSession({
const refreshSession = useCallback(
async function refreshSession() {
- if (!sessionId) {
- console.log("[refreshSession] Skipping - no session ID");
- return;
- }
+ if (!sessionId) return;
try {
setError(null);
await refetch();
diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/useChatStream.ts b/autogpt_platform/frontend/src/components/contextual/Chat/useChatStream.ts
index c3e4fa752b..a47ea43fab 100644
--- a/autogpt_platform/frontend/src/components/contextual/Chat/useChatStream.ts
+++ b/autogpt_platform/frontend/src/components/contextual/Chat/useChatStream.ts
@@ -1,5 +1,5 @@
import type { ToolArguments, ToolResult } from "@/types/chat";
-import { useCallback, useEffect, useRef, useState } from "react";
+import { useCallback, useRef, useState } from "react";
import { toast } from "sonner";
const MAX_RETRIES = 3;
@@ -151,13 +151,14 @@ export function useChatStream() {
const abortControllerRef = useRef(null);
const stopStreaming = useCallback(() => {
- if (abortControllerRef.current) {
+ const controller = abortControllerRef.current;
+ if (controller) {
try {
- if (!abortControllerRef.current.signal.aborted) {
- abortControllerRef.current.abort();
+ if (!controller.signal.aborted) {
+ controller.abort();
}
} catch {
- // Ignore abort errors - signal may already be aborted or invalid
+ // Ignore abort errors
}
abortControllerRef.current = null;
}
@@ -168,12 +169,6 @@ export function useChatStream() {
setIsStreaming(false);
}, []);
- useEffect(() => {
- return () => {
- stopStreaming();
- };
- }, [stopStreaming]);
-
const sendMessage = useCallback(
async (
sessionId: string,
@@ -238,11 +233,9 @@ export function useChatStream() {
onChunk({ type: "stream_end" });
}
- const cleanup = () => {
- reader.cancel().catch(() => {
- // Ignore cancel errors
- });
- };
+ function cleanup() {
+ reader.cancel().catch(() => {});
+ }
async function readStream() {
try {
@@ -283,10 +276,8 @@ export function useChatStream() {
continue;
}
- // Call the chunk handler
onChunk(chunk);
- // Handle stream lifecycle
if (chunk.type === "stream_end") {
didDispatchStreamEnd = true;
cleanup();
@@ -303,9 +294,8 @@ export function useChatStream() {
);
return;
}
- } catch (err) {
+ } catch {
// Skip invalid JSON lines
- console.warn("Failed to parse SSE chunk:", err, data);
}
}
}
@@ -313,6 +303,9 @@ export function useChatStream() {
} catch (err) {
if (err instanceof Error && err.name === "AbortError") {
cleanup();
+ dispatchStreamEnd();
+ stopStreaming();
+ resolve();
return;
}
@@ -336,9 +329,7 @@ export function useChatStream() {
isUserMessage,
context,
true,
- ).catch((_err) => {
- // Retry failed
- });
+ ).catch(() => {});
}, retryDelay);
} else {
setError(streamError);
@@ -358,6 +349,10 @@ export function useChatStream() {
readStream();
});
} catch (err) {
+ if (err instanceof Error && err.name === "AbortError") {
+ setIsStreaming(false);
+ return Promise.resolve();
+ }
const streamError =
err instanceof Error ? err : new Error("Failed to start stream");
setError(streamError);
diff --git a/autogpt_platform/frontend/src/services/storage/session-storage.ts b/autogpt_platform/frontend/src/services/storage/session-storage.ts
new file mode 100644
index 0000000000..8404da571c
--- /dev/null
+++ b/autogpt_platform/frontend/src/services/storage/session-storage.ts
@@ -0,0 +1,40 @@
+import * as Sentry from "@sentry/nextjs";
+import { environment } from "../environment";
+
// Keys under which values are persisted in window.sessionStorage.
// Centralized here so storage keys cannot collide or drift between callers.
export enum SessionKey {
  // JSON map of chat sessionId -> true, marking sessions whose initial
  // homepage prompt has already been auto-sent.
  CHAT_SENT_INITIAL_PROMPTS = "chat_sent_initial_prompts",
}
+
+function get(key: SessionKey) {
+ if (environment.isServerSide()) {
+ Sentry.captureException(new Error("Session storage is not available"));
+ return;
+ }
+ try {
+ return window.sessionStorage.getItem(key);
+ } catch {
+ return;
+ }
+}
+
+function set(key: SessionKey, value: string) {
+ if (environment.isServerSide()) {
+ Sentry.captureException(new Error("Session storage is not available"));
+ return;
+ }
+ return window.sessionStorage.setItem(key, value);
+}
+
+function clean(key: SessionKey) {
+ if (environment.isServerSide()) {
+ Sentry.captureException(new Error("Session storage is not available"));
+ return;
+ }
+ return window.sessionStorage.removeItem(key);
+}
+
// Typed facade over window.sessionStorage, keyed by SessionKey. Safe to
// import from shared code: server-side calls are reported to Sentry and
// return early instead of throwing.
export const sessionStorage = {
  clean,
  get,
  set,
};