From 7d4c020a9bd6b25dc4e25c2934e4ae6656af6257 Mon Sep 17 00:00:00 2001
From: Abhimanyu Yadav <122007096+Abhi1992002@users.noreply.github.com>
Date: Tue, 10 Feb 2026 18:42:21 +0530
Subject: [PATCH] feat(chat): implement AI SDK integration with custom streaming response handling (#11901)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

### Changes 🏗️

- Added AI SDK integration for chat streaming with proper message handling
- Implemented custom to_sse method in StreamToolOutputAvailable to exclude non-spec fields
- Modified stream_chat_completion to reuse message IDs for tool call continuations
- Created new Copilot 2.0 UI with AI SDK React components
- Added streamdown and related packages for markdown rendering
- Built reusable conversation and message components for the chat interface
- Added support for tool output display in the chat UI

### Checklist 📋

#### For code changes:

- [x] I have clearly listed my changes in the PR description
- [x] I have made a test plan
- [x] I have tested my changes according to the test plan:
  - [x] Start a new chat session and verify streaming works correctly
  - [x] Test tool calls and verify they display properly in the UI
  - [x] Verify message continuations don't create duplicate messages
  - [x] Test markdown rendering with code blocks and other formatting
  - [x] Verify the UI is responsive and scrolls correctly

#### For configuration changes:

- [x] `.env.default` is updated or already compatible with my changes
- [x] `docker-compose.yml` is updated or already compatible with my changes
- [x] I have included a list of my configuration changes in the PR description (under **Changes**)

---------

Co-authored-by: Lluis Agusti
Co-authored-by: Ubbe
---
 .github/workflows/platform-fullstack-ci.yml | 4 +-
 .../api/features/chat/response_model.py | 47 +-
 .../backend/api/features/chat/routes.py | 250 +--
 .../backend/api/features/chat/service.py | 36 +-
 .../api/features/chat/stream_registry.py | 4 +
 autogpt_platform/frontend/Dockerfile | 6 +-
 autogpt_platform/frontend/next.config.mjs | 51 +-
 autogpt_platform/frontend/package.json | 14 +-
 autogpt_platform/frontend/pnpm-lock.yaml | 1180 ++++++++++++-
 .../useBlockMenuSearchBar.ts | 2 +-
 .../NewBlockMenu/HorizontalScroll.tsx | 4 +-
 .../app/(platform)/copilot/CopilotPage.tsx | 76 +
 .../ChatContainer/ChatContainer.tsx | 74 +
 .../components/ChatInput/ChatInput.tsx | 17 +-
 .../ChatInput/components/AudioWaveform.tsx | 0
 .../components/RecordingIndicator.tsx | 0
 .../copilot}/components/ChatInput/helpers.ts | 0
 .../components/ChatInput/useChatInput.ts | 44 +-
 .../components/ChatInput/useVoiceRecording.ts | 32 +-
 .../ChatMessagesContainer.tsx | 274 +++
 .../components/ChatSidebar/ChatSidebar.tsx | 188 ++
 .../CopilotChatActionsProvider.tsx | 16 +
 .../useCopilotChatActions.ts | 23 +
 .../components/CopilotShell/CopilotShell.tsx | 99 --
 .../DesktopSidebar/DesktopSidebar.tsx | 70 -
 .../components/MobileDrawer/MobileDrawer.tsx | 91 -
 .../MobileDrawer/useMobileDrawer.ts | 24 -
 .../components/SessionsList/SessionsList.tsx | 80 -
 .../SessionsList/useSessionsPagination.ts | 91 -
 .../components/CopilotShell/helpers.ts | 106 --
 .../CopilotShell/useCopilotShell.ts | 124 --
 .../CopilotShell/useShellSessionList.ts | 113 --
 .../components/EmptySession/EmptySession.tsx | 111 ++
 .../{ => components/EmptySession}/helpers.ts | 51 +-
 .../components/MobileDrawer/MobileDrawer.tsx | 140 ++
 .../MobileHeader/MobileHeader.tsx | 0
 .../MorphingTextAnimation.tsx | 54 +
 .../OrbitLoader/OrbitLoader.module.css | 69 +
.../components/OrbitLoader/OrbitLoader.tsx | 28 + .../components/ProgressBar/ProgressBar.tsx | 26 + .../PulseLoader/PulseLoader.module.css | 34 + .../components/PulseLoader/PulseLoader.tsx | 16 + .../SpinnerLoader/SpinnerLoader.module.css | 57 + .../SpinnerLoader/SpinnerLoader.tsx | 16 + .../ToolAccordion/AccordionContent.tsx | 235 +++ .../ToolAccordion/ToolAccordion.tsx | 102 ++ .../ToolAccordion/useToolAccordion.ts | 32 + .../(platform)/copilot/copilot-page-store.ts | 56 - .../helpers/convertChatSessionToUiMessages.ts | 128 ++ .../Untitled} | 2 +- .../copilot/hooks}/useAsymptoticProgress.ts | 7 +- .../src/app/(platform)/copilot/layout.tsx | 13 - .../src/app/(platform)/copilot/page.tsx | 150 +- .../(platform)/copilot/styleguide/page.tsx | 1533 +++++++++++++++++ .../copilot/tools/CreateAgent/CreateAgent.tsx | 237 +++ .../ClarificationQuestionsCard.tsx} | 14 +- .../copilot/tools/CreateAgent/helpers.tsx | 186 ++ .../copilot/tools/EditAgent/EditAgent.tsx | 234 +++ .../copilot/tools/EditAgent/helpers.tsx | 188 ++ .../copilot/tools/FindAgents/FindAgents.tsx | 127 ++ .../copilot/tools/FindAgents/helpers.tsx | 187 ++ .../copilot/tools/FindBlocks/FindBlocks.tsx | 92 + .../copilot/tools/FindBlocks/helpers.tsx | 75 + .../copilot/tools/RunAgent/RunAgent.tsx | 93 + .../AgentDetailsCard/AgentDetailsCard.tsx | 116 ++ .../components/AgentDetailsCard/helpers.ts | 8 + .../components/ErrorCard/ErrorCard.tsx | 27 + .../ExecutionStartedCard.tsx | 39 + .../SetupRequirementsCard.tsx | 105 ++ .../SetupRequirementsCard/helpers.ts | 116 ++ .../copilot/tools/RunAgent/helpers.tsx | 248 +++ .../copilot/tools/RunBlock/RunBlock.tsx | 76 + .../BlockOutputCard/BlockOutputCard.tsx | 133 ++ .../components/ErrorCard/ErrorCard.tsx | 27 + .../SetupRequirementsCard.tsx | 197 +++ .../SetupRequirementsCard/helpers.ts | 156 ++ .../copilot/tools/RunBlock/helpers.tsx | 185 ++ .../copilot/tools/SearchDocs/SearchDocs.tsx | 186 ++ .../copilot/tools/SearchDocs/helpers.tsx | 215 +++ .../tools/ViewAgentOutput/ViewAgentOutput.tsx | 261 +++ .../copilot/tools/ViewAgentOutput/helpers.tsx | 158 ++ .../app/(platform)/copilot/useChatSession.ts | 109 ++ .../app/(platform)/copilot/useCopilotPage.ts | 221 +-- .../RunDetailHeader/RunDetailHeader.tsx | 9 +- .../components/ScheduleListItem.tsx | 6 +- .../components/TaskListItem.tsx | 8 +- .../components/TemplateListItem.tsx | 6 +- .../components/TriggerListItem.tsx | 4 +- .../components/agent-run-details-view.tsx | 8 +- .../components/agent-run-summary-card.tsx | 8 +- .../LibrarySearchBar/useLibrarySearchbar.tsx | 2 +- .../monitoring/components/AgentFlowList.tsx | 10 +- .../monitoring/components/FlowRunInfo.tsx | 10 +- .../monitoring/components/FlowRunsList.tsx | 6 +- .../components/FlowRunsTimeline.tsx | 16 +- .../chat/sessions/[sessionId]/stream/route.ts | 33 +- .../frontend/src/app/api/openapi.json | 957 +++++++++- autogpt_platform/frontend/src/app/globals.css | 17 + .../components/ai-elements/conversation.tsx | 109 ++ .../src/components/ai-elements/message.tsx | 338 ++++ .../atoms/OverflowText/OverflowText.tsx | 2 +- .../src/components/atoms/Text/Text.tsx | 3 +- .../src/components/contextual/Chat/Chat.tsx | 114 -- .../contextual/Chat/SSE_RECONNECTION.md | 159 -- .../contextual/Chat/chat-constants.ts | 16 - .../components/contextual/Chat/chat-store.ts | 501 ------ .../components/contextual/Chat/chat-types.ts | 163 -- .../components/AIChatBubble/AIChatBubble.tsx | 15 - .../AgentCarouselMessage.tsx | 119 -- .../AgentInputsSetup/AgentInputsSetup.tsx | 246 --- 
.../AgentInputsSetup/useAgentInputsSetup.ts | 38 - .../AuthPromptWidget/AuthPromptWidget.tsx | 120 -- .../ChatContainer/ChatContainer.tsx | 130 -- .../createStreamEventDispatcher.ts | 89 - .../Chat/components/ChatContainer/handlers.ts | 362 ---- .../Chat/components/ChatContainer/helpers.ts | 607 ------- .../ChatContainer/useChatContainer.ts | 517 ------ .../ChatCredentialsSetup.tsx | 151 -- .../useChatCredentialsSetup.ts | 36 - .../ChatErrorState/ChatErrorState.tsx | 30 - .../Chat/components/ChatLoader/ChatLoader.tsx | 7 - .../ChatLoadingState/ChatLoadingState.tsx | 19 - .../components/ChatMessage/ChatMessage.tsx | 448 ----- .../components/ChatMessage/useChatMessage.ts | 157 -- .../ExecutionStartedMessage.tsx | 90 - .../MarkdownContent/MarkdownContent.tsx | 349 ---- .../MessageBubble/MessageBubble.tsx | 53 - .../components/MessageList/MessageList.tsx | 115 -- .../LastToolResponse/LastToolResponse.tsx | 30 - .../components/MessageItem/MessageItem.tsx | 40 - .../components/MessageItem/useMessageItem.ts | 62 - .../Chat/components/MessageList/helpers.ts | 68 - .../components/MessageList/useMessageList.ts | 28 - .../NoResultsMessage/NoResultsMessage.tsx | 64 - .../PendingOperationWidget.tsx | 109 -- .../QuickActionsWelcome.tsx | 94 - .../SessionsDrawer/SessionsDrawer.tsx | 136 -- .../StreamingMessage/StreamingMessage.tsx | 35 - .../StreamingMessage/useStreamingMessage.ts | 25 - .../ThinkingMessage/ThinkingMessage.tsx | 82 - .../ToolCallMessage/ToolCallMessage.tsx | 55 - .../components/ToolCallMessage/helpers.ts | 184 -- .../AgentCreatedPrompt.tsx | 128 -- .../ToolResponseMessage.tsx | 69 - .../components/ToolResponseMessage/helpers.ts | 461 ----- .../UserChatBubble/UserChatBubble.tsx | 25 - .../contextual/Chat/stream-executor.ts | 255 --- .../contextual/Chat/stream-utils.ts | 89 - .../src/components/contextual/Chat/useChat.ts | 100 -- .../contextual/Chat/useChatSession.ts | 385 ----- .../contextual/Chat/useChatStream.ts | 115 -- .../contextual/Chat/usePageContext.ts | 98 -- .../src/components/layout/Navbar/Navbar.tsx | 2 +- .../renderers/InputRenderer/FormRenderer.tsx | 2 - .../InputRenderer/base/anyof/AnyOfField.tsx | 2 +- .../src/components/ui/button-group.tsx | 83 + .../frontend/src/components/ui/button.tsx | 59 + .../frontend/src/components/ui/input.tsx | 22 + .../frontend/src/components/ui/separator.tsx | 31 + .../frontend/src/components/ui/sheet.tsx | 143 ++ .../frontend/src/components/ui/sidebar.tsx | 778 +++++++++ .../frontend/src/components/ui/skeleton.tsx | 18 + .../frontend/src/components/ui/tooltip.tsx | 32 + .../frontend/src/hooks/use-mobile.tsx | 21 + autogpt_platform/frontend/src/lib/utils.ts | 2 +- .../frontend/src/tests/signin.spec.ts | 2 +- autogpt_platform/frontend/tailwind.config.ts | 10 + 167 files changed, 11359 insertions(+), 8804 deletions(-) create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/CopilotPage.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/ChatContainer/ChatContainer.tsx rename autogpt_platform/frontend/src/{components/contextual/Chat => app/(platform)/copilot}/components/ChatInput/ChatInput.tsx (93%) rename autogpt_platform/frontend/src/{components/contextual/Chat => app/(platform)/copilot}/components/ChatInput/components/AudioWaveform.tsx (100%) rename autogpt_platform/frontend/src/{components/contextual/Chat => app/(platform)/copilot}/components/ChatInput/components/RecordingIndicator.tsx (100%) rename autogpt_platform/frontend/src/{components/contextual/Chat => 
app/(platform)/copilot}/components/ChatInput/helpers.ts (100%) rename autogpt_platform/frontend/src/{components/contextual/Chat => app/(platform)/copilot}/components/ChatInput/useChatInput.ts (83%) rename autogpt_platform/frontend/src/{components/contextual/Chat => app/(platform)/copilot}/components/ChatInput/useVoiceRecording.ts (86%) create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/ChatMessagesContainer/ChatMessagesContainer.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/ChatSidebar/ChatSidebar.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotChatActionsProvider/CopilotChatActionsProvider.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotChatActionsProvider/useCopilotChatActions.ts delete mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/CopilotShell.tsx delete mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/DesktopSidebar/DesktopSidebar.tsx delete mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/MobileDrawer/MobileDrawer.tsx delete mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/MobileDrawer/useMobileDrawer.ts delete mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/SessionsList/SessionsList.tsx delete mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/SessionsList/useSessionsPagination.ts delete mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/helpers.ts delete mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/useCopilotShell.ts delete mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/useShellSessionList.ts create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/EmptySession/EmptySession.tsx rename autogpt_platform/frontend/src/app/(platform)/copilot/{ => components/EmptySession}/helpers.ts (72%) create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/MobileDrawer/MobileDrawer.tsx rename autogpt_platform/frontend/src/app/(platform)/copilot/components/{CopilotShell/components => }/MobileHeader/MobileHeader.tsx (100%) create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/MorphingTextAnimation/MorphingTextAnimation.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/OrbitLoader/OrbitLoader.module.css create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/OrbitLoader/OrbitLoader.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/ProgressBar/ProgressBar.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/PulseLoader/PulseLoader.module.css create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/PulseLoader/PulseLoader.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/SpinnerLoader/SpinnerLoader.module.css create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/SpinnerLoader/SpinnerLoader.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/ToolAccordion/AccordionContent.tsx create mode 100644 
autogpt_platform/frontend/src/app/(platform)/copilot/components/ToolAccordion/ToolAccordion.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/components/ToolAccordion/useToolAccordion.ts delete mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/copilot-page-store.ts create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/helpers/convertChatSessionToUiMessages.ts rename autogpt_platform/frontend/src/app/(platform)/copilot/{useCopilotSessionId.ts => hooks/Untitled} (99%) rename autogpt_platform/frontend/src/{components/contextual/Chat/components/ToolCallMessage => app/(platform)/copilot/hooks}/useAsymptoticProgress.ts (83%) delete mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/layout.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/styleguide/page.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/CreateAgent/CreateAgent.tsx rename autogpt_platform/frontend/src/{components/contextual/Chat/components/ClarificationQuestionsWidget/ClarificationQuestionsWidget.tsx => app/(platform)/copilot/tools/CreateAgent/components/ClarificationQuestionsCard.tsx} (98%) create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/CreateAgent/helpers.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/EditAgent/EditAgent.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/EditAgent/helpers.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/FindAgents/FindAgents.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/FindAgents/helpers.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/FindBlocks/FindBlocks.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/FindBlocks/helpers.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/RunAgent.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/AgentDetailsCard/AgentDetailsCard.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/AgentDetailsCard/helpers.ts create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/ErrorCard/ErrorCard.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/ExecutionStartedCard/ExecutionStartedCard.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/SetupRequirementsCard/SetupRequirementsCard.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/SetupRequirementsCard/helpers.ts create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/helpers.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunBlock/RunBlock.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunBlock/components/BlockOutputCard/BlockOutputCard.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunBlock/components/ErrorCard/ErrorCard.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunBlock/components/SetupRequirementsCard/SetupRequirementsCard.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunBlock/components/SetupRequirementsCard/helpers.ts 
create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunBlock/helpers.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/SearchDocs/SearchDocs.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/SearchDocs/helpers.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/ViewAgentOutput/ViewAgentOutput.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/tools/ViewAgentOutput/helpers.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/copilot/useChatSession.ts create mode 100644 autogpt_platform/frontend/src/components/ai-elements/conversation.tsx create mode 100644 autogpt_platform/frontend/src/components/ai-elements/message.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/Chat.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/SSE_RECONNECTION.md delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/chat-constants.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/chat-store.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/chat-types.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/AIChatBubble/AIChatBubble.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/AgentCarouselMessage/AgentCarouselMessage.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/AgentInputsSetup/AgentInputsSetup.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/AgentInputsSetup/useAgentInputsSetup.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/AuthPromptWidget/AuthPromptWidget.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/ChatContainer.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/createStreamEventDispatcher.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/handlers.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/helpers.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/useChatContainer.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ChatCredentialsSetup/ChatCredentialsSetup.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ChatCredentialsSetup/useChatCredentialsSetup.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ChatErrorState/ChatErrorState.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ChatLoader/ChatLoader.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ChatLoadingState/ChatLoadingState.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ChatMessage/ChatMessage.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ChatMessage/useChatMessage.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ExecutionStartedMessage/ExecutionStartedMessage.tsx delete mode 100644 
autogpt_platform/frontend/src/components/contextual/Chat/components/MarkdownContent/MarkdownContent.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/MessageBubble/MessageBubble.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/MessageList/MessageList.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/MessageList/components/LastToolResponse/LastToolResponse.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/MessageList/components/MessageItem/MessageItem.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/MessageList/components/MessageItem/useMessageItem.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/MessageList/helpers.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/MessageList/useMessageList.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/NoResultsMessage/NoResultsMessage.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/PendingOperationWidget/PendingOperationWidget.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/QuickActionsWelcome/QuickActionsWelcome.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/SessionsDrawer/SessionsDrawer.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/StreamingMessage/StreamingMessage.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/StreamingMessage/useStreamingMessage.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ThinkingMessage/ThinkingMessage.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ToolCallMessage/ToolCallMessage.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ToolCallMessage/helpers.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ToolResponseMessage/AgentCreatedPrompt.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ToolResponseMessage/ToolResponseMessage.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/ToolResponseMessage/helpers.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/components/UserChatBubble/UserChatBubble.tsx delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/stream-executor.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/stream-utils.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/useChat.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/useChatSession.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/useChatStream.ts delete mode 100644 autogpt_platform/frontend/src/components/contextual/Chat/usePageContext.ts create mode 100644 autogpt_platform/frontend/src/components/ui/button-group.tsx create mode 100644 autogpt_platform/frontend/src/components/ui/button.tsx create mode 100644 autogpt_platform/frontend/src/components/ui/input.tsx create mode 100644 autogpt_platform/frontend/src/components/ui/separator.tsx create mode 100644 autogpt_platform/frontend/src/components/ui/sheet.tsx 
create mode 100644 autogpt_platform/frontend/src/components/ui/sidebar.tsx create mode 100644 autogpt_platform/frontend/src/components/ui/skeleton.tsx create mode 100644 autogpt_platform/frontend/src/components/ui/tooltip.tsx create mode 100644 autogpt_platform/frontend/src/hooks/use-mobile.tsx diff --git a/.github/workflows/platform-fullstack-ci.yml b/.github/workflows/platform-fullstack-ci.yml index 67be0ae939..ab483b98af 100644 --- a/.github/workflows/platform-fullstack-ci.yml +++ b/.github/workflows/platform-fullstack-ci.yml @@ -56,7 +56,7 @@ jobs: run: pnpm install --frozen-lockfile types: - runs-on: ubuntu-latest + runs-on: big-boi needs: setup strategy: fail-fast: false @@ -85,7 +85,7 @@ jobs: - name: Run docker compose run: | - docker compose -f ../docker-compose.yml --profile local --profile deps_backend up -d + docker compose -f ../docker-compose.yml --profile local up -d deps_backend - name: Restore dependencies cache uses: actions/cache@v5 diff --git a/autogpt_platform/backend/backend/api/features/chat/response_model.py b/autogpt_platform/backend/backend/api/features/chat/response_model.py index f627a42fcc..1ae836f7d1 100644 --- a/autogpt_platform/backend/backend/api/features/chat/response_model.py +++ b/autogpt_platform/backend/backend/api/features/chat/response_model.py @@ -18,6 +18,10 @@ class ResponseType(str, Enum): START = "start" FINISH = "finish" + # Step lifecycle (one LLM API call within a message) + START_STEP = "start-step" + FINISH_STEP = "finish-step" + # Text streaming TEXT_START = "text-start" TEXT_DELTA = "text-delta" @@ -57,6 +61,16 @@ class StreamStart(StreamBaseResponse): description="Task ID for SSE reconnection. Clients can reconnect using GET /tasks/{taskId}/stream", ) + def to_sse(self) -> str: + """Convert to SSE format, excluding non-protocol fields like taskId.""" + import json + + data: dict[str, Any] = { + "type": self.type.value, + "messageId": self.messageId, + } + return f"data: {json.dumps(data)}\n\n" + class StreamFinish(StreamBaseResponse): """End of message/stream.""" @@ -64,6 +78,26 @@ class StreamFinish(StreamBaseResponse): type: ResponseType = ResponseType.FINISH +class StreamStartStep(StreamBaseResponse): + """Start of a step (one LLM API call within a message). + + The AI SDK uses this to add a step-start boundary to message.parts, + enabling visual separation between multiple LLM calls in a single message. + """ + + type: ResponseType = ResponseType.START_STEP + + +class StreamFinishStep(StreamBaseResponse): + """End of a step (one LLM API call within a message). + + The AI SDK uses this to reset activeTextParts and activeReasoningParts, + so the next LLM call in a tool-call continuation starts with clean state. 
+ """ + + type: ResponseType = ResponseType.FINISH_STEP + + # ========== Text Streaming ========== @@ -117,7 +151,7 @@ class StreamToolOutputAvailable(StreamBaseResponse): type: ResponseType = ResponseType.TOOL_OUTPUT_AVAILABLE toolCallId: str = Field(..., description="Tool call ID this responds to") output: str | dict[str, Any] = Field(..., description="Tool execution output") - # Additional fields for internal use (not part of AI SDK spec but useful) + # Keep these for internal backend use toolName: str | None = Field( default=None, description="Name of the tool that was executed" ) @@ -125,6 +159,17 @@ class StreamToolOutputAvailable(StreamBaseResponse): default=True, description="Whether the tool execution succeeded" ) + def to_sse(self) -> str: + """Convert to SSE format, excluding non-spec fields.""" + import json + + data = { + "type": self.type.value, + "toolCallId": self.toolCallId, + "output": self.output, + } + return f"data: {json.dumps(data)}\n\n" + # ========== Other ========== diff --git a/autogpt_platform/backend/backend/api/features/chat/routes.py b/autogpt_platform/backend/backend/api/features/chat/routes.py index 74e6e8ba1e..c6f37569b7 100644 --- a/autogpt_platform/backend/backend/api/features/chat/routes.py +++ b/autogpt_platform/backend/backend/api/features/chat/routes.py @@ -6,7 +6,7 @@ from collections.abc import AsyncGenerator from typing import Annotated from autogpt_libs import auth -from fastapi import APIRouter, Depends, Header, HTTPException, Query, Security +from fastapi import APIRouter, Depends, Header, HTTPException, Query, Response, Security from fastapi.responses import StreamingResponse from pydantic import BaseModel @@ -17,7 +17,29 @@ from . import stream_registry from .completion_handler import process_operation_failure, process_operation_success from .config import ChatConfig from .model import ChatSession, create_chat_session, get_chat_session, get_user_sessions -from .response_model import StreamFinish, StreamHeartbeat, StreamStart +from .response_model import StreamFinish, StreamHeartbeat +from .tools.models import ( + AgentDetailsResponse, + AgentOutputResponse, + AgentPreviewResponse, + AgentSavedResponse, + AgentsFoundResponse, + BlockListResponse, + BlockOutputResponse, + ClarificationNeededResponse, + DocPageResponse, + DocSearchResultsResponse, + ErrorResponse, + ExecutionStartedResponse, + InputValidationErrorResponse, + NeedLoginResponse, + NoResultsResponse, + OperationInProgressResponse, + OperationPendingResponse, + OperationStartedResponse, + SetupRequirementsResponse, + UnderstandingUpdatedResponse, +) config = ChatConfig() @@ -269,8 +291,6 @@ async def stream_chat_post( import time stream_start_time = time.perf_counter() - - # Base log metadata (task_id added after creation) log_meta = {"component": "ChatStream", "session_id": session_id} if user_id: log_meta["user_id"] = user_id @@ -328,24 +348,6 @@ async def stream_chat_post( first_chunk_time, ttfc = None, None chunk_count = 0 try: - # Emit a start event with task_id for reconnection - start_chunk = StreamStart(messageId=task_id, taskId=task_id) - await stream_registry.publish_chunk(task_id, start_chunk) - logger.info( - f"[TIMING] StreamStart published at {(time_module.perf_counter() - gen_start_time)*1000:.1f}ms", - extra={ - "json_fields": { - **log_meta, - "elapsed_ms": (time_module.perf_counter() - gen_start_time) - * 1000, - } - }, - ) - - logger.info( - "[TIMING] Calling stream_chat_completion", - extra={"json_fields": log_meta}, - ) async for chunk in 
chat_service.stream_chat_completion( session_id, request.message, @@ -353,6 +355,7 @@ async def stream_chat_post( user_id=user_id, session=session, # Pass pre-fetched session to avoid double-fetch context=request.context, + _task_id=task_id, # Pass task_id so service emits start with taskId for reconnection ): chunk_count += 1 if first_chunk_time is None: @@ -388,7 +391,6 @@ async def stream_chat_post( } }, ) - await stream_registry.mark_task_completed(task_id, "completed") except Exception as e: elapsed = time_module.perf_counter() - gen_start_time @@ -428,34 +430,13 @@ async def stream_chat_post( chunks_yielded = 0 try: # Subscribe to the task stream (this replays existing messages + live updates) - subscribe_start = time_module.perf_counter() - logger.info( - "[TIMING] Calling subscribe_to_task", - extra={"json_fields": log_meta}, - ) subscriber_queue = await stream_registry.subscribe_to_task( task_id=task_id, user_id=user_id, last_message_id="0-0", # Get all messages from the beginning ) - subscribe_time = (time_module.perf_counter() - subscribe_start) * 1000 - logger.info( - f"[TIMING] subscribe_to_task completed in {subscribe_time:.1f}ms, " - f"queue_ok={subscriber_queue is not None}", - extra={ - "json_fields": { - **log_meta, - "duration_ms": subscribe_time, - "queue_obtained": subscriber_queue is not None, - } - }, - ) if subscriber_queue is None: - logger.info( - "[TIMING] subscriber_queue is None, yielding finish", - extra={"json_fields": log_meta}, - ) yield StreamFinish().to_sse() yield "data: [DONE]\n\n" return @@ -467,11 +448,7 @@ async def stream_chat_post( ) while True: try: - queue_wait_start = time_module.perf_counter() chunk = await asyncio.wait_for(subscriber_queue.get(), timeout=30.0) - queue_wait_time = ( - time_module.perf_counter() - queue_wait_start - ) * 1000 chunks_yielded += 1 if not first_chunk_yielded: @@ -479,26 +456,12 @@ async def stream_chat_post( elapsed = time_module.perf_counter() - event_gen_start logger.info( f"[TIMING] FIRST CHUNK from queue at {elapsed:.2f}s, " - f"type={type(chunk).__name__}, " - f"wait={queue_wait_time:.1f}ms", + f"type={type(chunk).__name__}", extra={ "json_fields": { **log_meta, "chunk_type": type(chunk).__name__, "elapsed_ms": elapsed * 1000, - "queue_wait_ms": queue_wait_time, - } - }, - ) - elif chunks_yielded % 50 == 0: - logger.info( - f"[TIMING] Chunk #{chunks_yielded}, " - f"type={type(chunk).__name__}", - extra={ - "json_fields": { - **log_meta, - "chunk_number": chunks_yielded, - "chunk_type": type(chunk).__name__, } }, ) @@ -521,13 +484,6 @@ async def stream_chat_post( ) break except asyncio.TimeoutError: - # Send heartbeat to keep connection alive - logger.info( - f"[TIMING] Heartbeat timeout, chunks_so_far={chunks_yielded}", - extra={ - "json_fields": {**log_meta, "chunks_so_far": chunks_yielded} - }, - ) yield StreamHeartbeat().to_sse() except GeneratorExit: @@ -592,63 +548,90 @@ async def stream_chat_post( @router.get( "/sessions/{session_id}/stream", ) -async def stream_chat_get( +async def resume_session_stream( session_id: str, - message: Annotated[str, Query(min_length=1, max_length=10000)], user_id: str | None = Depends(auth.get_user_id), - is_user_message: bool = Query(default=True), ): """ - Stream chat responses for a session (GET - legacy endpoint). + Resume an active stream for a session. 
- Streams the AI/completion responses in real time over Server-Sent Events (SSE), including: - - Text fragments as they are generated - - Tool call UI elements (if invoked) - - Tool execution results + Called by the AI SDK's ``useChat(resume: true)`` on page load. + Checks for an active (in-progress) task on the session and either replays + the full SSE stream or returns 204 No Content if nothing is running. Args: - session_id: The chat session identifier to associate with the streamed messages. - message: The user's new message to process. + session_id: The chat session identifier. user_id: Optional authenticated user ID. - is_user_message: Whether the message is a user message. - Returns: - StreamingResponse: SSE-formatted response chunks. + Returns: + StreamingResponse (SSE) when an active stream exists, + or 204 No Content when there is nothing to resume. """ - session = await _validate_and_get_session(session_id, user_id) + import asyncio + + active_task, _last_id = await stream_registry.get_active_task_for_session( + session_id, user_id + ) + + if not active_task: + return Response(status_code=204) + + subscriber_queue = await stream_registry.subscribe_to_task( + task_id=active_task.task_id, + user_id=user_id, + last_message_id="0-0", # Full replay so useChat rebuilds the message + ) + + if subscriber_queue is None: + return Response(status_code=204) async def event_generator() -> AsyncGenerator[str, None]: chunk_count = 0 first_chunk_type: str | None = None - async for chunk in chat_service.stream_chat_completion( - session_id, - message, - is_user_message=is_user_message, - user_id=user_id, - session=session, # Pass pre-fetched session to avoid double-fetch - ): - if chunk_count < 3: - logger.info( - "Chat stream chunk", - extra={ - "session_id": session_id, - "chunk_type": str(chunk.type), - }, + try: + while True: + try: + chunk = await asyncio.wait_for(subscriber_queue.get(), timeout=30.0) + if chunk_count < 3: + logger.info( + "Resume stream chunk", + extra={ + "session_id": session_id, + "chunk_type": str(chunk.type), + }, + ) + if not first_chunk_type: + first_chunk_type = str(chunk.type) + chunk_count += 1 + yield chunk.to_sse() + + if isinstance(chunk, StreamFinish): + break + except asyncio.TimeoutError: + yield StreamHeartbeat().to_sse() + except GeneratorExit: + pass + except Exception as e: + logger.error(f"Error in resume stream for session {session_id}: {e}") + finally: + try: + await stream_registry.unsubscribe_from_task( + active_task.task_id, subscriber_queue ) - if not first_chunk_type: - first_chunk_type = str(chunk.type) - chunk_count += 1 - yield chunk.to_sse() - logger.info( - "Chat stream completed", - extra={ - "session_id": session_id, - "n_chunks": chunk_count, - "first_chunk_type": first_chunk_type, - }, - ) - # AI SDK protocol termination - yield "data: [DONE]\n\n" + except Exception as unsub_err: + logger.error( + f"Error unsubscribing from task {active_task.task_id}: {unsub_err}", + exc_info=True, + ) + logger.info( + "Resume stream completed", + extra={ + "session_id": session_id, + "n_chunks": chunk_count, + "first_chunk_type": first_chunk_type, + }, + ) + yield "data: [DONE]\n\n" return StreamingResponse( event_generator(), @@ -656,8 +639,8 @@ async def stream_chat_get( headers={ "Cache-Control": "no-cache", "Connection": "keep-alive", - "X-Accel-Buffering": "no", # Disable nginx buffering - "x-vercel-ai-ui-message-stream": "v1", # AI SDK protocol header + "X-Accel-Buffering": "no", + "x-vercel-ai-ui-message-stream": "v1", }, ) @@ -969,3 +952,42 @@ 
async def health_check() -> dict: "service": "chat", "version": "0.1.0", } + + +# ========== Schema Export (for OpenAPI / Orval codegen) ========== + +ToolResponseUnion = ( + AgentsFoundResponse + | NoResultsResponse + | AgentDetailsResponse + | SetupRequirementsResponse + | ExecutionStartedResponse + | NeedLoginResponse + | ErrorResponse + | InputValidationErrorResponse + | AgentOutputResponse + | UnderstandingUpdatedResponse + | AgentPreviewResponse + | AgentSavedResponse + | ClarificationNeededResponse + | BlockListResponse + | BlockOutputResponse + | DocSearchResultsResponse + | DocPageResponse + | OperationStartedResponse + | OperationPendingResponse + | OperationInProgressResponse +) + + +@router.get( + "/schema/tool-responses", + response_model=ToolResponseUnion, + include_in_schema=True, + summary="[Dummy] Tool response type export for codegen", + description="This endpoint is not meant to be called. It exists solely to " + "expose tool response models in the OpenAPI schema for frontend codegen.", +) +async def _tool_response_schema() -> ToolResponseUnion: # type: ignore[return] + """Never called at runtime. Exists only so Orval generates TS types.""" + raise HTTPException(status_code=501, detail="Schema-only endpoint") diff --git a/autogpt_platform/backend/backend/api/features/chat/service.py b/autogpt_platform/backend/backend/api/features/chat/service.py index da18421b98..49e70265fa 100644 --- a/autogpt_platform/backend/backend/api/features/chat/service.py +++ b/autogpt_platform/backend/backend/api/features/chat/service.py @@ -52,8 +52,10 @@ from .response_model import ( StreamBaseResponse, StreamError, StreamFinish, + StreamFinishStep, StreamHeartbeat, StreamStart, + StreamStartStep, StreamTextDelta, StreamTextEnd, StreamTextStart, @@ -351,6 +353,10 @@ async def stream_chat_completion( retry_count: int = 0, session: ChatSession | None = None, context: dict[str, str] | None = None, # {url: str, content: str} + _continuation_message_id: ( + str | None + ) = None, # Internal: reuse message ID for tool call continuations + _task_id: str | None = None, # Internal: task ID for SSE reconnection support ) -> AsyncGenerator[StreamBaseResponse, None]: """Main entry point for streaming chat completions with database handling. @@ -517,16 +523,21 @@ async def stream_chat_completion( # Generate unique IDs for AI SDK protocol import uuid as uuid_module - message_id = str(uuid_module.uuid4()) + is_continuation = _continuation_message_id is not None + message_id = _continuation_message_id or str(uuid_module.uuid4()) text_block_id = str(uuid_module.uuid4()) - # Yield message start + # Only yield message start for the initial call, not for continuations. setup_time = (time.monotonic() - completion_start) * 1000 logger.info( f"[TIMING] Setup complete, yielding StreamStart at {setup_time:.1f}ms", extra={"json_fields": {**log_meta, "setup_time_ms": setup_time}}, ) - yield StreamStart(messageId=message_id) + if not is_continuation: + yield StreamStart(messageId=message_id, taskId=_task_id) + + # Emit start-step before each LLM call (AI SDK uses this to add step boundaries) + yield StreamStartStep() try: logger.info( @@ -632,6 +643,10 @@ async def stream_chat_completion( ) yield chunk elif isinstance(chunk, StreamFinish): + if has_done_tool_call: + # Tool calls happened — close the step but don't send message-level finish. + # The continuation will open a new step, and finish will come at the end. 
+ yield StreamFinishStep() if not has_done_tool_call: # Emit text-end before finish if we received text but haven't closed it if has_received_text and not text_streaming_ended: @@ -663,6 +678,8 @@ async def stream_chat_completion( has_saved_assistant_message = True has_yielded_end = True + # Emit finish-step before finish (resets AI SDK text/reasoning state) + yield StreamFinishStep() yield chunk elif isinstance(chunk, StreamError): has_yielded_error = True @@ -712,6 +729,10 @@ async def stream_chat_completion( logger.info( f"Retryable error encountered. Attempt {retry_count + 1}/{config.max_retries}" ) + # Close the current step before retrying so the recursive call's + # StreamStartStep doesn't produce unbalanced step events. + if not has_yielded_end: + yield StreamFinishStep() should_retry = True else: # Non-retryable error or max retries exceeded @@ -747,6 +768,7 @@ async def stream_chat_completion( error_response = StreamError(errorText=error_message) yield error_response if not has_yielded_end: + yield StreamFinishStep() yield StreamFinish() return @@ -761,6 +783,8 @@ async def stream_chat_completion( retry_count=retry_count + 1, session=session, context=context, + _continuation_message_id=message_id, # Reuse message ID since start was already sent + _task_id=_task_id, ): yield chunk return # Exit after retry to avoid double-saving in finally block @@ -830,6 +854,8 @@ async def stream_chat_completion( session=session, # Pass session object to avoid Redis refetch context=context, tool_call_response=str(tool_response_messages), + _continuation_message_id=message_id, # Reuse message ID to avoid duplicates + _task_id=_task_id, ): yield chunk @@ -1686,6 +1712,7 @@ async def _execute_long_running_tool_with_streaming( task_id, StreamError(errorText=str(e)), ) + await stream_registry.publish_chunk(task_id, StreamFinishStep()) await stream_registry.publish_chunk(task_id, StreamFinish()) await _update_pending_operation( @@ -1943,6 +1970,7 @@ async def _generate_llm_continuation_with_streaming( # Publish start event await stream_registry.publish_chunk(task_id, StreamStart(messageId=message_id)) + await stream_registry.publish_chunk(task_id, StreamStartStep()) await stream_registry.publish_chunk(task_id, StreamTextStart(id=text_block_id)) # Stream the response @@ -1966,6 +1994,7 @@ async def _generate_llm_continuation_with_streaming( # Publish end events await stream_registry.publish_chunk(task_id, StreamTextEnd(id=text_block_id)) + await stream_registry.publish_chunk(task_id, StreamFinishStep()) if assistant_content: # Reload session from DB to avoid race condition with user messages @@ -2007,4 +2036,5 @@ async def _generate_llm_continuation_with_streaming( task_id, StreamError(errorText=f"Failed to generate response: {e}"), ) + await stream_registry.publish_chunk(task_id, StreamFinishStep()) await stream_registry.publish_chunk(task_id, StreamFinish()) diff --git a/autogpt_platform/backend/backend/api/features/chat/stream_registry.py b/autogpt_platform/backend/backend/api/features/chat/stream_registry.py index 509d20d9f4..abc34b1fc9 100644 --- a/autogpt_platform/backend/backend/api/features/chat/stream_registry.py +++ b/autogpt_platform/backend/backend/api/features/chat/stream_registry.py @@ -857,8 +857,10 @@ def _reconstruct_chunk(chunk_data: dict) -> StreamBaseResponse | None: ResponseType, StreamError, StreamFinish, + StreamFinishStep, StreamHeartbeat, StreamStart, + StreamStartStep, StreamTextDelta, StreamTextEnd, StreamTextStart, @@ -872,6 +874,8 @@ def _reconstruct_chunk(chunk_data: dict) 
-> StreamBaseResponse | None: type_to_class: dict[str, type[StreamBaseResponse]] = { ResponseType.START.value: StreamStart, ResponseType.FINISH.value: StreamFinish, + ResponseType.START_STEP.value: StreamStartStep, + ResponseType.FINISH_STEP.value: StreamFinishStep, ResponseType.TEXT_START.value: StreamTextStart, ResponseType.TEXT_DELTA.value: StreamTextDelta, ResponseType.TEXT_END.value: StreamTextEnd, diff --git a/autogpt_platform/frontend/Dockerfile b/autogpt_platform/frontend/Dockerfile index 2b120af5e1..ab2708f1f9 100644 --- a/autogpt_platform/frontend/Dockerfile +++ b/autogpt_platform/frontend/Dockerfile @@ -25,8 +25,12 @@ RUN if [ -f .env.production ]; then \ cp .env.default .env; \ fi RUN pnpm run generate:api +# Disable source-map generation in Docker builds to halve webpack memory usage. +# Source maps are only useful when SENTRY_AUTH_TOKEN is set (Vercel deploys); +# the Docker image never uploads them, so generating them just wastes RAM. +ENV NEXT_PUBLIC_SOURCEMAPS="false" # In CI, we want NEXT_PUBLIC_PW_TEST=true during build so Next.js inlines it -RUN if [ "$NEXT_PUBLIC_PW_TEST" = "true" ]; then NEXT_PUBLIC_PW_TEST=true NODE_OPTIONS="--max-old-space-size=4096" pnpm build; else NODE_OPTIONS="--max-old-space-size=4096" pnpm build; fi +RUN if [ "$NEXT_PUBLIC_PW_TEST" = "true" ]; then NEXT_PUBLIC_PW_TEST=true NODE_OPTIONS="--max-old-space-size=8192" pnpm build; else NODE_OPTIONS="--max-old-space-size=8192" pnpm build; fi # Prod stage - based on NextJS reference Dockerfile https://github.com/vercel/next.js/blob/64271354533ed16da51be5dce85f0dbd15f17517/examples/with-docker/Dockerfile FROM node:21-alpine AS prod diff --git a/autogpt_platform/frontend/next.config.mjs b/autogpt_platform/frontend/next.config.mjs index bb4410039d..9bb5983801 100644 --- a/autogpt_platform/frontend/next.config.mjs +++ b/autogpt_platform/frontend/next.config.mjs @@ -1,8 +1,12 @@ import { withSentryConfig } from "@sentry/nextjs"; +// Allow Docker builds to skip source-map generation (halves memory usage). +// Defaults to true so Vercel/local builds are unaffected. +const enableSourceMaps = process.env.NEXT_PUBLIC_SOURCEMAPS !== "false"; + /** @type {import('next').NextConfig} */ const nextConfig = { - productionBrowserSourceMaps: true, + productionBrowserSourceMaps: enableSourceMaps, // Externalize OpenTelemetry packages to fix Turbopack HMR issues serverExternalPackages: [ "@opentelemetry/instrumentation", @@ -14,9 +18,37 @@ const nextConfig = { serverActions: { bodySizeLimit: "256mb", }, - // Increase body size limit for API routes (file uploads) - 256MB to match backend limit - proxyClientMaxBodySize: "256mb", middlewareClientMaxBodySize: "256mb", + // Limit parallel webpack workers to reduce peak memory during builds. + cpus: 2, + }, + // Work around cssnano "Invalid array length" bug in Next.js's bundled + // cssnano-simple comment parser when processing very large CSS chunks. + // CSS is still bundled correctly; gzip handles most of the size savings anyway. + webpack: (config, { dev }) => { + if (!dev) { + // Next.js adds CssMinimizerPlugin internally (after user config), so we + // can't filter it from config.plugins. Instead, intercept the webpack + // compilation hooks and replace the buggy plugin's tap with a no-op. 
+ config.plugins.push({ + apply(compiler) { + compiler.hooks.compilation.tap( + "DisableCssMinimizer", + (compilation) => { + compilation.hooks.processAssets.intercept({ + register: (tap) => { + if (tap.name === "CssMinimizerPlugin") { + return { ...tap, fn: async () => {} }; + } + return tap; + }, + }); + }, + ); + }, + }); + } + return config; }, images: { domains: [ @@ -54,9 +86,16 @@ const nextConfig = { transpilePackages: ["geist"], }; -const isDevelopmentBuild = process.env.NODE_ENV !== "production"; +// Only run the Sentry webpack plugin when we can actually upload source maps +// (i.e. on Vercel with SENTRY_AUTH_TOKEN set). The Sentry *runtime* SDK +// (imported in app code) still captures errors without the plugin. +// Skipping the plugin saves ~1 GB of peak memory during `next build`. +const skipSentryPlugin = + process.env.NODE_ENV !== "production" || + !enableSourceMaps || + !process.env.SENTRY_AUTH_TOKEN; -export default isDevelopmentBuild +export default skipSentryPlugin ? nextConfig : withSentryConfig(nextConfig, { // For all available options, see: @@ -96,7 +135,7 @@ export default isDevelopmentBuild // This helps Sentry with sourcemaps... https://docs.sentry.io/platforms/javascript/guides/nextjs/sourcemaps/ sourcemaps: { - disable: false, + disable: !enableSourceMaps, assets: [".next/**/*.js", ".next/**/*.js.map"], ignore: ["**/node_modules/**"], deleteSourcemapsAfterUpload: false, // Source is public anyway :) diff --git a/autogpt_platform/frontend/package.json b/autogpt_platform/frontend/package.json index e8c9871a72..5988e59c90 100644 --- a/autogpt_platform/frontend/package.json +++ b/autogpt_platform/frontend/package.json @@ -7,7 +7,7 @@ }, "scripts": { "dev": "pnpm run generate:api:force && next dev --turbo", - "build": "next build", + "build": "cross-env NODE_OPTIONS=--max-old-space-size=16384 next build", "start": "next start", "start:standalone": "cd .next/standalone && node server.js", "lint": "next lint && prettier --check .", @@ -30,6 +30,7 @@ "defaults" ], "dependencies": { + "@ai-sdk/react": "3.0.61", "@faker-js/faker": "10.0.0", "@hookform/resolvers": "5.2.2", "@next/third-parties": "15.4.6", @@ -60,6 +61,10 @@ "@rjsf/utils": "6.1.2", "@rjsf/validator-ajv8": "6.1.2", "@sentry/nextjs": "10.27.0", + "@streamdown/cjk": "1.0.1", + "@streamdown/code": "1.0.1", + "@streamdown/math": "1.0.1", + "@streamdown/mermaid": "1.0.1", "@supabase/ssr": "0.7.0", "@supabase/supabase-js": "2.78.0", "@tanstack/react-query": "5.90.6", @@ -68,6 +73,7 @@ "@vercel/analytics": "1.5.0", "@vercel/speed-insights": "1.2.0", "@xyflow/react": "12.9.2", + "ai": "6.0.59", "boring-avatars": "1.11.2", "class-variance-authority": "0.7.1", "clsx": "2.1.1", @@ -87,7 +93,6 @@ "launchdarkly-react-client-sdk": "3.9.0", "lodash": "4.17.21", "lucide-react": "0.552.0", - "moment": "2.30.1", "next": "15.4.10", "next-themes": "0.4.6", "nuqs": "2.7.2", @@ -112,9 +117,11 @@ "remark-math": "6.0.0", "shepherd.js": "14.5.1", "sonner": "2.0.7", + "streamdown": "2.1.0", "tailwind-merge": "2.6.0", "tailwind-scrollbar": "3.1.0", "tailwindcss-animate": "1.0.7", + "use-stick-to-bottom": "1.1.2", "uuid": "11.1.0", "vaul": "1.1.2", "zod": "3.25.76", @@ -172,7 +179,8 @@ }, "pnpm": { "overrides": { - "@opentelemetry/instrumentation": "0.209.0" + "@opentelemetry/instrumentation": "0.209.0", + "lodash-es": "4.17.23" } }, "packageManager": "pnpm@10.20.0+sha512.cf9998222162dd85864d0a8102e7892e7ba4ceadebbf5a31f9c2fce48dfce317a9c53b9f6464d1ef9042cba2e02ae02a9f7c143a2b438cd93c91840f0192b9dd" diff --git 
a/autogpt_platform/frontend/pnpm-lock.yaml b/autogpt_platform/frontend/pnpm-lock.yaml index 377a298564..468e2f312d 100644 --- a/autogpt_platform/frontend/pnpm-lock.yaml +++ b/autogpt_platform/frontend/pnpm-lock.yaml @@ -6,11 +6,15 @@ settings: overrides: '@opentelemetry/instrumentation': 0.209.0 + lodash-es: 4.17.23 importers: .: dependencies: + '@ai-sdk/react': + specifier: 3.0.61 + version: 3.0.61(react@18.3.1)(zod@3.25.76) '@faker-js/faker': specifier: 10.0.0 version: 10.0.0 @@ -101,6 +105,18 @@ importers: '@sentry/nextjs': specifier: 10.27.0 version: 10.27.0(@opentelemetry/context-async-hooks@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/core@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.0))(next@15.4.10(@babel/core@7.28.5)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)(webpack@5.104.1(esbuild@0.25.12)) + '@streamdown/cjk': + specifier: 1.0.1 + version: 1.0.1(@types/mdast@4.0.4)(micromark-util-types@2.0.2)(micromark@4.0.2)(react@18.3.1)(unified@11.0.5) + '@streamdown/code': + specifier: 1.0.1 + version: 1.0.1(react@18.3.1) + '@streamdown/math': + specifier: 1.0.1 + version: 1.0.1(react@18.3.1) + '@streamdown/mermaid': + specifier: 1.0.1 + version: 1.0.1(react@18.3.1) '@supabase/ssr': specifier: 0.7.0 version: 0.7.0(@supabase/supabase-js@2.78.0) @@ -125,6 +141,9 @@ importers: '@xyflow/react': specifier: 12.9.2 version: 12.9.2(@types/react@18.3.17)(immer@11.1.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + ai: + specifier: 6.0.59 + version: 6.0.59(zod@3.25.76) boring-avatars: specifier: 1.11.2 version: 1.11.2 @@ -182,9 +201,6 @@ importers: lucide-react: specifier: 0.552.0 version: 0.552.0(react@18.3.1) - moment: - specifier: 2.30.1 - version: 2.30.1 next: specifier: 15.4.10 version: 15.4.10(@babel/core@7.28.5)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -257,6 +273,9 @@ importers: sonner: specifier: 2.0.7 version: 2.0.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + streamdown: + specifier: 2.1.0 + version: 2.1.0(react@18.3.1) tailwind-merge: specifier: 2.6.0 version: 2.6.0 @@ -266,6 +285,9 @@ importers: tailwindcss-animate: specifier: 1.0.7 version: 1.0.7(tailwindcss@3.4.17) + use-stick-to-bottom: + specifier: 1.1.2 + version: 1.1.2(react@18.3.1) uuid: specifier: 11.1.0 version: 11.1.0 @@ -417,10 +439,35 @@ packages: '@adobe/css-tools@4.4.4': resolution: {integrity: sha512-Elp+iwUx5rN5+Y8xLt5/GRoG20WGoDCQ/1Fb+1LiGtvwbDavuSk0jhD/eZdckHAuzcDzccnkv+rEjyWfRx18gg==} + '@ai-sdk/gateway@3.0.27': + resolution: {integrity: sha512-Pr+ApS9k6/jcR3kNltJNxo60OdYvnVU4DeRhzVtxUAYTXCHx4qO+qTMG9nNRn+El1acJnNRA//Su47srjXkT/w==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.25.76 || ^4.1.8 + + '@ai-sdk/provider-utils@4.0.10': + resolution: {integrity: sha512-VeDAiCH+ZK8Xs4hb9Cw7pHlujWNL52RKe8TExOkrw6Ir1AmfajBZTb9XUdKOZO08RwQElIKA8+Ltm+Gqfo8djQ==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.25.76 || ^4.1.8 + + '@ai-sdk/provider@3.0.5': + resolution: {integrity: sha512-2Xmoq6DBJqmSl80U6V9z5jJSJP7ehaJJQMy2iFUqTay06wdCqTnPVBBQbtEL8RCChenL+q5DC5H5WzU3vV3v8w==} + engines: {node: '>=18'} + + '@ai-sdk/react@3.0.61': + resolution: {integrity: sha512-vCjZBnY2+TawFBXamSKt6elAt9n1MXMfcjSd9DSgT9peCJN27qNGVSXgaGNh/B3cUgeOktFfhB2GVmIqOjvmLQ==} + engines: {node: '>=18'} + peerDependencies: + react: ^18 || ~19.0.1 || ~19.1.2 || ^19.2.1 + '@alloc/quick-lru@5.2.0': resolution: {integrity: 
sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==} engines: {node: '>=10'} + '@antfu/install-pkg@1.1.0': + resolution: {integrity: sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==} + '@apidevtools/json-schema-ref-parser@14.0.1': resolution: {integrity: sha512-Oc96zvmxx1fqoSEdUmfmvvb59/KDOnUoJ7s2t7bISyAn0XEz57LCCw8k2Y4Pf3mwKaZLMciESALORLgfe2frCw==} engines: {node: '>= 16'} @@ -1032,6 +1079,24 @@ packages: resolution: {integrity: sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==} engines: {node: '>=6.9.0'} + '@braintree/sanitize-url@7.1.2': + resolution: {integrity: sha512-jigsZK+sMF/cuiB7sERuo9V7N9jx+dhmHHnQyDSVdpZwVutaBu7WvNYqMDLSgFgfB30n452TP3vjDAvFC973mA==} + + '@chevrotain/cst-dts-gen@11.0.3': + resolution: {integrity: sha512-BvIKpRLeS/8UbfxXxgC33xOumsacaeCKAjAeLyOn7Pcp95HiRbrpl14S+9vaZLolnbssPIUuiUd8IvgkRyt6NQ==} + + '@chevrotain/gast@11.0.3': + resolution: {integrity: sha512-+qNfcoNk70PyS/uxmj3li5NiECO+2YKZZQMbmjTqRI3Qchu8Hig/Q9vgkHpI3alNjr7M+a2St5pw5w5F6NL5/Q==} + + '@chevrotain/regexp-to-ast@11.0.3': + resolution: {integrity: sha512-1fMHaBZxLFvWI067AVbGJav1eRY7N8DDvYCTwGBiE/ytKBgP8azTdgyrKyWZ9Mfh09eHWb5PgTSO8wi7U824RA==} + + '@chevrotain/types@11.0.3': + resolution: {integrity: sha512-gsiM3G8b58kZC2HaWR50gu6Y1440cHiJ+i3JUvcp/35JchYejb2+5MVeJK0iKThYpAa/P2PYFV4hoi44HD+aHQ==} + + '@chevrotain/utils@11.0.3': + resolution: {integrity: sha512-YslZMgtJUyuMbZ+aKvfF3x1f5liK4mWNxghFRv7jqRR9C3R3fAOGTTKvxXDa2Y1s9zSbcpuO0cAxDYsc9SrXoQ==} + '@chromatic-com/storybook@4.1.2': resolution: {integrity: sha512-QAWGtHwib0qsP5CcO64aJCF75zpFgpKK3jNpxILzQiPK3sVo4EmnVGJVdwcZWpWrGdH8E4YkncGoitw4EXzKMg==} engines: {node: '>=20.0.0', yarn: '>=1.22.18'} @@ -1486,6 +1551,12 @@ packages: resolution: {integrity: sha512-oT8USsTulFAA8FiBN0lA2rJqQI2lIt+HP2pdakGQXo3EviL2vqJTgpSCRwjl6mLJL158f1BVcdQUOEFGxomK3w==} engines: {node: '>=16.0.0'} + '@iconify/types@2.0.0': + resolution: {integrity: sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==} + + '@iconify/utils@3.1.0': + resolution: {integrity: sha512-Zlzem1ZXhI1iHeeERabLNzBHdOa4VhQbqAcOQaMKuTuyZCpwKbC2R4Dd0Zo3g9EAc+Y4fiarO8HIHRAth7+skw==} + '@img/colour@1.0.0': resolution: {integrity: sha512-A5P/LfWGFSl6nsckYtjw9da+19jB8hkJ6ACTGcDfEJ0aE+l2n2El7dsVM7UVHZQ9s2lmYMWlrS21YLy2IR1LUw==} engines: {node: '>=18'} @@ -1705,6 +1776,9 @@ packages: '@types/react': '>=16' react: '>=16' + '@mermaid-js/parser@0.6.3': + resolution: {integrity: sha512-lnjOhe7zyHjc+If7yT4zoedx2vo4sHaTmtkl1+or8BRTnCtDmcTpAjpzDSfCZrshM5bCoz0GyidzadJAH1xobA==} + '@mswjs/interceptors@0.40.0': resolution: {integrity: sha512-EFd6cVbHsgLa6wa4RljGj6Wk75qoHxUSyc5asLyyPSyuhIcdS2Q3Phw6ImS1q+CkALthJRShiYfKANcQMuMqsQ==} engines: {node: '>=18'} @@ -3019,6 +3093,12 @@ packages: peerDependencies: webpack: '>=4.40.0' + '@shikijs/core@3.21.0': + resolution: {integrity: sha512-AXSQu/2n1UIQekY8euBJlvFYZIw0PHY63jUzGbrOma4wPxzznJXTXkri+QcHeBNaFxiiOljKxxJkVSoB3PjbyA==} + + '@shikijs/engine-javascript@3.21.0': + resolution: {integrity: sha512-ATwv86xlbmfD9n9gKRiwuPpWgPENAWCLwYCGz9ugTJlsO2kOzhOkvoyV/UD+tJ0uT7YRyD530x6ugNSffmvIiQ==} + '@shikijs/engine-oniguruma@3.21.0': resolution: {integrity: sha512-OYknTCct6qiwpQDqDdf3iedRdzj6hFlOPv5hMvI+hkWfCKs5mlJ4TXziBG9nyabLwGulrUjHiCq3xCspSzErYQ==} @@ -3222,6 +3302,26 @@ packages: typescript: optional: true + '@streamdown/cjk@1.0.1': + resolution: {integrity: 
sha512-ElDoEfad2u8iFzmgmEEab15N4mt19r47xeUIPJtHaHVyEF5baojamGo+xw3MywMj2qUsAY3LnTnKbrUtL5tGkg==} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + + '@streamdown/code@1.0.1': + resolution: {integrity: sha512-U9LITfQ28tZYAoY922jdtw1ryg4kgRBdURopqK9hph7G2fBUwPeHthjH7SvaV0fvFv7EqjqCzARJuWUljLe9Ag==} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + + '@streamdown/math@1.0.1': + resolution: {integrity: sha512-R9WdHbpERiRU7WeO7oT1aIbnLJ/jraDr89F7X9x2OM//Y8G8UMATRnLD/RUwg4VLr8Nu7QSIJ0Pa8lXd2meM4Q==} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + + '@streamdown/mermaid@1.0.1': + resolution: {integrity: sha512-LVGbxYd6t1DKMCMqm3cpbfsdD4/EKpQelanOlJaBMKv83kbrl8syZJhVBsd/jka+CawhpeR9xsGQJzSJEpjoVw==} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + '@supabase/auth-js@2.78.0': resolution: {integrity: sha512-cXDtu1U0LeZj/xfnFoV7yCze37TcbNo8FCxy1FpqhMbB9u9QxxDSW6pA5gm/07Ei7m260Lof4CZx67Cu6DPeig==} @@ -3344,21 +3444,69 @@ packages: '@types/d3-array@3.2.2': resolution: {integrity: sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==} + '@types/d3-axis@3.0.6': + resolution: {integrity: sha512-pYeijfZuBd87T0hGn0FO1vQ/cgLk6E1ALJjfkC0oJ8cbwkZl3TpgS8bVBLZN+2jjGgg38epgxb2zmoGtSfvgMw==} + + '@types/d3-brush@3.0.6': + resolution: {integrity: sha512-nH60IZNNxEcrh6L1ZSMNA28rj27ut/2ZmI3r96Zd+1jrZD++zD3LsMIjWlvg4AYrHn/Pqz4CF3veCxGjtbqt7A==} + + '@types/d3-chord@3.0.6': + resolution: {integrity: sha512-LFYWWd8nwfwEmTZG9PfQxd17HbNPksHBiJHaKuY1XeqscXacsS2tyoo6OdRsjf+NQYeB6XrNL3a25E3gH69lcg==} + '@types/d3-color@3.1.3': resolution: {integrity: sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==} + '@types/d3-contour@3.0.6': + resolution: {integrity: sha512-BjzLgXGnCWjUSYGfH1cpdo41/hgdWETu4YxpezoztawmqsvCeep+8QGfiY6YbDvfgHz/DkjeIkkZVJavB4a3rg==} + + '@types/d3-delaunay@6.0.4': + resolution: {integrity: sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw==} + + '@types/d3-dispatch@3.0.7': + resolution: {integrity: sha512-5o9OIAdKkhN1QItV2oqaE5KMIiXAvDWBDPrD85e58Qlz1c1kI/J0NcqbEG88CoTwJrYe7ntUCVfeUl2UJKbWgA==} + '@types/d3-drag@3.0.7': resolution: {integrity: sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==} + '@types/d3-dsv@3.0.7': + resolution: {integrity: sha512-n6QBF9/+XASqcKK6waudgL0pf/S5XHPPI8APyMLLUHd8NqouBGLsU8MgtO7NINGtPBtk9Kko/W4ea0oAspwh9g==} + '@types/d3-ease@3.0.2': resolution: {integrity: sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==} + '@types/d3-fetch@3.0.7': + resolution: {integrity: sha512-fTAfNmxSb9SOWNB9IoG5c8Hg6R+AzUHDRlsXsDZsNp6sxAEOP0tkP3gKkNSO/qmHPoBFTxNrjDprVHDQDvo5aA==} + + '@types/d3-force@3.0.10': + resolution: {integrity: sha512-ZYeSaCF3p73RdOKcjj+swRlZfnYpK1EbaDiYICEEp5Q6sUiqFaFQ9qgoshp5CzIyyb/yD09kD9o2zEltCexlgw==} + + '@types/d3-format@3.0.4': + resolution: {integrity: sha512-fALi2aI6shfg7vM5KiR1wNJnZ7r6UuggVqtDA+xiEdPZQwy/trcQaHnwShLuLdta2rTymCNpxYTiMZX/e09F4g==} + + '@types/d3-geo@3.1.0': + resolution: {integrity: sha512-856sckF0oP/diXtS4jNsiQw/UuK5fQG8l/a9VVLeSouf1/PPbBE1i1W852zVwKwYCBkFJJB7nCFTbk6UMEXBOQ==} + + '@types/d3-hierarchy@3.1.7': + resolution: {integrity: sha512-tJFtNoYBtRtkNysX1Xq4sxtjK8YgoWUNpIiUee0/jHGRwqvzYxkq0hGVbbOGSz+JgFxxRu4K8nb3YpG3CMARtg==} + '@types/d3-interpolate@3.0.4': resolution: {integrity: sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==} '@types/d3-path@3.1.1': resolution: {integrity: 
sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==} + '@types/d3-polygon@3.0.2': + resolution: {integrity: sha512-ZuWOtMaHCkN9xoeEMr1ubW2nGWsp4nIql+OPQRstu4ypeZ+zk3YKqQT0CXVe/PYqrKpZAi+J9mTs05TKwjXSRA==} + + '@types/d3-quadtree@3.0.6': + resolution: {integrity: sha512-oUzyO1/Zm6rsxKRHA1vH0NEDG58HrT5icx/azi9MF1TWdtttWl0UIUsjEQBBh+SIkrpd21ZjEv7ptxWys1ncsg==} + + '@types/d3-random@3.0.3': + resolution: {integrity: sha512-Imagg1vJ3y76Y2ea0871wpabqp613+8/r0mCLEBfdtqC7xMSfj9idOnmBYyMoULfHePJyxMAw3nWhJxzc+LFwQ==} + + '@types/d3-scale-chromatic@3.1.0': + resolution: {integrity: sha512-iWMJgwkK7yTRmWqRB5plb1kadXyQ5Sj8V/zYlFGMUBbIPKQScw+Dku9cAAMgJG+z5GYDoMjWGLVOvjghDEFnKQ==} + '@types/d3-scale@4.0.9': resolution: {integrity: sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==} @@ -3368,6 +3516,9 @@ packages: '@types/d3-shape@3.1.7': resolution: {integrity: sha512-VLvUQ33C+3J+8p+Daf+nYSOsjB4GXp19/S/aGo60m9h1v6XaxjiT82lKVWJCfzhtuZ3yD7i/TPeC/fuKLLOSmg==} + '@types/d3-time-format@4.0.3': + resolution: {integrity: sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg==} + '@types/d3-time@3.0.4': resolution: {integrity: sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==} @@ -3380,6 +3531,9 @@ packages: '@types/d3-zoom@3.0.8': resolution: {integrity: sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==} + '@types/d3@7.4.3': + resolution: {integrity: sha512-lZXZ9ckh5R8uiFVt8ogUNf+pIrK4EsWrx2Np75WvF/eTpJ0FMHNhjXk8CKEx/+gpHbNQyJWehbFaTvqmHWB3ww==} + '@types/debug@4.1.12': resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==} @@ -3404,6 +3558,9 @@ packages: '@types/estree@1.0.8': resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} + '@types/geojson@7946.0.16': + resolution: {integrity: sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==} + '@types/hast@3.0.4': resolution: {integrity: sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==} @@ -3692,6 +3849,10 @@ packages: vue-router: optional: true + '@vercel/oidc@3.1.0': + resolution: {integrity: sha512-Fw28YZpRnA3cAHHDlkt7xQHiJ0fcL+NRcIqsocZQUSmbzeIKRpwttJjik5ZGanXP+vlA4SbTg+AbA3bP363l+w==} + engines: {node: '>= 20'} + '@vercel/speed-insights@1.2.0': resolution: {integrity: sha512-y9GVzrUJ2xmgtQlzFP2KhVRoCglwfRQgjyfY607aU0hh0Un6d0OUyrJkjuAlsV18qR4zfoFPs/BiIj9YDS6Wzw==} peerDependencies: @@ -3873,6 +4034,12 @@ packages: resolution: {integrity: sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==} engines: {node: '>= 14'} + ai@6.0.59: + resolution: {integrity: sha512-9SfCvcr4kVk4t8ZzIuyHpuL1hFYKsYMQfBSbBq3dipXPa+MphARvI8wHEjNaRqYl3JOsJbWxEBIMqHL0L92mUA==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.25.76 || ^4.1.8 + ajv-draft-04@1.0.0: resolution: {integrity: sha512-mv00Te6nmYbRp5DCwclxtt7yV/joXJPGS7nM+97GdxvuttCOfgI3K4U25zboyeX0O+myI8ERluxQe5wljMmVIw==} peerDependencies: @@ -4227,6 +4394,14 @@ packages: resolution: {integrity: sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA==} engines: {node: '>= 16'} + chevrotain-allstar@0.3.1: + resolution: {integrity: 
sha512-b7g+y9A0v4mxCW1qUhf3BSVPg+/NvGErk/dOkrDaHA0nQIQGAtrOjlX//9OQtRlSCy+x9rfB5N8yC71lH1nvMw==} + peerDependencies: + chevrotain: ^11.0.0 + + chevrotain@11.0.3: + resolution: {integrity: sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw==} + chokidar@3.6.0: resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==} engines: {node: '>= 8.10.0'} @@ -4325,6 +4500,10 @@ packages: resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} engines: {node: '>= 6'} + commander@7.2.0: + resolution: {integrity: sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==} + engines: {node: '>= 10'} + commander@8.3.0: resolution: {integrity: sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==} engines: {node: '>= 12'} @@ -4346,6 +4525,9 @@ packages: engines: {node: '>=18'} hasBin: true + confbox@0.1.8: + resolution: {integrity: sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==} + console-browserify@1.2.0: resolution: {integrity: sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA==} @@ -4374,6 +4556,12 @@ packages: core-util-is@1.0.3: resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==} + cose-base@1.0.3: + resolution: {integrity: sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==} + + cose-base@2.2.0: + resolution: {integrity: sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==} + cosmiconfig@7.1.0: resolution: {integrity: sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA==} engines: {node: '>=10'} @@ -4447,14 +4635,51 @@ packages: csstype@3.2.3: resolution: {integrity: sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==} + cytoscape-cose-bilkent@4.1.0: + resolution: {integrity: sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==} + peerDependencies: + cytoscape: ^3.2.0 + + cytoscape-fcose@2.2.0: + resolution: {integrity: sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==} + peerDependencies: + cytoscape: ^3.2.0 + + cytoscape@3.33.1: + resolution: {integrity: sha512-iJc4TwyANnOGR1OmWhsS9ayRS3s+XQ185FmuHObThD+5AeJCakAAbWv8KimMTt08xCCLNgneQwFp+JRJOr9qGQ==} + engines: {node: '>=0.10'} + + d3-array@2.12.1: + resolution: {integrity: sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==} + d3-array@3.2.4: resolution: {integrity: sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==} engines: {node: '>=12'} + d3-axis@3.0.0: + resolution: {integrity: sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==} + engines: {node: '>=12'} + + d3-brush@3.0.0: + resolution: {integrity: sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==} + engines: {node: '>=12'} + + d3-chord@3.0.1: + resolution: {integrity: sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==} + engines: {node: '>=12'} + d3-color@3.1.0: resolution: {integrity: 
sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==} engines: {node: '>=12'} + d3-contour@4.0.2: + resolution: {integrity: sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==} + engines: {node: '>=12'} + + d3-delaunay@6.0.4: + resolution: {integrity: sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==} + engines: {node: '>=12'} + d3-dispatch@3.0.1: resolution: {integrity: sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==} engines: {node: '>=12'} @@ -4463,22 +4688,65 @@ packages: resolution: {integrity: sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==} engines: {node: '>=12'} + d3-dsv@3.0.1: + resolution: {integrity: sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==} + engines: {node: '>=12'} + hasBin: true + d3-ease@3.0.1: resolution: {integrity: sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==} engines: {node: '>=12'} + d3-fetch@3.0.1: + resolution: {integrity: sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==} + engines: {node: '>=12'} + + d3-force@3.0.0: + resolution: {integrity: sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==} + engines: {node: '>=12'} + d3-format@3.1.0: resolution: {integrity: sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==} engines: {node: '>=12'} + d3-geo@3.1.1: + resolution: {integrity: sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==} + engines: {node: '>=12'} + + d3-hierarchy@3.1.2: + resolution: {integrity: sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==} + engines: {node: '>=12'} + d3-interpolate@3.0.1: resolution: {integrity: sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==} engines: {node: '>=12'} + d3-path@1.0.9: + resolution: {integrity: sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==} + d3-path@3.1.0: resolution: {integrity: sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==} engines: {node: '>=12'} + d3-polygon@3.0.1: + resolution: {integrity: sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==} + engines: {node: '>=12'} + + d3-quadtree@3.0.1: + resolution: {integrity: sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==} + engines: {node: '>=12'} + + d3-random@3.0.1: + resolution: {integrity: sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==} + engines: {node: '>=12'} + + d3-sankey@0.12.3: + resolution: {integrity: sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==} + + d3-scale-chromatic@3.1.0: + resolution: {integrity: sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==} + engines: {node: '>=12'} + d3-scale@4.0.2: resolution: {integrity: sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==} engines: {node: '>=12'} @@ -4487,6 +4755,9 @@ packages: resolution: {integrity: 
sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==} engines: {node: '>=12'} + d3-shape@1.3.7: + resolution: {integrity: sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==} + d3-shape@3.2.0: resolution: {integrity: sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==} engines: {node: '>=12'} @@ -4513,6 +4784,13 @@ packages: resolution: {integrity: sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==} engines: {node: '>=12'} + d3@7.9.0: + resolution: {integrity: sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==} + engines: {node: '>=12'} + + dagre-d3-es@7.0.13: + resolution: {integrity: sha512-efEhnxpSuwpYOKRm/L5KbqoZmNNukHa/Flty4Wp62JRvgH2ojwVgPgdYyr4twpieZnyRDdIH7PY2mopX26+j2Q==} + damerau-levenshtein@1.0.8: resolution: {integrity: sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==} @@ -4538,6 +4816,9 @@ packages: date-fns@4.1.0: resolution: {integrity: sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==} + dayjs@1.11.19: + resolution: {integrity: sha512-t5EcLVS6QPBNqM2z8fakk/NKel+Xzshgt8FFKAn+qwlD1pzZWxh0nVCrvFK7ZDb6XucZeF9z8C7CBWTRIVApAw==} + debug@3.2.7: resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} peerDependencies: @@ -4594,6 +4875,9 @@ packages: resolution: {integrity: sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==} engines: {node: '>= 0.4'} + delaunator@5.0.1: + resolution: {integrity: sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==} + dependency-graph@0.11.0: resolution: {integrity: sha512-JeMq7fEshyepOWDfcfHK06N3MhyPhz++vtqWhMT5O9A3K42rdsEDpfdVqjaqaAhsw6a+ZqeDvQVtD0hFHQWrzg==} engines: {node: '>= 0.6.0'} @@ -4974,6 +5258,10 @@ packages: resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} engines: {node: '>=0.8.x'} + eventsource-parser@3.0.6: + resolution: {integrity: sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==} + engines: {node: '>=18.0.0'} + evp_bytestokey@1.0.3: resolution: {integrity: sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==} @@ -5174,6 +5462,10 @@ packages: resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} engines: {node: 6.* || 8.* || >= 10.*} + get-east-asian-width@1.4.0: + resolution: {integrity: sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==} + engines: {node: '>=18'} + get-intrinsic@1.3.0: resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} engines: {node: '>= 0.4'} @@ -5213,11 +5505,12 @@ packages: glob@10.5.0: resolution: {integrity: sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==} + deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me hasBin: true glob@7.2.3: resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} - deprecated: Glob versions prior to v9 are no longer supported + deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me globals@13.24.0: resolution: {integrity: sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==} @@ -5248,6 +5541,9 @@ packages: resolution: {integrity: sha512-DKKrynuQRne0PNpEbzuEdHlYOMksHSUI8Zc9Unei5gTsMNA2/vMpoMz/yKba50pejK56qj98qM0SjYxAKi13gQ==} engines: {node: ^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0} + hachure-fill@0.5.2: + resolution: {integrity: sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==} + happy-dom@20.3.4: resolution: {integrity: sha512-rfbiwB6OKxZFIFQ7SRnCPB2WL9WhyXsFoTfecYgeCeFSOBxvkWLaXsdv5ehzJrfqwXQmDephAKWLRQoFoJwrew==} engines: {node: '>=20.0.0'} @@ -5311,9 +5607,21 @@ packages: hast-util-parse-selector@4.0.0: resolution: {integrity: sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==} + hast-util-raw@9.1.0: + resolution: {integrity: sha512-Y8/SBAHkZGoNkpzqqfCldijcuUKh7/su31kEBp67cFY09Wy0mTRgtsLYsiIxMJxlu0f6AA5SUTbDR8K0rxnbUw==} + + hast-util-sanitize@5.0.2: + resolution: {integrity: sha512-3yTWghByc50aGS7JlGhk61SPenfE/p1oaFeNwkOOyrscaOkMGrcW9+Cy/QAIOBpZxP1yqDIzFMR0+Np0i0+usg==} + + hast-util-to-html@9.0.5: + resolution: {integrity: sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==} + hast-util-to-jsx-runtime@2.3.6: resolution: {integrity: sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg==} + hast-util-to-parse5@8.0.1: + resolution: {integrity: sha512-MlWT6Pjt4CG9lFCjiz4BH7l9wmrMkfkJYCxFwKQic8+RTZgWPuWxwAfjJElsXkex7DJjfSJsQIt931ilUgmwdA==} + hast-util-to-string@3.0.1: resolution: {integrity: sha512-XelQVTDWvqcl3axRfI0xSeoVKzyIFPwsAGSLIsKdJKQMXDYJS4WYrBNF/8J7RdhIcFI2BOHgAifggsvsxp/3+A==} @@ -5358,6 +5666,9 @@ packages: html-url-attributes@3.0.1: resolution: {integrity: sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ==} + html-void-elements@3.0.0: + resolution: {integrity: sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==} + html-webpack-plugin@5.6.5: resolution: {integrity: sha512-4xynFbKNNk+WlzXeQQ+6YYsH2g7mpfPszQZUi3ovKlj+pDmngQ7vRXjrrmGROabmKwyQkcgcX5hqfOwHbFmK5g==} engines: {node: '>=10.13.0'} @@ -5395,6 +5706,10 @@ packages: resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} engines: {node: '>=10.17.0'} + iconv-lite@0.6.3: + resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} + engines: {node: '>=0.10.0'} + icss-utils@5.1.0: resolution: {integrity: sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==} engines: {node: ^10 || ^12 || >= 14} @@ -5458,6 +5773,9 @@ packages: resolution: {integrity: sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==} engines: {node: '>= 0.4'} + 
internmap@1.0.1: + resolution: {integrity: sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==} + internmap@2.0.3: resolution: {integrity: sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==} engines: {node: '>=12'} @@ -5698,6 +6016,9 @@ packages: json-schema-traverse@1.0.0: resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==} + json-schema@0.4.0: + resolution: {integrity: sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==} + json-stable-stringify-without-jsonify@1.0.1: resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} @@ -5740,9 +6061,20 @@ packages: resolution: {integrity: sha512-woHRUZ/iF23GBP1dkDQMh1QBad9dmr8/PAwNA54VrSOVYgI12MAcE14TqnDdQOdzyEonGzMepYnqBMYdsoAr8Q==} hasBin: true + katex@0.16.28: + resolution: {integrity: sha512-YHzO7721WbmAL6Ov1uzN/l5mY5WWWhJBSW+jq4tkfZfsxmo1hu6frS0EOswvjBUnWE6NtjEs48SFn5CQESRLZg==} + hasBin: true + keyv@4.5.4: resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} + khroma@2.1.0: + resolution: {integrity: sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==} + + langium@3.3.1: + resolution: {integrity: sha512-QJv/h939gDpvT+9SiLVlY7tZC3xB2qK57v0J04Sh9wpMb6MP1q8gB21L3WIo8T5P1MSMg3Ep14L7KkDCFG3y4w==} + engines: {node: '>=16.0.0'} + language-subtag-registry@0.3.23: resolution: {integrity: sha512-0K65Lea881pHotoGEa5gDlMxt3pctLi2RplBb7Ezh4rRdLEOtgi7n4EwK9lamnUCkKBqaeKRVebTq6BAxSkpXQ==} @@ -5762,6 +6094,12 @@ packages: react: ^16.6.3 || ^17.0.0 || ^18.0.0 || ^19.0.0 react-dom: ^16.8.4 || ^17.0.0 || ^18.0.0 || ^19.0.0 + layout-base@1.0.2: + resolution: {integrity: sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==} + + layout-base@2.0.1: + resolution: {integrity: sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==} + leven@3.1.0: resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} engines: {node: '>=6'} @@ -5804,8 +6142,8 @@ packages: resolution: {integrity: sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - lodash-es@4.17.22: - resolution: {integrity: sha512-XEawp1t0gxSi9x01glktRZ5HDy0HXqrM0x5pXQM98EaI0NxO6jVM7omDOxsuEo5UIASAnm2bRp1Jt/e0a2XU8Q==} + lodash-es@4.17.23: + resolution: {integrity: sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==} lodash.camelcase@4.3.0: resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} @@ -5912,6 +6250,16 @@ packages: react: optional: true + marked@16.4.2: + resolution: {integrity: sha512-TI3V8YYWvkVf3KJe1dRkpnjs68JUPyEa5vjKrp1XEEJUAOaQc+Qj+L1qWbPd0SJuAdQkFU0h73sXXqwDYxsiDA==} + engines: {node: '>= 20'} + hasBin: true + + marked@17.0.1: + resolution: {integrity: sha512-boeBdiS0ghpWcSwoNm/jJBwdpFaMnZWRzjA6SkUMYb40SVaN1x7mmfGKp0jvexGcx+7y2La5zRZsYFZI6Qpypg==} + engines: {node: '>= 20'} + hasBin: true + math-intrinsics@1.1.0: resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==} engines: {node: '>= 0.4'} @@ -5984,9 +6332,41 @@ 
packages: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} + mermaid@11.12.2: + resolution: {integrity: sha512-n34QPDPEKmaeCG4WDMGy0OT6PSyxKCfy2pJgShP+Qow2KLrvWjclwbc3yXfSIf4BanqWEhQEpngWwNp/XhZt6w==} + micromark-core-commonmark@2.0.3: resolution: {integrity: sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==} + micromark-extension-cjk-friendly-gfm-strikethrough@1.2.3: + resolution: {integrity: sha512-gSPnxgHDDqXYOBvQRq6lerrq9mjDhdtKn+7XETuXjxWcL62yZEfUdA28Ml1I2vDIPfAOIKLa0h2XDSGkInGHFQ==} + engines: {node: '>=16'} + peerDependencies: + micromark: ^4.0.0 + micromark-util-types: ^2.0.0 + peerDependenciesMeta: + micromark-util-types: + optional: true + + micromark-extension-cjk-friendly-util@2.1.1: + resolution: {integrity: sha512-egs6+12JU2yutskHY55FyR48ZiEcFOJFyk9rsiyIhcJ6IvWB6ABBqVrBw8IobqJTDZ/wdSr9eoXDPb5S2nW1bg==} + engines: {node: '>=16'} + peerDependencies: + micromark-util-types: '*' + peerDependenciesMeta: + micromark-util-types: + optional: true + + micromark-extension-cjk-friendly@1.2.3: + resolution: {integrity: sha512-gRzVLUdjXBLX6zNPSnHGDoo+ZTp5zy+MZm0g3sv+3chPXY7l9gW+DnrcHcZh/jiPR6MjPKO4AEJNp4Aw6V9z5Q==} + engines: {node: '>=16'} + peerDependencies: + micromark: ^4.0.0 + micromark-util-types: ^2.0.0 + peerDependenciesMeta: + micromark-util-types: + optional: true + micromark-extension-gfm-autolink-literal@2.1.0: resolution: {integrity: sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==} @@ -6119,12 +6499,12 @@ packages: resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} engines: {node: '>=16 || 14 >=14.17'} + mlly@1.8.0: + resolution: {integrity: sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==} + module-details-from-path@1.0.4: resolution: {integrity: sha512-EGWKgxALGMgzvxYF1UyGTy0HXX/2vHLkw6+NvDKW2jypWbHpjQuj4UMcqQWXHERJhVGKikolT06G3bcKe4fi7w==} - moment@2.30.1: - resolution: {integrity: sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==} - motion-dom@12.24.8: resolution: {integrity: sha512-wX64WITk6gKOhaTqhsFqmIkayLAAx45SVFiMnJIxIrH5uqyrwrxjrfo8WX9Kh8CaUAixjeMn82iH0W0QT9wD5w==} @@ -6339,6 +6719,12 @@ packages: resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} engines: {node: '>=6'} + oniguruma-parser@0.12.1: + resolution: {integrity: sha512-8Unqkvk1RYc6yq2WBYRj4hdnsAxVze8i7iPfQr8e4uSP3tRv0rpZcbGUDvxfQQcdwHt/e9PrMvGCsa8OqG9X3w==} + + oniguruma-to-es@4.3.4: + resolution: {integrity: sha512-3VhUGN3w2eYxnTzHn+ikMI+fp/96KoRSVK9/kMTcFqj1NRDh2IhQCKvYxDnWePKRXY/AqH+Fuiyb7VHSzBjHfA==} + open@8.4.2: resolution: {integrity: sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==} engines: {node: '>=12'} @@ -6398,6 +6784,9 @@ packages: package-json-from-dist@1.0.1: resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==} + package-manager-detector@1.6.0: + resolution: {integrity: sha512-61A5ThoTiDG/C8s8UMZwSorAGwMJ0ERVGj2OjoW5pAalsNOg15+iQiPzrLJ4jhZ1HJzmC2PIHT2oEiH3R5fzNA==} + pako@1.0.11: resolution: {integrity: sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==} @@ -6434,6 +6823,9 @@ packages: path-browserify@1.0.1: 
resolution: {integrity: sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==} + path-data-parser@0.1.0: + resolution: {integrity: sha512-NOnmBpt5Y2RWbuv0LMzsayp3lVylAHLPUTut412ZA3l+C4uw4ZVkQbjShYCQ8TCpUMdPapr4YjUqLYD6v68j+w==} + path-exists@4.0.0: resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} engines: {node: '>=8'} @@ -6513,6 +6905,9 @@ packages: resolution: {integrity: sha512-Ie9z/WINcxxLp27BKOCHGde4ITq9UklYKDzVo1nhk5sqGEXU3FpkwP5GM2voTGJkGd9B3Otl+Q4uwSOeSUtOBA==} engines: {node: '>=14.16'} + pkg-types@1.3.1: + resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==} + playwright-core@1.56.1: resolution: {integrity: sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ==} engines: {node: '>=18'} @@ -6523,6 +6918,12 @@ packages: engines: {node: '>=18'} hasBin: true + points-on-curve@0.2.0: + resolution: {integrity: sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A==} + + points-on-path@0.2.1: + resolution: {integrity: sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g==} + pony-cause@1.1.1: resolution: {integrity: sha512-PxkIc/2ZpLiEzQXu5YRDOUgBlfGYBY8156HY5ZcRAwwonMk5W/MrJP2LLkG/hF7GEQzaHo2aS7ho6ZLCOvf+6g==} engines: {node: '>=12.0.0'} @@ -6962,6 +7363,15 @@ packages: regex-parser@2.3.1: resolution: {integrity: sha512-yXLRqatcCuKtVHsWrNg0JL3l1zGfdXeEvDa0bdu4tCDQw0RpMDZsqbkyRTUnKMR0tXF627V2oEWjBEaEdqTwtQ==} + regex-recursion@6.0.2: + resolution: {integrity: sha512-0YCaSCq2VRIebiaUviZNs0cBz1kg5kVS2UKUfNIx8YVs1cN3AV7NTctO5FOKBA+UT2BPJIWZauYHPqJODG50cg==} + + regex-utilities@2.3.0: + resolution: {integrity: sha512-8VhliFJAWRaUiVvREIiW2NXXTmHs4vMNnSzuJVhscgmGav3g9VDxLrQndI3dZZVVdp0ZO/5v0xmX516/7M9cng==} + + regex@6.1.0: + resolution: {integrity: sha512-6VwtthbV4o/7+OaAF9I5L5V3llLEsoPyq9P1JVXkedTP33c7MfCG0/5NOPcSJn0TzXcG9YUrR0gQSWioew3LDg==} + regexp.prototype.flags@1.5.4: resolution: {integrity: sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==} engines: {node: '>= 0.4'} @@ -6980,12 +7390,21 @@ packages: rehype-autolink-headings@7.1.0: resolution: {integrity: sha512-rItO/pSdvnvsP4QRB1pmPiNHUskikqtPojZKJPPPAVx9Hj8i8TwMBhofrrAYRhYOOBZH9tgmG5lPqDLuIWPWmw==} + rehype-harden@1.1.7: + resolution: {integrity: sha512-j5DY0YSK2YavvNGV+qBHma15J9m0WZmRe8posT5AtKDS6TNWtMVTo6RiqF8SidfcASYz8f3k2J/1RWmq5zTXUw==} + rehype-highlight@7.0.2: resolution: {integrity: sha512-k158pK7wdC2qL3M5NcZROZ2tR/l7zOzjxXd5VGdcfIyoijjQqpHd3JKtYSBDpDZ38UI2WJWuFAtkMDxmx5kstA==} rehype-katex@7.0.1: resolution: {integrity: sha512-OiM2wrZ/wuhKkigASodFoo8wimG3H12LWQaH8qSPVJn9apWKFSH3YOCtbKpBorTVw/eI7cuT21XBbvwEswbIOA==} + rehype-raw@7.0.0: + resolution: {integrity: sha512-/aE8hCfKlQeA8LmyeyQvQF3eBiLRGNlfBJEvWH7ivp9sBqs7TNqBL5X3v157rM4IFETqDnIOO+z5M/biZbo9Ww==} + + rehype-sanitize@6.0.0: + resolution: {integrity: sha512-CsnhKNsyI8Tub6L4sm5ZFsme4puGfc6pYylvXo1AeqaGbjOYyzNv3qZPwvs0oMJ39eryyeOdmxwUIo94IpEhqg==} + rehype-slug@6.0.0: resolution: {integrity: sha512-lWyvf/jwu+oS5+hL5eClVd3hNdmwM1kAC0BUvEGD19pajQMIzcNUd/k9GsfQ+FfECvX+JE+e9/btsKH0EjJT6A==} @@ -6993,6 +7412,26 @@ packages: resolution: {integrity: sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog==} engines: {node: '>= 0.10'} + remark-cjk-friendly-gfm-strikethrough@1.2.3: + resolution: 
{integrity: sha512-bXfMZtsaomK6ysNN/UGRIcasQAYkC10NtPmP0oOHOV8YOhA2TXmwRXCku4qOzjIFxAPfish5+XS0eIug2PzNZA==} + engines: {node: '>=16'} + peerDependencies: + '@types/mdast': ^4.0.0 + unified: ^11.0.0 + peerDependenciesMeta: + '@types/mdast': + optional: true + + remark-cjk-friendly@1.2.3: + resolution: {integrity: sha512-UvAgxwlNk+l9Oqgl/9MWK2eWRS7zgBW/nXX9AthV7nd/3lNejF138E7Xbmk9Zs4WjTJGs721r7fAEc7tNFoH7g==} + engines: {node: '>=16'} + peerDependencies: + '@types/mdast': ^4.0.0 + unified: ^11.0.0 + peerDependenciesMeta: + '@types/mdast': + optional: true + remark-gfm@4.0.1: resolution: {integrity: sha512-1quofZ2RQ9EWdeN34S79+KExV1764+wCUGop5CPL1WGdD0ocPpu91lzPGbwWMECpEpd42kJGQwzRfyov9j4yNg==} @@ -7008,6 +7447,9 @@ packages: remark-stringify@11.0.0: resolution: {integrity: sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==} + remend@1.1.0: + resolution: {integrity: sha512-JENGyuIhTwzUfCarW43X4r9cehoqTo9QyYxfNDZSud2AmqeuWjZ5pfybasTa4q0dxTJAj5m8NB+wR+YueAFpxQ==} + renderkid@3.0.0: resolution: {integrity: sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg==} @@ -7066,14 +7508,23 @@ packages: resolution: {integrity: sha512-5Di9UC0+8h1L6ZD2d7awM7E/T4uA1fJRlx6zk/NvdCCVEoAnFqvHmCuNeIKoCeIixBX/q8uM+6ycDvF8woqosA==} engines: {node: '>= 0.8'} + robust-predicates@3.0.2: + resolution: {integrity: sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==} + rollup@4.55.1: resolution: {integrity: sha512-wDv/Ht1BNHB4upNbK74s9usvl7hObDnvVzknxqY/E/O3X6rW1U1rV1aENEfJ54eFZDTNo7zv1f5N4edCluH7+A==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true + roughjs@4.6.6: + resolution: {integrity: sha512-ZUz/69+SYpFN/g/lUlo2FXcIjRkSu3nDarreVdGGndHEBJ6cXPdKguS8JGxwj5HA5xIbVKSmLgr5b3AWxtRfvQ==} + run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + rw@1.3.3: + resolution: {integrity: sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==} + rxjs@7.8.2: resolution: {integrity: sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==} @@ -7098,6 +7549,9 @@ packages: safe-stable-stringify@1.1.1: resolution: {integrity: sha512-ERq4hUjKDbJfE4+XtZLFPCDi8Vb1JqaxAPTxWFLBx8XcAlf9Bda/ZJdVezs/NAfsMQScyIlUMx+Yeu7P7rx5jw==} + safer-buffer@2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + sass-loader@16.0.6: resolution: {integrity: sha512-sglGzId5gmlfxNs4gK2U3h7HlVRfx278YK6Ono5lwzuvi1jxig80YiuHkaDBVsYIKFhx8wN7XSCI0M2IDS/3qA==} engines: {node: '>= 18.12.0'} @@ -7186,6 +7640,9 @@ packages: resolution: {integrity: sha512-VuvPvLG1QjNOLP7AIm2HGyfmxEIz8QdskvWOHwUcxLDibYWjLRBmCWd8LSL5FlwhBW7D/GU+3gNVC/ASxAWdxg==} engines: {node: 18.* || >= 20} + shiki@3.21.0: + resolution: {integrity: sha512-N65B/3bqL/TI2crrXr+4UivctrAGEjmsib5rPMMPpFp1xAx/w03v8WZ9RDDFYteXoEgY7qZ4HGgl5KBIu1153w==} + should-equal@2.0.0: resolution: {integrity: sha512-ZP36TMrK9euEuWQYBig9W55WPC7uo37qzAEmbjHz4gfyuXrEUgF8cUvQVO+w+d3OMfPvSRQJ22lSm8MQJ43LTA==} @@ -7301,6 +7758,11 @@ packages: stream-http@3.2.0: resolution: {integrity: sha512-Oq1bLqisTyK3TSCXpPbT4sdeYNdmyZJv1LxpEm2vu1ZhK89kSE5YXwZc3cWk0MagGaKriBh9mCFbVGtO+vY29A==} + streamdown@2.1.0: + resolution: {integrity: sha512-u9gWd0AmjKg1d+74P44XaPlGrMeC21oDOSIhjGNEYMAttDMzCzlJO6lpTyJ9JkSinQQF65YcK4eOd3q9iTvULw==} + peerDependencies: + react: 
^18.0.0 || ^19.0.0 + strict-event-emitter@0.5.1: resolution: {integrity: sha512-vMgjE/GGEPEFnhFub6pa4FmJBRBVOLpIII2hvCZ8Kzb7K0hlHo7mQv6xYrBvCL2LtAIBwFUK8wvuJgTVSQ5MFQ==} @@ -7414,6 +7876,9 @@ packages: babel-plugin-macros: optional: true + stylis@4.3.6: + resolution: {integrity: sha512-yQ3rwFWRfwNUY7H5vpU0wfdkNSnvnJinhF9830Swlaxl03zsOjCfmX0ugac+3LtK0lYSgwL/KXc8oYL3mG4YFQ==} + sucrase@3.35.1: resolution: {integrity: sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw==} engines: {node: '>=16 || 14 >=14.17'} @@ -7435,12 +7900,20 @@ packages: resolution: {integrity: sha512-upi/0ZGkYgEcLeGieoz8gT74oWHA0E7JivX7aN9mAf+Tc7BQoRBvnIGHoPDw+f9TXTW4s6kGYCZJtauP6OYp7g==} hasBin: true + swr@2.3.8: + resolution: {integrity: sha512-gaCPRVoMq8WGDcWj9p4YWzCMPHzE0WNl6W8ADIx9c3JBEIdMkJGMzW+uzXvxHMltwcYACr9jP+32H8/hgwMR7w==} + peerDependencies: + react: ^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + symbol-tree@3.2.4: resolution: {integrity: sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==} tailwind-merge@2.6.0: resolution: {integrity: sha512-P+Vu1qXfzediirmHOC3xKGAYeZtPcV9g76X+xg2FD4tYgR71ewMA35Y3sCz3zhiN/dwefRpJX0yBcgwi1fXNQA==} + tailwind-merge@3.4.0: + resolution: {integrity: sha512-uSaO4gnW+b3Y2aWoWfFpX62vn2sR3skfhbjsEnaBI81WD1wBLlHZe5sWf0AqjksNdYTbGBEd0UasQMT3SNV15g==} + tailwind-scrollbar@3.1.0: resolution: {integrity: sha512-pmrtDIZeHyu2idTejfV59SbaJyvp1VRjYxAjZBH0jnyrPRo6HL1kD5Glz8VPagasqr6oAx6M05+Tuw429Z8jxg==} engines: {node: '>=12.13.0'} @@ -7495,6 +7968,10 @@ packages: third-party-capital@1.0.20: resolution: {integrity: sha512-oB7yIimd8SuGptespDAZnNkzIz+NWaJCu2RMsbs4Wmp9zSDUM8Nhi3s2OOcqYuv3mN4hitXc8DVx+LyUmbUDiA==} + throttleit@2.1.0: + resolution: {integrity: sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw==} + engines: {node: '>=18'} + timers-browserify@2.0.12: resolution: {integrity: sha512-9phl76Cqm6FhSX9Xe1ZUAMLtm1BLkKj2Qd5ApyWkXzsMRaA7dgr81kf4wJmQf/hAvg8EEyJxDo3du/0KlhPiKQ==} engines: {node: '>=0.6.0'} @@ -7677,6 +8154,9 @@ packages: uc.micro@2.1.0: resolution: {integrity: sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==} + ufo@1.6.3: + resolution: {integrity: sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q==} + unbox-primitive@1.1.0: resolution: {integrity: sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==} engines: {node: '>= 0.4'} @@ -7781,6 +8261,11 @@ packages: '@types/react': optional: true + use-stick-to-bottom@1.1.2: + resolution: {integrity: sha512-ssUfMNvfH8a8hGLoAt5kcOsjbsVORknon2tbkECuf3EsVucFFBbyXl+Xnv3b58P8ZRuZelzO81fgb6M0eRo8cg==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + use-sync-external-store@1.6.0: resolution: {integrity: sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==} peerDependencies: @@ -7918,6 +8403,26 @@ packages: vm-browserify@1.1.2: resolution: {integrity: sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==} + vscode-jsonrpc@8.2.0: + resolution: {integrity: sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==} + engines: {node: '>=14.0.0'} + + vscode-languageserver-protocol@3.17.5: + resolution: {integrity: sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==} + + 
vscode-languageserver-textdocument@1.0.12: + resolution: {integrity: sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==} + + vscode-languageserver-types@3.17.5: + resolution: {integrity: sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==} + + vscode-languageserver@9.0.1: + resolution: {integrity: sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g==} + hasBin: true + + vscode-uri@3.0.8: + resolution: {integrity: sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==} + w3c-xmlserializer@5.0.0: resolution: {integrity: sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==} engines: {node: '>=18'} @@ -8147,8 +8652,41 @@ snapshots: '@adobe/css-tools@4.4.4': {} + '@ai-sdk/gateway@3.0.27(zod@3.25.76)': + dependencies: + '@ai-sdk/provider': 3.0.5 + '@ai-sdk/provider-utils': 4.0.10(zod@3.25.76) + '@vercel/oidc': 3.1.0 + zod: 3.25.76 + + '@ai-sdk/provider-utils@4.0.10(zod@3.25.76)': + dependencies: + '@ai-sdk/provider': 3.0.5 + '@standard-schema/spec': 1.1.0 + eventsource-parser: 3.0.6 + zod: 3.25.76 + + '@ai-sdk/provider@3.0.5': + dependencies: + json-schema: 0.4.0 + + '@ai-sdk/react@3.0.61(react@18.3.1)(zod@3.25.76)': + dependencies: + '@ai-sdk/provider-utils': 4.0.10(zod@3.25.76) + ai: 6.0.59(zod@3.25.76) + react: 18.3.1 + swr: 2.3.8(react@18.3.1) + throttleit: 2.1.0 + transitivePeerDependencies: + - zod + '@alloc/quick-lru@5.2.0': {} + '@antfu/install-pkg@1.1.0': + dependencies: + package-manager-detector: 1.6.0 + tinyexec: 1.0.2 + '@apidevtools/json-schema-ref-parser@14.0.1': dependencies: '@types/json-schema': 7.0.15 @@ -8962,6 +9500,25 @@ snapshots: '@babel/helper-string-parser': 7.27.1 '@babel/helper-validator-identifier': 7.28.5 + '@braintree/sanitize-url@7.1.2': {} + + '@chevrotain/cst-dts-gen@11.0.3': + dependencies: + '@chevrotain/gast': 11.0.3 + '@chevrotain/types': 11.0.3 + lodash-es: 4.17.23 + + '@chevrotain/gast@11.0.3': + dependencies: + '@chevrotain/types': 11.0.3 + lodash-es: 4.17.23 + + '@chevrotain/regexp-to-ast@11.0.3': {} + + '@chevrotain/types@11.0.3': {} + + '@chevrotain/utils@11.0.3': {} + '@chromatic-com/storybook@4.1.2(storybook@9.1.5(@testing-library/dom@10.4.1)(msw@2.11.6(@types/node@24.10.0)(typescript@5.9.3))(prettier@3.6.2)(vite@7.3.1(@types/node@24.10.0)(jiti@2.6.1)(terser@5.44.1)(yaml@2.8.2)))': dependencies: '@neoconfetti/react': 1.0.0 @@ -9281,6 +9838,14 @@ snapshots: transitivePeerDependencies: - encoding + '@iconify/types@2.0.0': {} + + '@iconify/utils@3.1.0': + dependencies: + '@antfu/install-pkg': 1.1.0 + '@iconify/types': 2.0.0 + mlly: 1.8.0 + '@img/colour@1.0.0': optional: true @@ -9457,6 +10022,10 @@ snapshots: '@types/react': 18.3.17 react: 18.3.1 + '@mermaid-js/parser@0.6.3': + dependencies: + langium: 3.3.1 + '@mswjs/interceptors@0.40.0': dependencies: '@open-draft/deferred-promise': 2.2.0 @@ -10608,7 +11177,7 @@ snapshots: dependencies: '@rjsf/utils': 6.1.2(react@18.3.1) lodash: 4.17.21 - lodash-es: 4.17.22 + lodash-es: 4.17.23 markdown-to-jsx: 8.0.0(react@18.3.1) prop-types: 15.8.1 react: 18.3.1 @@ -10619,7 +11188,7 @@ snapshots: fast-uri: 3.1.0 jsonpointer: 5.0.1 lodash: 4.17.21 - lodash-es: 4.17.22 + lodash-es: 4.17.23 react: 18.3.1 react-is: 18.3.1 @@ -10629,7 +11198,7 @@ snapshots: ajv: 8.17.1 ajv-formats: 2.1.1(ajv@8.17.1) lodash: 4.17.21 - lodash-es: 4.17.22 + lodash-es: 4.17.23 '@rolldown/pluginutils@1.0.0-beta.53': {} @@ -10936,6 
+11505,19 @@ snapshots: - encoding - supports-color + '@shikijs/core@3.21.0': + dependencies: + '@shikijs/types': 3.21.0 + '@shikijs/vscode-textmate': 10.0.2 + '@types/hast': 3.0.4 + hast-util-to-html: 9.0.5 + + '@shikijs/engine-javascript@3.21.0': + dependencies: + '@shikijs/types': 3.21.0 + '@shikijs/vscode-textmate': 10.0.2 + oniguruma-to-es: 4.3.4 + '@shikijs/engine-oniguruma@3.21.0': dependencies: '@shikijs/types': 3.21.0 @@ -11313,6 +11895,37 @@ snapshots: optionalDependencies: typescript: 5.9.3 + '@streamdown/cjk@1.0.1(@types/mdast@4.0.4)(micromark-util-types@2.0.2)(micromark@4.0.2)(react@18.3.1)(unified@11.0.5)': + dependencies: + react: 18.3.1 + remark-cjk-friendly: 1.2.3(@types/mdast@4.0.4)(micromark-util-types@2.0.2)(micromark@4.0.2)(unified@11.0.5) + remark-cjk-friendly-gfm-strikethrough: 1.2.3(@types/mdast@4.0.4)(micromark-util-types@2.0.2)(micromark@4.0.2)(unified@11.0.5) + unist-util-visit: 5.0.0 + transitivePeerDependencies: + - '@types/mdast' + - micromark + - micromark-util-types + - unified + + '@streamdown/code@1.0.1(react@18.3.1)': + dependencies: + react: 18.3.1 + shiki: 3.21.0 + + '@streamdown/math@1.0.1(react@18.3.1)': + dependencies: + katex: 0.16.28 + react: 18.3.1 + rehype-katex: 7.0.1 + remark-math: 6.0.0 + transitivePeerDependencies: + - supports-color + + '@streamdown/mermaid@1.0.1(react@18.3.1)': + dependencies: + mermaid: 11.12.2 + react: 18.3.1 + '@supabase/auth-js@2.78.0': dependencies: '@supabase/node-fetch': 2.6.15 @@ -11475,20 +12088,63 @@ snapshots: '@types/d3-array@3.2.2': {} + '@types/d3-axis@3.0.6': + dependencies: + '@types/d3-selection': 3.0.11 + + '@types/d3-brush@3.0.6': + dependencies: + '@types/d3-selection': 3.0.11 + + '@types/d3-chord@3.0.6': {} + '@types/d3-color@3.1.3': {} + '@types/d3-contour@3.0.6': + dependencies: + '@types/d3-array': 3.2.2 + '@types/geojson': 7946.0.16 + + '@types/d3-delaunay@6.0.4': {} + + '@types/d3-dispatch@3.0.7': {} + '@types/d3-drag@3.0.7': dependencies: '@types/d3-selection': 3.0.11 + '@types/d3-dsv@3.0.7': {} + '@types/d3-ease@3.0.2': {} + '@types/d3-fetch@3.0.7': + dependencies: + '@types/d3-dsv': 3.0.7 + + '@types/d3-force@3.0.10': {} + + '@types/d3-format@3.0.4': {} + + '@types/d3-geo@3.1.0': + dependencies: + '@types/geojson': 7946.0.16 + + '@types/d3-hierarchy@3.1.7': {} + '@types/d3-interpolate@3.0.4': dependencies: '@types/d3-color': 3.1.3 '@types/d3-path@3.1.1': {} + '@types/d3-polygon@3.0.2': {} + + '@types/d3-quadtree@3.0.6': {} + + '@types/d3-random@3.0.3': {} + + '@types/d3-scale-chromatic@3.1.0': {} + '@types/d3-scale@4.0.9': dependencies: '@types/d3-time': 3.0.4 @@ -11499,6 +12155,8 @@ snapshots: dependencies: '@types/d3-path': 3.1.1 + '@types/d3-time-format@4.0.3': {} + '@types/d3-time@3.0.4': {} '@types/d3-timer@3.0.2': {} @@ -11512,6 +12170,39 @@ snapshots: '@types/d3-interpolate': 3.0.4 '@types/d3-selection': 3.0.11 + '@types/d3@7.4.3': + dependencies: + '@types/d3-array': 3.2.2 + '@types/d3-axis': 3.0.6 + '@types/d3-brush': 3.0.6 + '@types/d3-chord': 3.0.6 + '@types/d3-color': 3.1.3 + '@types/d3-contour': 3.0.6 + '@types/d3-delaunay': 6.0.4 + '@types/d3-dispatch': 3.0.7 + '@types/d3-drag': 3.0.7 + '@types/d3-dsv': 3.0.7 + '@types/d3-ease': 3.0.2 + '@types/d3-fetch': 3.0.7 + '@types/d3-force': 3.0.10 + '@types/d3-format': 3.0.4 + '@types/d3-geo': 3.1.0 + '@types/d3-hierarchy': 3.1.7 + '@types/d3-interpolate': 3.0.4 + '@types/d3-path': 3.1.1 + '@types/d3-polygon': 3.0.2 + '@types/d3-quadtree': 3.0.6 + '@types/d3-random': 3.0.3 + '@types/d3-scale': 4.0.9 + '@types/d3-scale-chromatic': 3.1.0 + 
'@types/d3-selection': 3.0.11 + '@types/d3-shape': 3.1.7 + '@types/d3-time': 3.0.4 + '@types/d3-time-format': 4.0.3 + '@types/d3-timer': 3.0.2 + '@types/d3-transition': 3.0.9 + '@types/d3-zoom': 3.0.8 + '@types/debug@4.1.12': dependencies: '@types/ms': 2.1.0 @@ -11540,6 +12231,8 @@ snapshots: '@types/estree@1.0.8': {} + '@types/geojson@7946.0.16': {} + '@types/hast@3.0.4': dependencies: '@types/unist': 3.0.3 @@ -11796,6 +12489,8 @@ snapshots: next: 15.4.10(@babel/core@7.28.5)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) react: 18.3.1 + '@vercel/oidc@3.1.0': {} + '@vercel/speed-insights@1.2.0(next@15.4.10(@babel/core@7.28.5)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)': optionalDependencies: next: 15.4.10(@babel/core@7.28.5)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -12023,6 +12718,14 @@ snapshots: agent-base@7.1.4: optional: true + ai@6.0.59(zod@3.25.76): + dependencies: + '@ai-sdk/gateway': 3.0.27(zod@3.25.76) + '@ai-sdk/provider': 3.0.5 + '@ai-sdk/provider-utils': 4.0.10(zod@3.25.76) + '@opentelemetry/api': 1.9.0 + zod: 3.25.76 + ajv-draft-04@1.0.0(ajv@8.17.1): optionalDependencies: ajv: 8.17.1 @@ -12411,6 +13114,20 @@ snapshots: check-error@2.1.3: {} + chevrotain-allstar@0.3.1(chevrotain@11.0.3): + dependencies: + chevrotain: 11.0.3 + lodash-es: 4.17.23 + + chevrotain@11.0.3: + dependencies: + '@chevrotain/cst-dts-gen': 11.0.3 + '@chevrotain/gast': 11.0.3 + '@chevrotain/regexp-to-ast': 11.0.3 + '@chevrotain/types': 11.0.3 + '@chevrotain/utils': 11.0.3 + lodash-es: 4.17.23 + chokidar@3.6.0: dependencies: anymatch: 3.1.3 @@ -12491,6 +13208,8 @@ snapshots: commander@4.1.1: {} + commander@7.2.0: {} + commander@8.3.0: {} common-path-prefix@3.0.0: {} @@ -12510,6 +13229,8 @@ snapshots: tree-kill: 1.2.2 yargs: 17.7.2 + confbox@0.1.8: {} + console-browserify@1.2.0: {} constants-browserify@1.0.0: {} @@ -12530,6 +13251,14 @@ snapshots: core-util-is@1.0.3: {} + cose-base@1.0.3: + dependencies: + layout-base: 1.0.2 + + cose-base@2.2.0: + dependencies: + layout-base: 2.0.1 + cosmiconfig@7.1.0: dependencies: '@types/parse-json': 4.0.2 @@ -12638,12 +13367,50 @@ snapshots: csstype@3.2.3: {} + cytoscape-cose-bilkent@4.1.0(cytoscape@3.33.1): + dependencies: + cose-base: 1.0.3 + cytoscape: 3.33.1 + + cytoscape-fcose@2.2.0(cytoscape@3.33.1): + dependencies: + cose-base: 2.2.0 + cytoscape: 3.33.1 + + cytoscape@3.33.1: {} + + d3-array@2.12.1: + dependencies: + internmap: 1.0.1 + d3-array@3.2.4: dependencies: internmap: 2.0.3 + d3-axis@3.0.0: {} + + d3-brush@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-drag: 3.0.0 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-transition: 3.0.1(d3-selection@3.0.0) + + d3-chord@3.0.1: + dependencies: + d3-path: 3.1.0 + d3-color@3.1.0: {} + d3-contour@4.0.2: + dependencies: + d3-array: 3.2.4 + + d3-delaunay@6.0.4: + dependencies: + delaunator: 5.0.1 + d3-dispatch@3.0.1: {} d3-drag@3.0.0: @@ -12651,16 +13418,56 @@ snapshots: d3-dispatch: 3.0.1 d3-selection: 3.0.0 + d3-dsv@3.0.1: + dependencies: + commander: 7.2.0 + iconv-lite: 0.6.3 + rw: 1.3.3 + d3-ease@3.0.1: {} + d3-fetch@3.0.1: + dependencies: + d3-dsv: 3.0.1 + + d3-force@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-quadtree: 3.0.1 + d3-timer: 3.0.1 + d3-format@3.1.0: {} + d3-geo@3.1.1: + dependencies: + d3-array: 3.2.4 + + d3-hierarchy@3.1.2: {} + d3-interpolate@3.0.1: dependencies: d3-color: 3.1.0 + d3-path@1.0.9: {} + d3-path@3.1.0: {} 
+ d3-polygon@3.0.1: {} + + d3-quadtree@3.0.1: {} + + d3-random@3.0.1: {} + + d3-sankey@0.12.3: + dependencies: + d3-array: 2.12.1 + d3-shape: 1.3.7 + + d3-scale-chromatic@3.1.0: + dependencies: + d3-color: 3.1.0 + d3-interpolate: 3.0.1 + d3-scale@4.0.2: dependencies: d3-array: 3.2.4 @@ -12671,6 +13478,10 @@ snapshots: d3-selection@3.0.0: {} + d3-shape@1.3.7: + dependencies: + d3-path: 1.0.9 + d3-shape@3.2.0: dependencies: d3-path: 3.1.0 @@ -12702,6 +13513,44 @@ snapshots: d3-selection: 3.0.0 d3-transition: 3.0.1(d3-selection@3.0.0) + d3@7.9.0: + dependencies: + d3-array: 3.2.4 + d3-axis: 3.0.0 + d3-brush: 3.0.0 + d3-chord: 3.0.1 + d3-color: 3.1.0 + d3-contour: 4.0.2 + d3-delaunay: 6.0.4 + d3-dispatch: 3.0.1 + d3-drag: 3.0.0 + d3-dsv: 3.0.1 + d3-ease: 3.0.1 + d3-fetch: 3.0.1 + d3-force: 3.0.0 + d3-format: 3.1.0 + d3-geo: 3.1.1 + d3-hierarchy: 3.1.2 + d3-interpolate: 3.0.1 + d3-path: 3.1.0 + d3-polygon: 3.0.1 + d3-quadtree: 3.0.1 + d3-random: 3.0.1 + d3-scale: 4.0.2 + d3-scale-chromatic: 3.1.0 + d3-selection: 3.0.0 + d3-shape: 3.2.0 + d3-time: 3.1.0 + d3-time-format: 4.1.0 + d3-timer: 3.0.1 + d3-transition: 3.0.1(d3-selection@3.0.0) + d3-zoom: 3.0.0 + + dagre-d3-es@7.0.13: + dependencies: + d3: 7.9.0 + lodash-es: 4.17.23 + damerau-levenshtein@1.0.8: {} data-urls@6.0.1: @@ -12732,6 +13581,8 @@ snapshots: date-fns@4.1.0: {} + dayjs@1.11.19: {} + debug@3.2.7: dependencies: ms: 2.1.3 @@ -12773,6 +13624,10 @@ snapshots: has-property-descriptors: 1.0.2 object-keys: 1.1.1 + delaunator@5.0.1: + dependencies: + robust-predicates: 3.0.2 + dependency-graph@0.11.0: {} dequal@2.0.3: {} @@ -13347,6 +14202,8 @@ snapshots: events@3.3.0: {} + eventsource-parser@3.0.6: {} + evp_bytestokey@1.0.3: dependencies: md5.js: 1.3.5 @@ -13553,6 +14410,8 @@ snapshots: get-caller-file@2.0.5: {} + get-east-asian-width@1.4.0: {} + get-intrinsic@1.3.0: dependencies: call-bind-apply-helpers: 1.0.2 @@ -13643,6 +14502,8 @@ snapshots: graphql@16.12.0: {} + hachure-fill@0.5.2: {} + happy-dom@20.3.4: dependencies: '@types/node': 24.10.0 @@ -13739,6 +14600,42 @@ snapshots: dependencies: '@types/hast': 3.0.4 + hast-util-raw@9.1.0: + dependencies: + '@types/hast': 3.0.4 + '@types/unist': 3.0.3 + '@ungap/structured-clone': 1.3.0 + hast-util-from-parse5: 8.0.3 + hast-util-to-parse5: 8.0.1 + html-void-elements: 3.0.0 + mdast-util-to-hast: 13.2.1 + parse5: 7.3.0 + unist-util-position: 5.0.0 + unist-util-visit: 5.0.0 + vfile: 6.0.3 + web-namespaces: 2.0.1 + zwitch: 2.0.4 + + hast-util-sanitize@5.0.2: + dependencies: + '@types/hast': 3.0.4 + '@ungap/structured-clone': 1.3.0 + unist-util-position: 5.0.0 + + hast-util-to-html@9.0.5: + dependencies: + '@types/hast': 3.0.4 + '@types/unist': 3.0.3 + ccount: 2.0.1 + comma-separated-tokens: 2.0.3 + hast-util-whitespace: 3.0.0 + html-void-elements: 3.0.0 + mdast-util-to-hast: 13.2.1 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 + stringify-entities: 4.0.4 + zwitch: 2.0.4 + hast-util-to-jsx-runtime@2.3.6: dependencies: '@types/estree': 1.0.8 @@ -13759,6 +14656,16 @@ snapshots: transitivePeerDependencies: - supports-color + hast-util-to-parse5@8.0.1: + dependencies: + '@types/hast': 3.0.4 + comma-separated-tokens: 2.0.3 + devlop: 1.1.0 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 + web-namespaces: 2.0.1 + zwitch: 2.0.4 + hast-util-to-string@3.0.1: dependencies: '@types/hast': 3.0.4 @@ -13819,6 +14726,8 @@ snapshots: html-url-attributes@3.0.1: {} + html-void-elements@3.0.0: {} + html-webpack-plugin@5.6.5(webpack@5.104.1(esbuild@0.25.12)): dependencies: 
'@types/html-minifier-terser': 6.1.0 @@ -13865,6 +14774,10 @@ snapshots: human-signals@2.1.0: {} + iconv-lite@0.6.3: + dependencies: + safer-buffer: 2.1.2 + icss-utils@5.1.0(postcss@8.5.6): dependencies: postcss: 8.5.6 @@ -13916,6 +14829,8 @@ snapshots: hasown: 2.0.2 side-channel: 1.1.0 + internmap@1.0.1: {} + internmap@2.0.3: {} is-alphabetical@2.0.1: {} @@ -14164,6 +15079,8 @@ snapshots: json-schema-traverse@1.0.0: {} + json-schema@0.4.0: {} + json-stable-stringify-without-jsonify@1.0.1: {} json5@1.0.2: @@ -14207,10 +15124,24 @@ snapshots: dependencies: commander: 8.3.0 + katex@0.16.28: + dependencies: + commander: 8.3.0 + keyv@4.5.4: dependencies: json-buffer: 3.0.1 + khroma@2.1.0: {} + + langium@3.3.1: + dependencies: + chevrotain: 11.0.3 + chevrotain-allstar: 0.3.1(chevrotain@11.0.3) + vscode-languageserver: 9.0.1 + vscode-languageserver-textdocument: 1.0.12 + vscode-uri: 3.0.8 + language-subtag-registry@0.3.23: {} language-tags@1.0.9: @@ -14236,6 +15167,10 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) + layout-base@1.0.2: {} + + layout-base@2.0.1: {} + leven@3.1.0: {} levn@0.4.1: @@ -14273,7 +15208,7 @@ snapshots: dependencies: p-locate: 6.0.0 - lodash-es@4.17.22: {} + lodash-es@4.17.23: {} lodash.camelcase@4.3.0: {} @@ -14363,6 +15298,10 @@ snapshots: optionalDependencies: react: 18.3.1 + marked@16.4.2: {} + + marked@17.0.1: {} + math-intrinsics@1.1.0: {} md5.js@1.3.5: @@ -14549,6 +15488,29 @@ snapshots: merge2@1.4.1: {} + mermaid@11.12.2: + dependencies: + '@braintree/sanitize-url': 7.1.2 + '@iconify/utils': 3.1.0 + '@mermaid-js/parser': 0.6.3 + '@types/d3': 7.4.3 + cytoscape: 3.33.1 + cytoscape-cose-bilkent: 4.1.0(cytoscape@3.33.1) + cytoscape-fcose: 2.2.0(cytoscape@3.33.1) + d3: 7.9.0 + d3-sankey: 0.12.3 + dagre-d3-es: 7.0.13 + dayjs: 1.11.19 + dompurify: 3.3.1 + katex: 0.16.25 + khroma: 2.1.0 + lodash-es: 4.17.23 + marked: 16.4.2 + roughjs: 4.6.6 + stylis: 4.3.6 + ts-dedent: 2.2.0 + uuid: 11.1.0 + micromark-core-commonmark@2.0.3: dependencies: decode-named-character-reference: 1.2.0 @@ -14568,6 +15530,38 @@ snapshots: micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 + micromark-extension-cjk-friendly-gfm-strikethrough@1.2.3(micromark-util-types@2.0.2)(micromark@4.0.2): + dependencies: + devlop: 1.1.0 + get-east-asian-width: 1.4.0 + micromark: 4.0.2 + micromark-extension-cjk-friendly-util: 2.1.1(micromark-util-types@2.0.2) + micromark-util-character: 2.1.1 + micromark-util-chunked: 2.0.1 + micromark-util-resolve-all: 2.0.1 + micromark-util-symbol: 2.0.1 + optionalDependencies: + micromark-util-types: 2.0.2 + + micromark-extension-cjk-friendly-util@2.1.1(micromark-util-types@2.0.2): + dependencies: + get-east-asian-width: 1.4.0 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + optionalDependencies: + micromark-util-types: 2.0.2 + + micromark-extension-cjk-friendly@1.2.3(micromark-util-types@2.0.2)(micromark@4.0.2): + dependencies: + devlop: 1.1.0 + micromark: 4.0.2 + micromark-extension-cjk-friendly-util: 2.1.1(micromark-util-types@2.0.2) + micromark-util-chunked: 2.0.1 + micromark-util-resolve-all: 2.0.1 + micromark-util-symbol: 2.0.1 + optionalDependencies: + micromark-util-types: 2.0.2 + micromark-extension-gfm-autolink-literal@2.1.0: dependencies: micromark-util-character: 2.1.1 @@ -14790,9 +15784,14 @@ snapshots: minipass@7.1.2: {} - module-details-from-path@1.0.4: {} + mlly@1.8.0: + dependencies: + acorn: 8.15.0 + pathe: 2.0.3 + pkg-types: 1.3.1 + ufo: 1.6.3 - moment@2.30.1: {} + module-details-from-path@1.0.4: {} motion-dom@12.24.8: 
dependencies: @@ -15049,6 +16048,14 @@ snapshots: dependencies: mimic-fn: 2.1.0 + oniguruma-parser@0.12.1: {} + + oniguruma-to-es@4.3.4: + dependencies: + oniguruma-parser: 0.12.1 + regex: 6.1.0 + regex-recursion: 6.0.2 + open@8.4.2: dependencies: define-lazy-prop: 2.0.0 @@ -15143,6 +16150,8 @@ snapshots: package-json-from-dist@1.0.1: {} + package-manager-detector@1.6.0: {} + pako@1.0.11: {} param-case@3.0.4: @@ -15197,6 +16206,8 @@ snapshots: path-browserify@1.0.1: {} + path-data-parser@0.1.0: {} + path-exists@4.0.0: {} path-exists@5.0.0: {} @@ -15259,6 +16270,12 @@ snapshots: dependencies: find-up: 6.3.0 + pkg-types@1.3.1: + dependencies: + confbox: 0.1.8 + mlly: 1.8.0 + pathe: 2.0.3 + playwright-core@1.56.1: {} playwright@1.56.1: @@ -15267,6 +16284,13 @@ snapshots: optionalDependencies: fsevents: 2.3.2 + points-on-curve@0.2.0: {} + + points-on-path@0.2.1: + dependencies: + path-data-parser: 0.1.0 + points-on-curve: 0.2.0 + pony-cause@1.1.1: {} possible-typed-array-names@1.1.0: {} @@ -15693,6 +16717,16 @@ snapshots: regex-parser@2.3.1: {} + regex-recursion@6.0.2: + dependencies: + regex-utilities: 2.3.0 + + regex-utilities@2.3.0: {} + + regex@6.1.0: + dependencies: + regex-utilities: 2.3.0 + regexp.prototype.flags@1.5.4: dependencies: call-bind: 1.0.8 @@ -15726,6 +16760,10 @@ snapshots: unified: 11.0.5 unist-util-visit: 5.0.0 + rehype-harden@1.1.7: + dependencies: + unist-util-visit: 5.0.0 + rehype-highlight@7.0.2: dependencies: '@types/hast': 3.0.4 @@ -15744,6 +16782,17 @@ snapshots: unist-util-visit-parents: 6.0.2 vfile: 6.0.3 + rehype-raw@7.0.0: + dependencies: + '@types/hast': 3.0.4 + hast-util-raw: 9.1.0 + vfile: 6.0.3 + + rehype-sanitize@6.0.0: + dependencies: + '@types/hast': 3.0.4 + hast-util-sanitize: 5.0.2 + rehype-slug@6.0.0: dependencies: '@types/hast': 3.0.4 @@ -15754,6 +16803,26 @@ snapshots: relateurl@0.2.7: {} + remark-cjk-friendly-gfm-strikethrough@1.2.3(@types/mdast@4.0.4)(micromark-util-types@2.0.2)(micromark@4.0.2)(unified@11.0.5): + dependencies: + micromark-extension-cjk-friendly-gfm-strikethrough: 1.2.3(micromark-util-types@2.0.2)(micromark@4.0.2) + unified: 11.0.5 + optionalDependencies: + '@types/mdast': 4.0.4 + transitivePeerDependencies: + - micromark + - micromark-util-types + + remark-cjk-friendly@1.2.3(@types/mdast@4.0.4)(micromark-util-types@2.0.2)(micromark@4.0.2)(unified@11.0.5): + dependencies: + micromark-extension-cjk-friendly: 1.2.3(micromark-util-types@2.0.2)(micromark@4.0.2) + unified: 11.0.5 + optionalDependencies: + '@types/mdast': 4.0.4 + transitivePeerDependencies: + - micromark + - micromark-util-types + remark-gfm@4.0.1: dependencies: '@types/mdast': 4.0.4 @@ -15797,6 +16866,8 @@ snapshots: mdast-util-to-markdown: 2.1.2 unified: 11.0.5 + remend@1.1.0: {} + renderkid@3.0.0: dependencies: css-select: 4.3.0 @@ -15861,6 +16932,8 @@ snapshots: hash-base: 3.1.2 inherits: 2.0.4 + robust-predicates@3.0.2: {} + rollup@4.55.1: dependencies: '@types/estree': 1.0.8 @@ -15892,10 +16965,19 @@ snapshots: '@rollup/rollup-win32-x64-msvc': 4.55.1 fsevents: 2.3.3 + roughjs@4.6.6: + dependencies: + hachure-fill: 0.5.2 + path-data-parser: 0.1.0 + points-on-curve: 0.2.0 + points-on-path: 0.2.1 + run-parallel@1.2.0: dependencies: queue-microtask: 1.2.3 + rw@1.3.3: {} + rxjs@7.8.2: dependencies: tslib: 2.8.1 @@ -15925,6 +17007,8 @@ snapshots: safe-stable-stringify@1.1.1: {} + safer-buffer@2.1.2: {} + sass-loader@16.0.6(webpack@5.104.1(esbuild@0.25.12)): dependencies: neo-async: 2.6.2 @@ -16037,6 +17121,17 @@ snapshots: '@scarf/scarf': 1.4.0 deepmerge-ts: 7.1.5 + 
shiki@3.21.0: + dependencies: + '@shikijs/core': 3.21.0 + '@shikijs/engine-javascript': 3.21.0 + '@shikijs/engine-oniguruma': 3.21.0 + '@shikijs/langs': 3.21.0 + '@shikijs/themes': 3.21.0 + '@shikijs/types': 3.21.0 + '@shikijs/vscode-textmate': 10.0.2 + '@types/hast': 3.0.4 + should-equal@2.0.0: dependencies: should-type: 1.4.0 @@ -16176,6 +17271,26 @@ snapshots: readable-stream: 3.6.2 xtend: 4.0.2 + streamdown@2.1.0(react@18.3.1): + dependencies: + clsx: 2.1.1 + hast-util-to-jsx-runtime: 2.3.6 + html-url-attributes: 3.0.1 + marked: 17.0.1 + react: 18.3.1 + rehype-harden: 1.1.7 + rehype-raw: 7.0.0 + rehype-sanitize: 6.0.0 + remark-gfm: 4.0.1 + remark-parse: 11.0.0 + remark-rehype: 11.1.2 + remend: 1.1.0 + tailwind-merge: 3.4.0 + unified: 11.0.5 + unist-util-visit: 5.0.0 + transitivePeerDependencies: + - supports-color + strict-event-emitter@0.5.1: {} string-argv@0.3.2: {} @@ -16301,6 +17416,8 @@ snapshots: optionalDependencies: '@babel/core': 7.28.5 + stylis@4.3.6: {} + sucrase@3.35.1: dependencies: '@jridgewell/gen-mapping': 0.3.13 @@ -16337,11 +17454,19 @@ snapshots: transitivePeerDependencies: - encoding + swr@2.3.8(react@18.3.1): + dependencies: + dequal: 2.0.3 + react: 18.3.1 + use-sync-external-store: 1.6.0(react@18.3.1) + symbol-tree@3.2.4: optional: true tailwind-merge@2.6.0: {} + tailwind-merge@3.4.0: {} + tailwind-scrollbar@3.1.0(tailwindcss@3.4.17): dependencies: tailwindcss: 3.4.17 @@ -16409,6 +17534,8 @@ snapshots: third-party-capital@1.0.20: {} + throttleit@2.1.0: {} + timers-browserify@2.0.12: dependencies: setimmediate: 1.0.5 @@ -16571,6 +17698,8 @@ snapshots: uc.micro@2.1.0: {} + ufo@1.6.3: {} + unbox-primitive@1.1.0: dependencies: call-bound: 1.0.4 @@ -16708,6 +17837,10 @@ snapshots: optionalDependencies: '@types/react': 18.3.17 + use-stick-to-bottom@1.1.2(react@18.3.1): + dependencies: + react: 18.3.1 + use-sync-external-store@1.6.0(react@18.3.1): dependencies: react: 18.3.1 @@ -16843,6 +17976,23 @@ snapshots: vm-browserify@1.1.2: {} + vscode-jsonrpc@8.2.0: {} + + vscode-languageserver-protocol@3.17.5: + dependencies: + vscode-jsonrpc: 8.2.0 + vscode-languageserver-types: 3.17.5 + + vscode-languageserver-textdocument@1.0.12: {} + + vscode-languageserver-types@3.17.5: {} + + vscode-languageserver@9.0.1: + dependencies: + vscode-languageserver-protocol: 3.17.5 + + vscode-uri@3.0.8: {} + w3c-xmlserializer@5.0.0: dependencies: xml-name-validator: 5.0.0 diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearchBar/useBlockMenuSearchBar.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearchBar/useBlockMenuSearchBar.ts index ab1af16584..e98d240215 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearchBar/useBlockMenuSearchBar.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearchBar/useBlockMenuSearchBar.ts @@ -1,4 +1,4 @@ -import { debounce } from "lodash"; +import debounce from "lodash/debounce"; import { useCallback, useEffect, useRef, useState } from "react"; import { useBlockMenuStore } from "../../../../stores/blockMenuStore"; import { getQueryClient } from "@/lib/react-query/queryClient"; diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/HorizontalScroll.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/HorizontalScroll.tsx index 
0f953394e6..bee0b85721 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/HorizontalScroll.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/HorizontalScroll.tsx @@ -70,10 +70,10 @@ export const HorizontalScroll: React.FC = ({ {children} {canScrollLeft && ( -
+
)} {canScrollRight && ( -
+
)} {canScrollLeft && ( +
+ + + )} + + {!isCollapsed && ( + + + Your chats + +
+ +
+
+ )} + + {!isCollapsed && ( + + {isLoadingSessions ? ( +
+ +
+ ) : sessions.length === 0 ? ( +

+ No conversations yet +

+ ) : ( + sessions.map((session) => ( + + )) + )} +
+ )} +
+ {!isCollapsed && sessionId && ( + + + + + + )} + + ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotChatActionsProvider/CopilotChatActionsProvider.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotChatActionsProvider/CopilotChatActionsProvider.tsx new file mode 100644 index 0000000000..5c80348e8c --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotChatActionsProvider/CopilotChatActionsProvider.tsx @@ -0,0 +1,16 @@ +"use client"; + +import { CopilotChatActionsContext } from "./useCopilotChatActions"; + +interface Props { + onSend: (message: string) => void | Promise; + children: React.ReactNode; +} + +export function CopilotChatActionsProvider({ onSend, children }: Props) { + return ( + + {children} + + ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotChatActionsProvider/useCopilotChatActions.ts b/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotChatActionsProvider/useCopilotChatActions.ts new file mode 100644 index 0000000000..31b27c0f6e --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotChatActionsProvider/useCopilotChatActions.ts @@ -0,0 +1,23 @@ +"use client"; + +import { createContext, useContext } from "react"; + +interface CopilotChatActions { + onSend: (message: string) => void | Promise; +} + +const CopilotChatActionsContext = createContext( + null, +); + +export function useCopilotChatActions(): CopilotChatActions { + const ctx = useContext(CopilotChatActionsContext); + if (!ctx) { + throw new Error( + "useCopilotChatActions must be used within CopilotChatActionsProvider", + ); + } + return ctx; +} + +export { CopilotChatActionsContext }; diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/CopilotShell.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/CopilotShell.tsx deleted file mode 100644 index 3f695da5ed..0000000000 --- a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/CopilotShell.tsx +++ /dev/null @@ -1,99 +0,0 @@ -"use client"; - -import { ChatLoader } from "@/components/contextual/Chat/components/ChatLoader/ChatLoader"; -import { Text } from "@/components/atoms/Text/Text"; -import { NAVBAR_HEIGHT_PX } from "@/lib/constants"; -import type { ReactNode } from "react"; -import { DesktopSidebar } from "./components/DesktopSidebar/DesktopSidebar"; -import { MobileDrawer } from "./components/MobileDrawer/MobileDrawer"; -import { MobileHeader } from "./components/MobileHeader/MobileHeader"; -import { useCopilotShell } from "./useCopilotShell"; - -interface Props { - children: ReactNode; -} - -export function CopilotShell({ children }: Props) { - const { - isMobile, - isDrawerOpen, - isLoading, - isCreatingSession, - isLoggedIn, - hasActiveSession, - sessions, - currentSessionId, - handleOpenDrawer, - handleCloseDrawer, - handleDrawerOpenChange, - handleNewChatClick, - handleSessionClick, - hasNextPage, - isFetchingNextPage, - fetchNextPage, - } = useCopilotShell(); - - if (!isLoggedIn) { - return ( -
- -
- ); - } - - return ( -
- {!isMobile && ( - - )} - -
- {isMobile && } -
- {isCreatingSession ? ( -
-
- - - Creating your chat... - -
-
- ) : ( - children - )} -
-
- - {isMobile && ( - - )} -
- ); -} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/DesktopSidebar/DesktopSidebar.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/DesktopSidebar/DesktopSidebar.tsx deleted file mode 100644 index 122a09a02f..0000000000 --- a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/DesktopSidebar/DesktopSidebar.tsx +++ /dev/null @@ -1,70 +0,0 @@ -import type { SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse"; -import { Button } from "@/components/atoms/Button/Button"; -import { Text } from "@/components/atoms/Text/Text"; -import { scrollbarStyles } from "@/components/styles/scrollbars"; -import { cn } from "@/lib/utils"; -import { Plus } from "@phosphor-icons/react"; -import { SessionsList } from "../SessionsList/SessionsList"; - -interface Props { - sessions: SessionSummaryResponse[]; - currentSessionId: string | null; - isLoading: boolean; - hasNextPage: boolean; - isFetchingNextPage: boolean; - onSelectSession: (sessionId: string) => void; - onFetchNextPage: () => void; - onNewChat: () => void; - hasActiveSession: boolean; -} - -export function DesktopSidebar({ - sessions, - currentSessionId, - isLoading, - hasNextPage, - isFetchingNextPage, - onSelectSession, - onFetchNextPage, - onNewChat, - hasActiveSession, -}: Props) { - return ( - - ); -} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/MobileDrawer/MobileDrawer.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/MobileDrawer/MobileDrawer.tsx deleted file mode 100644 index ea3b39f829..0000000000 --- a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/MobileDrawer/MobileDrawer.tsx +++ /dev/null @@ -1,91 +0,0 @@ -import type { SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse"; -import { Button } from "@/components/atoms/Button/Button"; -import { scrollbarStyles } from "@/components/styles/scrollbars"; -import { cn } from "@/lib/utils"; -import { PlusIcon, X } from "@phosphor-icons/react"; -import { Drawer } from "vaul"; -import { SessionsList } from "../SessionsList/SessionsList"; - -interface Props { - isOpen: boolean; - sessions: SessionSummaryResponse[]; - currentSessionId: string | null; - isLoading: boolean; - hasNextPage: boolean; - isFetchingNextPage: boolean; - onSelectSession: (sessionId: string) => void; - onFetchNextPage: () => void; - onNewChat: () => void; - onClose: () => void; - onOpenChange: (open: boolean) => void; - hasActiveSession: boolean; -} - -export function MobileDrawer({ - isOpen, - sessions, - currentSessionId, - isLoading, - hasNextPage, - isFetchingNextPage, - onSelectSession, - onFetchNextPage, - onNewChat, - onClose, - onOpenChange, - hasActiveSession, -}: Props) { - return ( - - - - -
-
- - Your chats - - -
-
-
- -
- {hasActiveSession && ( -
- -
- )} -
-
-
- ); -} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/MobileDrawer/useMobileDrawer.ts b/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/MobileDrawer/useMobileDrawer.ts deleted file mode 100644 index 2ef63a4422..0000000000 --- a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/MobileDrawer/useMobileDrawer.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { useState } from "react"; - -export function useMobileDrawer() { - const [isDrawerOpen, setIsDrawerOpen] = useState(false); - - const handleOpenDrawer = () => { - setIsDrawerOpen(true); - }; - - const handleCloseDrawer = () => { - setIsDrawerOpen(false); - }; - - const handleDrawerOpenChange = (open: boolean) => { - setIsDrawerOpen(open); - }; - - return { - isDrawerOpen, - handleOpenDrawer, - handleCloseDrawer, - handleDrawerOpenChange, - }; -} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/SessionsList/SessionsList.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/SessionsList/SessionsList.tsx deleted file mode 100644 index ef63e1aff4..0000000000 --- a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/SessionsList/SessionsList.tsx +++ /dev/null @@ -1,80 +0,0 @@ -import type { SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse"; -import { Skeleton } from "@/components/__legacy__/ui/skeleton"; -import { Text } from "@/components/atoms/Text/Text"; -import { InfiniteList } from "@/components/molecules/InfiniteList/InfiniteList"; -import { cn } from "@/lib/utils"; -import { getSessionTitle } from "../../helpers"; - -interface Props { - sessions: SessionSummaryResponse[]; - currentSessionId: string | null; - isLoading: boolean; - hasNextPage: boolean; - isFetchingNextPage: boolean; - onSelectSession: (sessionId: string) => void; - onFetchNextPage: () => void; -} - -export function SessionsList({ - sessions, - currentSessionId, - isLoading, - hasNextPage, - isFetchingNextPage, - onSelectSession, - onFetchNextPage, -}: Props) { - if (isLoading) { - return ( -
- {Array.from({ length: 5 }).map((_, i) => ( -
- -
- ))} -
- ); - } - - if (sessions.length === 0) { - return ( -
- - You don't have previous chats - -
- ); - } - - return ( - { - const isActive = session.id === currentSessionId; - return ( - - ); - }} - /> - ); -} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/SessionsList/useSessionsPagination.ts b/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/SessionsList/useSessionsPagination.ts deleted file mode 100644 index 61e3e6f37f..0000000000 --- a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/SessionsList/useSessionsPagination.ts +++ /dev/null @@ -1,91 +0,0 @@ -import { useGetV2ListSessions } from "@/app/api/__generated__/endpoints/chat/chat"; -import type { SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse"; -import { okData } from "@/app/api/helpers"; -import { useEffect, useState } from "react"; - -const PAGE_SIZE = 50; - -export interface UseSessionsPaginationArgs { - enabled: boolean; -} - -export function useSessionsPagination({ enabled }: UseSessionsPaginationArgs) { - const [offset, setOffset] = useState(0); - - const [accumulatedSessions, setAccumulatedSessions] = useState< - SessionSummaryResponse[] - >([]); - - const [totalCount, setTotalCount] = useState(null); - - const { data, isLoading, isFetching, isError } = useGetV2ListSessions( - { limit: PAGE_SIZE, offset }, - { - query: { - enabled: enabled && offset >= 0, - }, - }, - ); - - useEffect(() => { - const responseData = okData(data); - if (responseData) { - const newSessions = responseData.sessions; - const total = responseData.total; - setTotalCount(total); - - if (offset === 0) { - setAccumulatedSessions(newSessions); - } else { - setAccumulatedSessions((prev) => [...prev, ...newSessions]); - } - } else if (!enabled) { - setAccumulatedSessions([]); - setTotalCount(null); - } - }, [data, offset, enabled]); - - const hasNextPage = - totalCount !== null && accumulatedSessions.length < totalCount; - - const areAllSessionsLoaded = - totalCount !== null && - accumulatedSessions.length >= totalCount && - !isFetching && - !isLoading; - - useEffect(() => { - if ( - hasNextPage && - !isFetching && - !isLoading && - !isError && - totalCount !== null - ) { - setOffset((prev) => prev + PAGE_SIZE); - } - }, [hasNextPage, isFetching, isLoading, isError, totalCount]); - - const fetchNextPage = () => { - if (hasNextPage && !isFetching) { - setOffset((prev) => prev + PAGE_SIZE); - } - }; - - const reset = () => { - // Only reset the offset - keep existing sessions visible during refetch - // The effect will replace sessions when new data arrives at offset 0 - setOffset(0); - }; - - return { - sessions: accumulatedSessions, - isLoading, - isFetching, - hasNextPage, - areAllSessionsLoaded, - totalCount, - fetchNextPage, - reset, - }; -} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/helpers.ts b/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/helpers.ts deleted file mode 100644 index ef0d414edf..0000000000 --- a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/helpers.ts +++ /dev/null @@ -1,106 +0,0 @@ -import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessionDetailResponse"; -import type { SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse"; -import { format, formatDistanceToNow, isToday } from "date-fns"; - -export function convertSessionDetailToSummary(session: SessionDetailResponse) { - return { - id: session.id, - 
created_at: session.created_at, - updated_at: session.updated_at, - title: undefined, - }; -} - -export function filterVisibleSessions(sessions: SessionSummaryResponse[]) { - const fiveMinutesAgo = Date.now() - 5 * 60 * 1000; - return sessions.filter((session) => { - const hasBeenUpdated = session.updated_at !== session.created_at; - - if (hasBeenUpdated) return true; - - const isRecentlyCreated = - new Date(session.created_at).getTime() > fiveMinutesAgo; - - return isRecentlyCreated; - }); -} - -export function getSessionTitle(session: SessionSummaryResponse) { - if (session.title) return session.title; - - const isNewSession = session.updated_at === session.created_at; - - if (isNewSession) { - const createdDate = new Date(session.created_at); - if (isToday(createdDate)) { - return "Today"; - } - return format(createdDate, "MMM d, yyyy"); - } - - return "Untitled Chat"; -} - -export function getSessionUpdatedLabel(session: SessionSummaryResponse) { - if (!session.updated_at) return ""; - return formatDistanceToNow(new Date(session.updated_at), { addSuffix: true }); -} - -export function mergeCurrentSessionIntoList( - accumulatedSessions: SessionSummaryResponse[], - currentSessionId: string | null, - currentSessionData: SessionDetailResponse | null | undefined, - recentlyCreatedSessions?: Map, -) { - const filteredSessions: SessionSummaryResponse[] = []; - const addedIds = new Set(); - - if (accumulatedSessions.length > 0) { - const visibleSessions = filterVisibleSessions(accumulatedSessions); - - if (currentSessionId) { - const currentInAll = accumulatedSessions.find( - (s) => s.id === currentSessionId, - ); - if (currentInAll) { - const isInVisible = visibleSessions.some( - (s) => s.id === currentSessionId, - ); - if (!isInVisible) { - filteredSessions.push(currentInAll); - addedIds.add(currentInAll.id); - } - } - } - - for (const session of visibleSessions) { - if (!addedIds.has(session.id)) { - filteredSessions.push(session); - addedIds.add(session.id); - } - } - } - - if (currentSessionId && currentSessionData) { - if (!addedIds.has(currentSessionId)) { - const summarySession = convertSessionDetailToSummary(currentSessionData); - filteredSessions.unshift(summarySession); - addedIds.add(currentSessionId); - } - } - - if (recentlyCreatedSessions) { - for (const [sessionId, sessionData] of recentlyCreatedSessions) { - if (!addedIds.has(sessionId)) { - filteredSessions.unshift(sessionData); - addedIds.add(sessionId); - } - } - } - - return filteredSessions; -} - -export function getCurrentSessionId(searchParams: URLSearchParams) { - return searchParams.get("sessionId"); -} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/useCopilotShell.ts b/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/useCopilotShell.ts deleted file mode 100644 index 913c4d7ded..0000000000 --- a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/useCopilotShell.ts +++ /dev/null @@ -1,124 +0,0 @@ -"use client"; - -import { - getGetV2GetSessionQueryKey, - getGetV2ListSessionsQueryKey, - useGetV2GetSession, -} from "@/app/api/__generated__/endpoints/chat/chat"; -import { okData } from "@/app/api/helpers"; -import { useChatStore } from "@/components/contextual/Chat/chat-store"; -import { useBreakpoint } from "@/lib/hooks/useBreakpoint"; -import { useSupabase } from "@/lib/supabase/hooks/useSupabase"; -import { useQueryClient } from "@tanstack/react-query"; -import { usePathname, useSearchParams } from "next/navigation"; 
-import { useCopilotStore } from "../../copilot-page-store"; -import { useCopilotSessionId } from "../../useCopilotSessionId"; -import { useMobileDrawer } from "./components/MobileDrawer/useMobileDrawer"; -import { getCurrentSessionId } from "./helpers"; -import { useShellSessionList } from "./useShellSessionList"; - -export function useCopilotShell() { - const pathname = usePathname(); - const searchParams = useSearchParams(); - const queryClient = useQueryClient(); - const breakpoint = useBreakpoint(); - const { isLoggedIn } = useSupabase(); - const isMobile = - breakpoint === "base" || breakpoint === "sm" || breakpoint === "md"; - - const { urlSessionId, setUrlSessionId } = useCopilotSessionId(); - - const isOnHomepage = pathname === "/copilot"; - const paramSessionId = searchParams.get("sessionId"); - - const { - isDrawerOpen, - handleOpenDrawer, - handleCloseDrawer, - handleDrawerOpenChange, - } = useMobileDrawer(); - - const paginationEnabled = !isMobile || isDrawerOpen || !!paramSessionId; - - const currentSessionId = getCurrentSessionId(searchParams); - - const { data: currentSessionData } = useGetV2GetSession( - currentSessionId || "", - { - query: { - enabled: !!currentSessionId, - select: okData, - }, - }, - ); - - const { - sessions, - isLoading, - isSessionsFetching, - hasNextPage, - fetchNextPage, - resetPagination, - recentlyCreatedSessionsRef, - } = useShellSessionList({ - paginationEnabled, - currentSessionId, - currentSessionData, - isOnHomepage, - paramSessionId, - }); - - const stopStream = useChatStore((s) => s.stopStream); - const isCreatingSession = useCopilotStore((s) => s.isCreatingSession); - - function handleSessionClick(sessionId: string) { - if (sessionId === currentSessionId) return; - - // Stop current stream - SSE reconnection allows resuming later - if (currentSessionId) { - stopStream(currentSessionId); - } - - if (recentlyCreatedSessionsRef.current.has(sessionId)) { - queryClient.invalidateQueries({ - queryKey: getGetV2GetSessionQueryKey(sessionId), - }); - } - setUrlSessionId(sessionId, { shallow: false }); - if (isMobile) handleCloseDrawer(); - } - - function handleNewChatClick() { - // Stop current stream - SSE reconnection allows resuming later - if (currentSessionId) { - stopStream(currentSessionId); - } - - resetPagination(); - queryClient.invalidateQueries({ - queryKey: getGetV2ListSessionsQueryKey(), - }); - setUrlSessionId(null, { shallow: false }); - if (isMobile) handleCloseDrawer(); - } - - return { - isMobile, - isDrawerOpen, - isLoggedIn, - hasActiveSession: - Boolean(currentSessionId) && (!isOnHomepage || Boolean(paramSessionId)), - isLoading: isLoading || isCreatingSession, - isCreatingSession, - sessions, - currentSessionId: urlSessionId, - handleOpenDrawer, - handleCloseDrawer, - handleDrawerOpenChange, - handleNewChatClick, - handleSessionClick, - hasNextPage, - isFetchingNextPage: isSessionsFetching, - fetchNextPage, - }; -} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/useShellSessionList.ts b/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/useShellSessionList.ts deleted file mode 100644 index fb39a11096..0000000000 --- a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/useShellSessionList.ts +++ /dev/null @@ -1,113 +0,0 @@ -import { getGetV2ListSessionsQueryKey } from "@/app/api/__generated__/endpoints/chat/chat"; -import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessionDetailResponse"; -import type { 
SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse"; -import { useChatStore } from "@/components/contextual/Chat/chat-store"; -import { useQueryClient } from "@tanstack/react-query"; -import { useEffect, useMemo, useRef } from "react"; -import { useSessionsPagination } from "./components/SessionsList/useSessionsPagination"; -import { - convertSessionDetailToSummary, - filterVisibleSessions, - mergeCurrentSessionIntoList, -} from "./helpers"; - -interface UseShellSessionListArgs { - paginationEnabled: boolean; - currentSessionId: string | null; - currentSessionData: SessionDetailResponse | null | undefined; - isOnHomepage: boolean; - paramSessionId: string | null; -} - -export function useShellSessionList({ - paginationEnabled, - currentSessionId, - currentSessionData, - isOnHomepage, - paramSessionId, -}: UseShellSessionListArgs) { - const queryClient = useQueryClient(); - const onStreamComplete = useChatStore((s) => s.onStreamComplete); - - const { - sessions: accumulatedSessions, - isLoading: isSessionsLoading, - isFetching: isSessionsFetching, - hasNextPage, - fetchNextPage, - reset: resetPagination, - } = useSessionsPagination({ - enabled: paginationEnabled, - }); - - const recentlyCreatedSessionsRef = useRef< - Map - >(new Map()); - - useEffect(() => { - if (isOnHomepage && !paramSessionId) { - queryClient.invalidateQueries({ - queryKey: getGetV2ListSessionsQueryKey(), - }); - } - }, [isOnHomepage, paramSessionId, queryClient]); - - useEffect(() => { - if (currentSessionId && currentSessionData) { - const isNewSession = - currentSessionData.updated_at === currentSessionData.created_at; - const isNotInAccumulated = !accumulatedSessions.some( - (s) => s.id === currentSessionId, - ); - if (isNewSession || isNotInAccumulated) { - const summary = convertSessionDetailToSummary(currentSessionData); - recentlyCreatedSessionsRef.current.set(currentSessionId, summary); - } - } - }, [currentSessionId, currentSessionData, accumulatedSessions]); - - useEffect(() => { - for (const sessionId of recentlyCreatedSessionsRef.current.keys()) { - if (accumulatedSessions.some((s) => s.id === sessionId)) { - recentlyCreatedSessionsRef.current.delete(sessionId); - } - } - }, [accumulatedSessions]); - - useEffect(() => { - const unsubscribe = onStreamComplete(() => { - queryClient.invalidateQueries({ - queryKey: getGetV2ListSessionsQueryKey(), - }); - }); - return unsubscribe; - }, [onStreamComplete, queryClient]); - - const sessions = useMemo( - () => - mergeCurrentSessionIntoList( - accumulatedSessions, - currentSessionId, - currentSessionData, - recentlyCreatedSessionsRef.current, - ), - [accumulatedSessions, currentSessionId, currentSessionData], - ); - - const visibleSessions = useMemo( - () => filterVisibleSessions(sessions), - [sessions], - ); - - const isLoading = isSessionsLoading && accumulatedSessions.length === 0; - - return { - sessions: visibleSessions, - isLoading, - isSessionsFetching, - hasNextPage, - fetchNextPage, - resetPagination, - recentlyCreatedSessionsRef, - }; -} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/EmptySession/EmptySession.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/components/EmptySession/EmptySession.tsx new file mode 100644 index 0000000000..cbbe6c570e --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/components/EmptySession/EmptySession.tsx @@ -0,0 +1,111 @@ +"use client"; + +import { ChatInput } from "@/app/(platform)/copilot/components/ChatInput/ChatInput"; +import 
{ Button } from "@/components/atoms/Button/Button";
+import { Text } from "@/components/atoms/Text/Text";
+import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
+import { SpinnerGapIcon } from "@phosphor-icons/react";
+import { motion } from "framer-motion";
+import { useEffect, useState } from "react";
+import {
+  getGreetingName,
+  getInputPlaceholder,
+  getQuickActions,
+} from "./helpers";
+
+interface Props {
+  inputLayoutId: string;
+  isCreatingSession: boolean;
+  onCreateSession: () => void | Promise<void>;
+  onSend: (message: string) => void | Promise<void>;
+}
+
+export function EmptySession({
+  inputLayoutId,
+  isCreatingSession,
+  onSend,
+}: Props) {
+  const { user } = useSupabase();
+  const greetingName = getGreetingName(user);
+  const quickActions = getQuickActions();
+  const [loadingAction, setLoadingAction] = useState<string | null>(null);
+  const [inputPlaceholder, setInputPlaceholder] = useState(
+    getInputPlaceholder(),
+  );
+
+  useEffect(() => {
+    setInputPlaceholder(getInputPlaceholder(window.innerWidth));
+  }, [window.innerWidth]);
+
+  async function handleQuickActionClick(action: string) {
+    if (isCreatingSession || loadingAction) return;
+
+    setLoadingAction(action);
+    try {
+      await onSend(action);
+    } finally {
+      setLoadingAction(null);
+    }
+  }
+
+  return (
+    
+ +
+ + Hey, {greetingName} + + + Tell me about your work — I'll find what to automate. + + +
+ + + +
+
+ +
+ {quickActions.map((action) => ( + + ))} +
+
+
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/helpers.ts b/autogpt_platform/frontend/src/app/(platform)/copilot/components/EmptySession/helpers.ts similarity index 72% rename from autogpt_platform/frontend/src/app/(platform)/copilot/helpers.ts rename to autogpt_platform/frontend/src/app/(platform)/copilot/components/EmptySession/helpers.ts index c6e479f896..f6f8980fd4 100644 --- a/autogpt_platform/frontend/src/app/(platform)/copilot/helpers.ts +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/components/EmptySession/helpers.ts @@ -1,6 +1,26 @@ -import type { User } from "@supabase/supabase-js"; +import { User } from "@supabase/supabase-js"; -export function getGreetingName(user?: User | null): string { +export function getInputPlaceholder(width?: number) { + if (!width) return "What's your role and what eats up most of your day?"; + + if (width < 500) { + return "I'm a chef and I hate..."; + } + if (width <= 1080) { + return "What's your role and what eats up most of your day?"; + } + return "What's your role and what eats up most of your day? e.g. 'I'm a recruiter and I hate...'"; +} + +export function getQuickActions() { + return [ + "I don't know where to start, just ask me stuff", + "I do the same thing every week and it's killing me", + "Help me find where I'm wasting my time", + ]; +} + +export function getGreetingName(user?: User | null) { if (!user) return "there"; const metadata = user.user_metadata as Record | undefined; const fullName = metadata?.full_name; @@ -16,30 +36,3 @@ export function getGreetingName(user?: User | null): string { } return "there"; } - -export function buildCopilotChatUrl(prompt: string): string { - const trimmed = prompt.trim(); - if (!trimmed) return "/copilot/chat"; - const encoded = encodeURIComponent(trimmed); - return `/copilot/chat?prompt=${encoded}`; -} - -export function getQuickActions(): string[] { - return [ - "I don't know where to start, just ask me stuff", - "I do the same thing every week and it's killing me", - "Help me find where I'm wasting my time", - ]; -} - -export function getInputPlaceholder(width?: number) { - if (!width) return "What's your role and what eats up most of your day?"; - - if (width < 500) { - return "I'm a chef and I hate..."; - } - if (width <= 1080) { - return "What's your role and what eats up most of your day?"; - } - return "What's your role and what eats up most of your day? e.g. 
'I'm a recruiter and I hate...'"; -} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/MobileDrawer/MobileDrawer.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/components/MobileDrawer/MobileDrawer.tsx new file mode 100644 index 0000000000..80ccfc9c03 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/components/MobileDrawer/MobileDrawer.tsx @@ -0,0 +1,140 @@ +import type { SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse"; +import { Button } from "@/components/atoms/Button/Button"; +import { Text } from "@/components/atoms/Text/Text"; +import { scrollbarStyles } from "@/components/styles/scrollbars"; +import { cn } from "@/lib/utils"; +import { PlusIcon, SpinnerGapIcon, X } from "@phosphor-icons/react"; +import { Drawer } from "vaul"; + +interface Props { + isOpen: boolean; + sessions: SessionSummaryResponse[]; + currentSessionId: string | null; + isLoading: boolean; + onSelectSession: (sessionId: string) => void; + onNewChat: () => void; + onClose: () => void; + onOpenChange: (open: boolean) => void; +} + +function formatDate(dateString: string) { + const date = new Date(dateString); + const now = new Date(); + const diffMs = now.getTime() - date.getTime(); + const diffDays = Math.floor(diffMs / (1000 * 60 * 60 * 24)); + + if (diffDays === 0) return "Today"; + if (diffDays === 1) return "Yesterday"; + if (diffDays < 7) return `${diffDays} days ago`; + + const day = date.getDate(); + const ordinal = + day % 10 === 1 && day !== 11 + ? "st" + : day % 10 === 2 && day !== 12 + ? "nd" + : day % 10 === 3 && day !== 13 + ? "rd" + : "th"; + const month = date.toLocaleDateString("en-US", { month: "short" }); + const year = date.getFullYear(); + + return `${day}${ordinal} ${month} ${year}`; +} + +export function MobileDrawer({ + isOpen, + sessions, + currentSessionId, + isLoading, + onSelectSession, + onNewChat, + onClose, + onOpenChange, +}: Props) { + return ( + + + + +
+
+ + Your chats + + +
+
+
+ {isLoading ? ( +
+ +
+ ) : sessions.length === 0 ? ( +

+ No conversations yet +

+ ) : ( + sessions.map((session) => ( + + )) + )} +
+ {currentSessionId && ( +
+ +
+ )} +
+
+
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/MobileHeader/MobileHeader.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/components/MobileHeader/MobileHeader.tsx similarity index 100% rename from autogpt_platform/frontend/src/app/(platform)/copilot/components/CopilotShell/components/MobileHeader/MobileHeader.tsx rename to autogpt_platform/frontend/src/app/(platform)/copilot/components/MobileHeader/MobileHeader.tsx diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/MorphingTextAnimation/MorphingTextAnimation.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/components/MorphingTextAnimation/MorphingTextAnimation.tsx new file mode 100644 index 0000000000..aac615bb2b --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/components/MorphingTextAnimation/MorphingTextAnimation.tsx @@ -0,0 +1,54 @@ +import { cn } from "@/lib/utils"; +import { AnimatePresence, motion } from "framer-motion"; + +interface Props { + text: string; + className?: string; +} + +export function MorphingTextAnimation({ text, className }: Props) { + const letters = text.split(""); + + return ( +
+ + + + {letters.map((char, index) => ( + + {char === " " ? "\u00A0" : char} + + ))} + + + +
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/OrbitLoader/OrbitLoader.module.css b/autogpt_platform/frontend/src/app/(platform)/copilot/components/OrbitLoader/OrbitLoader.module.css new file mode 100644 index 0000000000..cd04402348 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/components/OrbitLoader/OrbitLoader.module.css @@ -0,0 +1,69 @@ +.loader { + position: relative; + animation: rotate 1s infinite; +} + +.loader::before, +.loader::after { + border-radius: 50%; + content: ""; + display: block; + /* 40% of container size */ + height: 40%; + width: 40%; +} + +.loader::before { + animation: ball1 1s infinite; + background-color: #a1a1aa; /* zinc-400 */ + box-shadow: calc(var(--spacing)) 0 0 #18181b; /* zinc-900 */ + margin-bottom: calc(var(--gap)); +} + +.loader::after { + animation: ball2 1s infinite; + background-color: #18181b; /* zinc-900 */ + box-shadow: calc(var(--spacing)) 0 0 #a1a1aa; /* zinc-400 */ +} + +@keyframes rotate { + 0% { + transform: rotate(0deg) scale(0.8); + } + 50% { + transform: rotate(360deg) scale(1.2); + } + 100% { + transform: rotate(720deg) scale(0.8); + } +} + +@keyframes ball1 { + 0% { + box-shadow: calc(var(--spacing)) 0 0 #18181b; + } + 50% { + box-shadow: 0 0 0 #18181b; + margin-bottom: 0; + transform: translate(calc(var(--spacing) / 2), calc(var(--spacing) / 2)); + } + 100% { + box-shadow: calc(var(--spacing)) 0 0 #18181b; + margin-bottom: calc(var(--gap)); + } +} + +@keyframes ball2 { + 0% { + box-shadow: calc(var(--spacing)) 0 0 #a1a1aa; + } + 50% { + box-shadow: 0 0 0 #a1a1aa; + margin-top: calc(var(--ball-size) * -1); + transform: translate(calc(var(--spacing) / 2), calc(var(--spacing) / 2)); + } + 100% { + box-shadow: calc(var(--spacing)) 0 0 #a1a1aa; + margin-top: 0; + } +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/OrbitLoader/OrbitLoader.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/components/OrbitLoader/OrbitLoader.tsx new file mode 100644 index 0000000000..cc47c16132 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/components/OrbitLoader/OrbitLoader.tsx @@ -0,0 +1,28 @@ +import { cn } from "@/lib/utils"; +import styles from "./OrbitLoader.module.css"; + +interface Props { + size?: number; + className?: string; +} + +export function OrbitLoader({ size = 24, className }: Props) { + const ballSize = Math.round(size * 0.4); + const spacing = Math.round(size * 0.6); + const gap = Math.round(size * 0.2); + + return ( +
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/ProgressBar/ProgressBar.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/components/ProgressBar/ProgressBar.tsx new file mode 100644 index 0000000000..d251b08640 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/components/ProgressBar/ProgressBar.tsx @@ -0,0 +1,26 @@ +import { cn } from "@/lib/utils"; + +interface Props { + value: number; + label?: string; + className?: string; +} + +export function ProgressBar({ value, label, className }: Props) { + const clamped = Math.min(100, Math.max(0, value)); + + return ( +
+
+ {label ?? "Working on it..."} + {Math.round(clamped)}% +
+
+
+
+
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/PulseLoader/PulseLoader.module.css b/autogpt_platform/frontend/src/app/(platform)/copilot/components/PulseLoader/PulseLoader.module.css new file mode 100644 index 0000000000..77ab5ae931 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/components/PulseLoader/PulseLoader.module.css @@ -0,0 +1,34 @@ +.loader { + position: relative; + display: inline-block; + flex-shrink: 0; +} + +.loader::before, +.loader::after { + content: ""; + box-sizing: border-box; + width: 100%; + height: 100%; + border-radius: 50%; + background: currentColor; + position: absolute; + left: 0; + top: 0; + animation: ripple 2s linear infinite; +} + +.loader::after { + animation-delay: 1s; +} + +@keyframes ripple { + 0% { + transform: scale(0); + opacity: 1; + } + 100% { + transform: scale(1); + opacity: 0; + } +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/PulseLoader/PulseLoader.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/components/PulseLoader/PulseLoader.tsx new file mode 100644 index 0000000000..599874daaa --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/components/PulseLoader/PulseLoader.tsx @@ -0,0 +1,16 @@ +import { cn } from "@/lib/utils"; +import styles from "./PulseLoader.module.css"; + +interface Props { + size?: number; + className?: string; +} + +export function PulseLoader({ size = 24, className }: Props) { + return ( +
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/SpinnerLoader/SpinnerLoader.module.css b/autogpt_platform/frontend/src/app/(platform)/copilot/components/SpinnerLoader/SpinnerLoader.module.css new file mode 100644 index 0000000000..ee456bfac4 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/components/SpinnerLoader/SpinnerLoader.module.css @@ -0,0 +1,57 @@ +.loader { + position: relative; + display: inline-block; + flex-shrink: 0; + transform: rotateZ(45deg); + perspective: 1000px; + border-radius: 50%; + color: currentColor; +} + +.loader::before, +.loader::after { + content: ""; + display: block; + position: absolute; + top: 0; + left: 0; + width: inherit; + height: inherit; + border-radius: 50%; + transform: rotateX(70deg); + animation: spin 1s linear infinite; +} + +.loader::after { + color: var(--spinner-accent, #a855f7); + transform: rotateY(70deg); + animation-delay: 0.4s; +} + +@keyframes spin { + 0%, + 100% { + box-shadow: 0.2em 0 0 0 currentColor; + } + 12% { + box-shadow: 0.2em 0.2em 0 0 currentColor; + } + 25% { + box-shadow: 0 0.2em 0 0 currentColor; + } + 37% { + box-shadow: -0.2em 0.2em 0 0 currentColor; + } + 50% { + box-shadow: -0.2em 0 0 0 currentColor; + } + 62% { + box-shadow: -0.2em -0.2em 0 0 currentColor; + } + 75% { + box-shadow: 0 -0.2em 0 0 currentColor; + } + 87% { + box-shadow: 0.2em -0.2em 0 0 currentColor; + } +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/SpinnerLoader/SpinnerLoader.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/components/SpinnerLoader/SpinnerLoader.tsx new file mode 100644 index 0000000000..d921b5f778 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/components/SpinnerLoader/SpinnerLoader.tsx @@ -0,0 +1,16 @@ +import { cn } from "@/lib/utils"; +import styles from "./SpinnerLoader.module.css"; + +interface Props { + size?: number; + className?: string; +} + +export function SpinnerLoader({ size = 24, className }: Props) { + return ( +
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/ToolAccordion/AccordionContent.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/components/ToolAccordion/AccordionContent.tsx new file mode 100644 index 0000000000..987941eee1 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/components/ToolAccordion/AccordionContent.tsx @@ -0,0 +1,235 @@ +import { Link } from "@/components/atoms/Link/Link"; +import { Text } from "@/components/atoms/Text/Text"; +import { cn } from "@/lib/utils"; + +/* ------------------------------------------------------------------ */ +/* Layout */ +/* ------------------------------------------------------------------ */ + +export function ContentGrid({ + children, + className, +}: { + children: React.ReactNode; + className?: string; +}) { + return
{children}
; +} + +/* ------------------------------------------------------------------ */ +/* Card */ +/* ------------------------------------------------------------------ */ + +export function ContentCard({ + children, + className, +}: { + children: React.ReactNode; + className?: string; +}) { + return ( +
+
{children}
+
+ ); +} + +/** Flex row with a left content area (`children`) and an optional right‑side `action`. */ +export function ContentCardHeader({ + children, + action, + className, +}: { + children: React.ReactNode; + action?: React.ReactNode; + className?: string; +}) { + return ( +
+
{children}
+ {action} +
+ ); +} + +export function ContentCardTitle({ + children, + className, +}: { + children: React.ReactNode; + className?: string; +}) { + return ( + + {children} + + ); +} + +export function ContentCardSubtitle({ + children, + className, +}: { + children: React.ReactNode; + className?: string; +}) { + return ( + + {children} + + ); +} + +export function ContentCardDescription({ + children, + className, +}: { + children: React.ReactNode; + className?: string; +}) { + return ( + + {children} + + ); +} + +/* ------------------------------------------------------------------ */ +/* Text */ +/* ------------------------------------------------------------------ */ + +export function ContentMessage({ + children, + className, +}: { + children: React.ReactNode; + className?: string; +}) { + return ( + + {children} + + ); +} + +export function ContentHint({ + children, + className, +}: { + children: React.ReactNode; + className?: string; +}) { + return ( + + {children} + + ); +} + +/* ------------------------------------------------------------------ */ +/* Code / data */ +/* ------------------------------------------------------------------ */ + +export function ContentCodeBlock({ + children, + className, +}: { + children: React.ReactNode; + className?: string; +}) { + return ( +
+      {children}
+    
+ ); +} + +/* ------------------------------------------------------------------ */ +/* Inline elements */ +/* ------------------------------------------------------------------ */ + +export function ContentBadge({ + children, + className, +}: { + children: React.ReactNode; + className?: string; +}) { + return ( + + {children} + + ); +} + +export function ContentLink({ + href, + children, + className, + ...rest +}: Omit, "className"> & { + className?: string; +}) { + return ( + + {children} + + ); +} + +/* ------------------------------------------------------------------ */ +/* Lists */ +/* ------------------------------------------------------------------ */ + +export function ContentSuggestionsList({ + items, + max = 5, + className, +}: { + items: string[]; + max?: number; + className?: string; +}) { + if (items.length === 0) return null; + return ( +
    + {items.slice(0, max).map((s) => ( +
  • {s}
  • + ))} +
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/ToolAccordion/ToolAccordion.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/components/ToolAccordion/ToolAccordion.tsx new file mode 100644 index 0000000000..e53df15e6c --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/components/ToolAccordion/ToolAccordion.tsx @@ -0,0 +1,102 @@ +"use client"; + +import { cn } from "@/lib/utils"; +import { CaretDownIcon } from "@phosphor-icons/react"; +import { AnimatePresence, motion, useReducedMotion } from "framer-motion"; +import { useId } from "react"; +import { useToolAccordion } from "./useToolAccordion"; + +interface Props { + icon: React.ReactNode; + title: React.ReactNode; + titleClassName?: string; + description?: React.ReactNode; + children: React.ReactNode; + className?: string; + defaultExpanded?: boolean; + expanded?: boolean; + onExpandedChange?: (expanded: boolean) => void; +} + +export function ToolAccordion({ + icon, + title, + titleClassName, + description, + children, + className, + defaultExpanded, + expanded, + onExpandedChange, +}: Props) { + const shouldReduceMotion = useReducedMotion(); + const contentId = useId(); + const { isExpanded, toggle } = useToolAccordion({ + expanded, + defaultExpanded, + onExpandedChange, + }); + + return ( +
+ + + + {isExpanded && ( + +
{children}
+
+ )} +
+
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/components/ToolAccordion/useToolAccordion.ts b/autogpt_platform/frontend/src/app/(platform)/copilot/components/ToolAccordion/useToolAccordion.ts new file mode 100644 index 0000000000..bc2a177e8d --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/components/ToolAccordion/useToolAccordion.ts @@ -0,0 +1,32 @@ +import { useState } from "react"; + +interface UseToolAccordionOptions { + expanded?: boolean; + defaultExpanded?: boolean; + onExpandedChange?: (expanded: boolean) => void; +} + +interface UseToolAccordionResult { + isExpanded: boolean; + toggle: () => void; +} + +export function useToolAccordion({ + expanded, + defaultExpanded = false, + onExpandedChange, +}: UseToolAccordionOptions): UseToolAccordionResult { + const [uncontrolledExpanded, setUncontrolledExpanded] = + useState(defaultExpanded); + + const isControlled = typeof expanded === "boolean"; + const isExpanded = isControlled ? expanded : uncontrolledExpanded; + + function toggle() { + const next = !isExpanded; + if (!isControlled) setUncontrolledExpanded(next); + onExpandedChange?.(next); + } + + return { isExpanded, toggle }; +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/copilot-page-store.ts b/autogpt_platform/frontend/src/app/(platform)/copilot/copilot-page-store.ts deleted file mode 100644 index 9fc97a14e3..0000000000 --- a/autogpt_platform/frontend/src/app/(platform)/copilot/copilot-page-store.ts +++ /dev/null @@ -1,56 +0,0 @@ -"use client"; - -import { create } from "zustand"; - -interface CopilotStoreState { - isStreaming: boolean; - isSwitchingSession: boolean; - isCreatingSession: boolean; - isInterruptModalOpen: boolean; - pendingAction: (() => void) | null; -} - -interface CopilotStoreActions { - setIsStreaming: (isStreaming: boolean) => void; - setIsSwitchingSession: (isSwitchingSession: boolean) => void; - setIsCreatingSession: (isCreating: boolean) => void; - openInterruptModal: (onConfirm: () => void) => void; - confirmInterrupt: () => void; - cancelInterrupt: () => void; -} - -type CopilotStore = CopilotStoreState & CopilotStoreActions; - -export const useCopilotStore = create((set, get) => ({ - isStreaming: false, - isSwitchingSession: false, - isCreatingSession: false, - isInterruptModalOpen: false, - pendingAction: null, - - setIsStreaming(isStreaming) { - set({ isStreaming }); - }, - - setIsSwitchingSession(isSwitchingSession) { - set({ isSwitchingSession }); - }, - - setIsCreatingSession(isCreatingSession) { - set({ isCreatingSession }); - }, - - openInterruptModal(onConfirm) { - set({ isInterruptModalOpen: true, pendingAction: onConfirm }); - }, - - confirmInterrupt() { - const { pendingAction } = get(); - set({ isInterruptModalOpen: false, pendingAction: null }); - if (pendingAction) pendingAction(); - }, - - cancelInterrupt() { - set({ isInterruptModalOpen: false, pendingAction: null }); - }, -})); diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/helpers/convertChatSessionToUiMessages.ts b/autogpt_platform/frontend/src/app/(platform)/copilot/helpers/convertChatSessionToUiMessages.ts new file mode 100644 index 0000000000..a3f2bc28bf --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/helpers/convertChatSessionToUiMessages.ts @@ -0,0 +1,128 @@ +import type { UIMessage, UIDataTypes, UITools } from "ai"; + +interface SessionChatMessage { + role: string; + content: string | null; + tool_call_id: string | null; + tool_calls: unknown[] | null; +} + 
+function coerceSessionChatMessages(
+  rawMessages: unknown[],
+): SessionChatMessage[] {
+  return rawMessages
+    .map((m) => {
+      if (!m || typeof m !== "object") return null;
+      const msg = m as Record<string, unknown>;
+
+      const role = typeof msg.role === "string" ? msg.role : null;
+      if (!role) return null;
+
+      return {
+        role,
+        content:
+          typeof msg.content === "string"
+            ? msg.content
+            : msg.content == null
+              ? null
+              : String(msg.content),
+        tool_call_id:
+          typeof msg.tool_call_id === "string"
+            ? msg.tool_call_id
+            : msg.tool_call_id == null
+              ? null
+              : String(msg.tool_call_id),
+        tool_calls: Array.isArray(msg.tool_calls) ? msg.tool_calls : null,
+      };
+    })
+    .filter((m): m is SessionChatMessage => m !== null);
+}
+
+function safeJsonParse(value: string): unknown {
+  try {
+    return JSON.parse(value) as unknown;
+  } catch {
+    return value;
+  }
+}
+
+function toToolInput(rawArguments: unknown): unknown {
+  if (typeof rawArguments === "string") {
+    const trimmed = rawArguments.trim();
+    return trimmed ? safeJsonParse(trimmed) : {};
+  }
+  if (rawArguments && typeof rawArguments === "object") return rawArguments;
+  return {};
+}
+
+export function convertChatSessionMessagesToUiMessages(
+  sessionId: string,
+  rawMessages: unknown[],
+): UIMessage[] {
+  const messages = coerceSessionChatMessages(rawMessages);
+  const toolOutputsByCallId = new Map();
+
+  for (const msg of messages) {
+    if (msg.role !== "tool") continue;
+    if (!msg.tool_call_id) continue;
+    if (msg.content == null) continue;
+    toolOutputsByCallId.set(msg.tool_call_id, msg.content);
+  }
+
+  const uiMessages: UIMessage[] = [];
+
+  messages.forEach((msg, index) => {
+    if (msg.role === "tool") return;
+    if (msg.role !== "user" && msg.role !== "assistant") return;
+
+    const parts: UIMessage["parts"] = [];
+
+    if (typeof msg.content === "string" && msg.content.trim()) {
+      parts.push({ type: "text", text: msg.content, state: "done" });
+    }
+
+    if (msg.role === "assistant" && Array.isArray(msg.tool_calls)) {
+      for (const rawToolCall of msg.tool_calls) {
+        if (!rawToolCall || typeof rawToolCall !== "object") continue;
+        const toolCall = rawToolCall as {
+          id?: unknown;
+          function?: { name?: unknown; arguments?: unknown };
+        };
+
+        const toolCallId = String(toolCall.id ?? "").trim();
+        const toolName = String(toolCall.function?.name ?? "").trim();
+        if (!toolCallId || !toolName) continue;
+
+        const input = toToolInput(toolCall.function?.arguments);
+        const output = toolOutputsByCallId.get(toolCallId);
+
+        if (output !== undefined) {
+          parts.push({
+            type: `tool-${toolName}`,
+            toolCallId,
+            state: "output-available",
+            input,
+            output: typeof output === "string" ?
safeJsonParse(output) : output,
+          });
+        } else {
+          parts.push({
+            type: `tool-${toolName}`,
+            toolCallId,
+            state: "input-available",
+            input,
+          });
+        }
+      }
+    }
+
+    if (parts.length === 0) return;
+
+    uiMessages.push({
+      id: `${sessionId}-${index}`,
+      role: msg.role,
+      parts,
+    });
+  });
+
+  return uiMessages;
+}
diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/useCopilotSessionId.ts b/autogpt_platform/frontend/src/app/(platform)/copilot/hooks/Untitled
similarity index 99%
rename from autogpt_platform/frontend/src/app/(platform)/copilot/useCopilotSessionId.ts
rename to autogpt_platform/frontend/src/app/(platform)/copilot/hooks/Untitled
index 87f9b7d3ae..13769eb726 100644
--- a/autogpt_platform/frontend/src/app/(platform)/copilot/useCopilotSessionId.ts
+++ b/autogpt_platform/frontend/src/app/(platform)/copilot/hooks/Untitled
@@ -7,4 +7,4 @@ export function useCopilotSessionId() {
   );
   return { urlSessionId, setUrlSessionId };
-}
+}
\ No newline at end of file
diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ToolCallMessage/useAsymptoticProgress.ts b/autogpt_platform/frontend/src/app/(platform)/copilot/hooks/useAsymptoticProgress.ts
similarity index 83%
rename from autogpt_platform/frontend/src/components/contextual/Chat/components/ToolCallMessage/useAsymptoticProgress.ts
rename to autogpt_platform/frontend/src/app/(platform)/copilot/hooks/useAsymptoticProgress.ts
index cf1b89e7c4..408ec74175 100644
--- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ToolCallMessage/useAsymptoticProgress.ts
+++ b/autogpt_platform/frontend/src/app/(platform)/copilot/hooks/useAsymptoticProgress.ts
@@ -5,17 +5,16 @@ import { useEffect, useRef, useState } from "react";
  * asymptotically approaching but never reaching the max value.
  *
  * Uses a half-life formula: progress = max * (1 - 0.5^(time/halfLife))
- * This creates the "game loading bar" effect where:
+ * This creates a "loading bar" effect where:
  * - 50% is reached at halfLifeSeconds
  * - 75% is reached at 2 * halfLifeSeconds
  * - 87.5% is reached at 3 * halfLifeSeconds
- * - and so on...
  *
  * @param isActive - Whether the progress should be animating
  * @param halfLifeSeconds - Time in seconds to reach 50% progress (default: 30)
  * @param maxProgress - Maximum progress value to approach (default: 100)
  * @param intervalMs - Update interval in milliseconds (default: 100)
- * @returns Current progress value (0-maxProgress)
+ * @returns Current progress value (0–maxProgress)
  */
 export function useAsymptoticProgress(
   isActive: boolean,
@@ -35,8 +34,6 @@ export function useAsymptoticProgress(
     const interval = setInterval(() => {
       elapsedTimeRef.current += intervalMs / 1000;
-      // Half-life approach: progress = max * (1 - 0.5^(time/halfLife))
-      // At t=halfLife: 50%, at t=2*halfLife: 75%, at t=3*halfLife: 87.5%, etc.
const newProgress = maxProgress * (1 - Math.pow(0.5, elapsedTimeRef.current / halfLifeSeconds)); diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/layout.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/layout.tsx deleted file mode 100644 index 876e5accfb..0000000000 --- a/autogpt_platform/frontend/src/app/(platform)/copilot/layout.tsx +++ /dev/null @@ -1,13 +0,0 @@ -"use client"; -import { FeatureFlagPage } from "@/services/feature-flags/FeatureFlagPage"; -import { Flag } from "@/services/feature-flags/use-get-flag"; -import { type ReactNode } from "react"; -import { CopilotShell } from "./components/CopilotShell/CopilotShell"; - -export default function CopilotLayout({ children }: { children: ReactNode }) { - return ( - - {children} - - ); -} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/page.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/page.tsx index 542173a99c..3e4a81dd51 100644 --- a/autogpt_platform/frontend/src/app/(platform)/copilot/page.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/page.tsx @@ -1,149 +1,13 @@ "use client"; -import { Button } from "@/components/atoms/Button/Button"; -import { Skeleton } from "@/components/atoms/Skeleton/Skeleton"; -import { Text } from "@/components/atoms/Text/Text"; -import { Chat } from "@/components/contextual/Chat/Chat"; -import { ChatInput } from "@/components/contextual/Chat/components/ChatInput/ChatInput"; -import { Dialog } from "@/components/molecules/Dialog/Dialog"; -import { useEffect, useState } from "react"; -import { useCopilotStore } from "./copilot-page-store"; -import { getInputPlaceholder } from "./helpers"; -import { useCopilotPage } from "./useCopilotPage"; - -export default function CopilotPage() { - const { state, handlers } = useCopilotPage(); - const isInterruptModalOpen = useCopilotStore((s) => s.isInterruptModalOpen); - const confirmInterrupt = useCopilotStore((s) => s.confirmInterrupt); - const cancelInterrupt = useCopilotStore((s) => s.cancelInterrupt); - - const [inputPlaceholder, setInputPlaceholder] = useState( - getInputPlaceholder(), - ); - - useEffect(() => { - const handleResize = () => { - setInputPlaceholder(getInputPlaceholder(window.innerWidth)); - }; - - handleResize(); - - window.addEventListener("resize", handleResize); - return () => window.removeEventListener("resize", handleResize); - }, []); - - const { greetingName, quickActions, isLoading, hasSession, initialPrompt } = - state; - - const { - handleQuickAction, - startChatWithPrompt, - handleSessionNotFound, - handleStreamingChange, - } = handlers; - - if (hasSession) { - return ( -
- - { - if (!open) cancelInterrupt(); - }, - }} - onClose={cancelInterrupt} - > - -
- - The current chat response will be interrupted. Are you sure you - want to continue? - - - - - -
-
-
-
- ); - } +import { FeatureFlagPage } from "@/services/feature-flags/FeatureFlagPage"; +import { Flag } from "@/services/feature-flags/use-get-flag"; +import { CopilotPage } from "./CopilotPage"; +export default function Page() { return ( -
-
- {isLoading ? ( -
- - -
- -
-
- {Array.from({ length: 4 }).map((_, i) => ( - - ))} -
-
- ) : ( - <> -
- - Hey, {greetingName} - - - Tell me about your work — I'll find what to automate. - - -
- -
-
-
- {quickActions.map((action) => ( - - ))} -
- - )} -
-
+ + + ); } diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/styleguide/page.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/styleguide/page.tsx new file mode 100644 index 0000000000..6030665f1c --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/styleguide/page.tsx @@ -0,0 +1,1533 @@ +"use client"; + +import { ResponseType } from "@/app/api/__generated__/models/responseType"; +import { + Conversation, + ConversationContent, +} from "@/components/ai-elements/conversation"; +import { + Message, + MessageContent, + MessageResponse, +} from "@/components/ai-elements/message"; +import { Text } from "@/components/atoms/Text/Text"; +import { CopilotChatActionsProvider } from "../components/CopilotChatActionsProvider/CopilotChatActionsProvider"; +import { CreateAgentTool } from "../tools/CreateAgent/CreateAgent"; +import { EditAgentTool } from "../tools/EditAgent/EditAgent"; +import { FindAgentsTool } from "../tools/FindAgents/FindAgents"; +import { FindBlocksTool } from "../tools/FindBlocks/FindBlocks"; +import { RunAgentTool } from "../tools/RunAgent/RunAgent"; +import { RunBlockTool } from "../tools/RunBlock/RunBlock"; +import { SearchDocsTool } from "../tools/SearchDocs/SearchDocs"; +import { ViewAgentOutputTool } from "../tools/ViewAgentOutput/ViewAgentOutput"; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +function slugify(text: string) { + return text + .toLowerCase() + .replace(/[^a-z0-9]+/g, "-") + .replace(/(^-|-$)/g, ""); +} + +const SECTIONS = [ + "Messages", + "Tool: Find Blocks", + "Tool: Find Agents (Marketplace)", + "Tool: Find Agents (Library)", + "Tool: Search Docs", + "Tool: Get Doc Page", + "Tool: Run Block", + "Tool: Run Agent", + "Tool: Schedule Agent", + "Tool: Create Agent", + "Tool: Edit Agent", + "Tool: View Agent Output", + "Full Conversation Example", +] as const; + +function Section({ + title, + children, +}: { + title: string; + children: React.ReactNode; +}) { + return ( +
+

+ {title} +

+
{children}
+
+ ); +} + +function SubSection({ + label, + children, +}: { + label: string; + children: React.ReactNode; +}) { + return ( +
+

+ {label} +

+ {children} +
+ ); +} + +// --------------------------------------------------------------------------- +// Mock data factories +// --------------------------------------------------------------------------- + +let _id = 0; +function uid() { + return `sg-${++_id}`; +} + +// --------------------------------------------------------------------------- +// Page +// --------------------------------------------------------------------------- + +export default function StyleguidePage() { + return ( + alert(`onSend: ${msg}`)}> +
+ {/* Sidebar */} + + + {/* Content */} +
+
+ Copilot Styleguide +

+ Static showcase of all chat message types, tool states & + variants. +

+ + {/* ============================================================= */} + {/* MESSAGE TYPES */} + {/* ============================================================= */} + +
+ + + + + Find me an agent that can summarize YouTube videos + + + + + + + + + + I found a few agents that can help with YouTube video + summarization. Let me search for the best options for you. + + + + + + + + + + {`Here's what I found:\n\n1. **YouTube Summarizer** — Extracts key points from any YouTube video\n2. **Video Digest** — Creates bullet-point summaries with timestamps\n\n> Both agents support videos up to 2 hours long.\n\n\`\`\`python\n# Example usage\nresult = agent.run(url="https://youtube.com/watch?v=...")\nprint(result.summary)\n\`\`\``} + + + + + + + + + + Thinking... + + + + + + +
+ Error: Connection timed out. Please try again. +
+
+
+ + {/* ============================================================= */} + {/* FIND BLOCKS */} + {/* ============================================================= */} + +
+ + + + + + + + + + + + + + + +
+ + {/* ============================================================= */} + {/* FIND AGENTS (Marketplace) */} + {/* ============================================================= */} + +
+ + + + + + + + + + + + + + + +
+ + {/* ============================================================= */} + {/* FIND AGENTS (Library) */} + {/* ============================================================= */} + +
+ + + + + + + +
+ + {/* ============================================================= */} + {/* SEARCH DOCS */} + {/* ============================================================= */} + +
+ + + + + + + + + + + + + + + +
+ + {/* ============================================================= */} + {/* GET DOC PAGE */} + {/* ============================================================= */} + +
+ + + + + + + +
+ + {/* ============================================================= */} + {/* RUN BLOCK */} + {/* ============================================================= */} + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Overall, this was our strongest quarter to date.\n\n| Metric | Q3 | Q4 | Change |\n|--------|-----|-----|--------|\n| Revenue | $2.1M | $2.6M | +23% |\n| Users | 10k | 20k | +100% |\n| NPS | 72 | 78 | +6 |", + ], + }, + }, + }} + /> + + + + + + + + + + + + + +
+ + {/* ============================================================= */} + {/* RUN AGENT */} + {/* ============================================================= */} + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + {/* ============================================================= */} + {/* SCHEDULE AGENT */} + {/* ============================================================= */} + +
+ + + + + + + +
+ + {/* ============================================================= */} + {/* CREATE AGENT */} + {/* ============================================================= */} + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + {/* ============================================================= */} + {/* EDIT AGENT */} + {/* ============================================================= */} + +
+ + + + + + + + + + + + + + + + + + + + + + + +
+ + {/* ============================================================= */} + {/* VIEW AGENT OUTPUT */} + {/* ============================================================= */} + +
+ + + + + + + + + + AI is not replacing doctors — it's augmenting their capabilities.\n\n### Adoption by Region\n\n| Region | Adoption Rate | Growth |\n|--------|--------------|--------|\n| North America | 78% | +15% |\n| Europe | 62% | +22% |\n| Asia Pacific | 71% | +31% |", + ], + metadata: [ + { + sources_analyzed: 142, + confidence_score: 0.94, + processing_time_ms: 3420, + model_version: "v2.3.1", + categories: [ + "healthcare", + "machine-learning", + "diagnostics", + ], + }, + ], + chart: [ + "https://picsum.photos/seed/chart-demo/500/300", + ], + }, + }, + }, + }} + /> + + + + + + + + + + + + + + + + + +
+ + {/* ============================================================= */} + {/* FULL CONVERSATION EXAMPLE */} + {/* ============================================================= */} + +
+ + + + + + Find me a block that can fetch weather data + + + + + + + + Let me search for weather-related blocks for you. + + + + + + I found 2 blocks related to weather. The **Get Weather** + block fetches current conditions, while **Weather + Forecast** provides a 5-day outlook. Would you like me + to run one of these? + + + + + + + + Yes, run the Get Weather block for San Francisco + + + + + + + + + + The current weather in San Francisco is **68°F** and + **Foggy** with 85% humidity and winds from the west at + 12 mph. + + + + + +
+
+
+
+
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/CreateAgent/CreateAgent.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/CreateAgent/CreateAgent.tsx new file mode 100644 index 0000000000..5dc2f40dfe --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/CreateAgent/CreateAgent.tsx @@ -0,0 +1,237 @@ +"use client"; + +import { WarningDiamondIcon } from "@phosphor-icons/react"; +import type { ToolUIPart } from "ai"; +import { useCopilotChatActions } from "../../components/CopilotChatActionsProvider/useCopilotChatActions"; +import { MorphingTextAnimation } from "../../components/MorphingTextAnimation/MorphingTextAnimation"; +import { OrbitLoader } from "../../components/OrbitLoader/OrbitLoader"; +import { ProgressBar } from "../../components/ProgressBar/ProgressBar"; +import { + ContentCardDescription, + ContentCodeBlock, + ContentGrid, + ContentHint, + ContentLink, + ContentMessage, +} from "../../components/ToolAccordion/AccordionContent"; +import { ToolAccordion } from "../../components/ToolAccordion/ToolAccordion"; +import { useAsymptoticProgress } from "../../hooks/useAsymptoticProgress"; +import { + ClarificationQuestionsCard, + ClarifyingQuestion, +} from "./components/ClarificationQuestionsCard"; +import { + AccordionIcon, + formatMaybeJson, + getAnimationText, + getCreateAgentToolOutput, + isAgentPreviewOutput, + isAgentSavedOutput, + isClarificationNeededOutput, + isErrorOutput, + isOperationInProgressOutput, + isOperationPendingOutput, + isOperationStartedOutput, + ToolIcon, + truncateText, + type CreateAgentToolOutput, +} from "./helpers"; + +export interface CreateAgentToolPart { + type: string; + toolCallId: string; + state: ToolUIPart["state"]; + input?: unknown; + output?: unknown; +} + +interface Props { + part: CreateAgentToolPart; +} + +function getAccordionMeta(output: CreateAgentToolOutput): { + icon: React.ReactNode; + title: React.ReactNode; + titleClassName?: string; + description?: string; +} { + const icon = ; + + if (isAgentSavedOutput(output)) { + return { icon, title: output.agent_name }; + } + if (isAgentPreviewOutput(output)) { + return { + icon, + title: output.agent_name, + description: `${output.node_count} block${output.node_count === 1 ? "" : "s"}`, + }; + } + if (isClarificationNeededOutput(output)) { + const questions = output.questions ?? []; + return { + icon, + title: "Needs clarification", + description: `${questions.length} question${questions.length === 1 ? "" : "s"}`, + }; + } + if ( + isOperationStartedOutput(output) || + isOperationPendingOutput(output) || + isOperationInProgressOutput(output) + ) { + return { + icon: , + title: "Creating agent, this may take a few minutes. 
Sit back and relax.", + }; + } + return { + icon: ( + + ), + title: "Error", + titleClassName: "text-red-500", + }; +} + +export function CreateAgentTool({ part }: Props) { + const text = getAnimationText(part); + const { onSend } = useCopilotChatActions(); + const isStreaming = + part.state === "input-streaming" || part.state === "input-available"; + + const output = getCreateAgentToolOutput(part); + const isError = + part.state === "output-error" || (!!output && isErrorOutput(output)); + const isOperating = + !!output && + (isOperationStartedOutput(output) || + isOperationPendingOutput(output) || + isOperationInProgressOutput(output)); + const progress = useAsymptoticProgress(isOperating); + const hasExpandableContent = + part.state === "output-available" && + !!output && + (isOperationStartedOutput(output) || + isOperationPendingOutput(output) || + isOperationInProgressOutput(output) || + isAgentPreviewOutput(output) || + isAgentSavedOutput(output) || + isClarificationNeededOutput(output) || + isErrorOutput(output)); + + function handleClarificationAnswers(answers: Record) { + const questions = + output && isClarificationNeededOutput(output) + ? (output.questions ?? []) + : []; + + const contextMessage = questions + .map((q) => { + const answer = answers[q.keyword] || ""; + return `> ${q.question}\n\n${answer}`; + }) + .join("\n\n"); + + onSend( + `**Here are my answers:**\n\n${contextMessage}\n\nPlease proceed with creating the agent.`, + ); + } + + return ( +
+
+ + +
+ + {hasExpandableContent && output && ( + + {isOperating && ( + + + + This could take a few minutes, grab a coffee ☕ + + + )} + + {isAgentSavedOutput(output) && ( + + {output.message} +
+ + Open in library + + + Open in builder + +
+ + {truncateText( + formatMaybeJson({ agent_id: output.agent_id }), + 800, + )} + +
+ )} + + {isAgentPreviewOutput(output) && ( + + {output.message} + {output.description?.trim() && ( + + {output.description} + + )} + + {truncateText(formatMaybeJson(output.agent_json), 1600)} + + + )} + + {isClarificationNeededOutput(output) && ( + { + const item: ClarifyingQuestion = { + question: q.question, + keyword: q.keyword, + }; + const example = + typeof q.example === "string" && q.example.trim() + ? q.example.trim() + : null; + if (example) item.example = example; + return item; + })} + message={output.message} + onSubmitAnswers={handleClarificationAnswers} + /> + )} + + {isErrorOutput(output) && ( + + {output.message} + {output.error && ( + + {formatMaybeJson(output.error)} + + )} + {output.details && ( + + {formatMaybeJson(output.details)} + + )} + + )} +
+ )} +
+ ); +} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ClarificationQuestionsWidget/ClarificationQuestionsWidget.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/CreateAgent/components/ClarificationQuestionsCard.tsx similarity index 98% rename from autogpt_platform/frontend/src/components/contextual/Chat/components/ClarificationQuestionsWidget/ClarificationQuestionsWidget.tsx rename to autogpt_platform/frontend/src/app/(platform)/copilot/tools/CreateAgent/components/ClarificationQuestionsCard.tsx index 3b225d1ef1..abcb04731e 100644 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ClarificationQuestionsWidget/ClarificationQuestionsWidget.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/CreateAgent/components/ClarificationQuestionsCard.tsx @@ -6,7 +6,7 @@ import { Input } from "@/components/atoms/Input/Input"; import { Text } from "@/components/atoms/Text/Text"; import { cn } from "@/lib/utils"; import { CheckCircleIcon, QuestionIcon } from "@phosphor-icons/react"; -import { useState, useEffect, useRef } from "react"; +import { useEffect, useRef, useState } from "react"; export interface ClarifyingQuestion { question: string; @@ -24,12 +24,7 @@ interface Props { className?: string; } -function getStorageKey(sessionId?: string): string | null { - if (!sessionId) return null; - return `clarification_answers_${sessionId}`; -} - -export function ClarificationQuestionsWidget({ +export function ClarificationQuestionsCard({ questions, message, sessionId, @@ -241,3 +236,8 @@ export function ClarificationQuestionsWidget({
); } + +function getStorageKey(sessionId?: string): string | null { + if (!sessionId) return null; + return `clarification_answers_${sessionId}`; +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/CreateAgent/helpers.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/CreateAgent/helpers.tsx new file mode 100644 index 0000000000..bd47eac051 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/CreateAgent/helpers.tsx @@ -0,0 +1,186 @@ +import type { AgentPreviewResponse } from "@/app/api/__generated__/models/agentPreviewResponse"; +import type { AgentSavedResponse } from "@/app/api/__generated__/models/agentSavedResponse"; +import type { ClarificationNeededResponse } from "@/app/api/__generated__/models/clarificationNeededResponse"; +import type { ErrorResponse } from "@/app/api/__generated__/models/errorResponse"; +import type { OperationInProgressResponse } from "@/app/api/__generated__/models/operationInProgressResponse"; +import type { OperationPendingResponse } from "@/app/api/__generated__/models/operationPendingResponse"; +import type { OperationStartedResponse } from "@/app/api/__generated__/models/operationStartedResponse"; +import { ResponseType } from "@/app/api/__generated__/models/responseType"; +import { + PlusCircleIcon, + PlusIcon, + WarningDiamondIcon, +} from "@phosphor-icons/react"; +import type { ToolUIPart } from "ai"; +import { OrbitLoader } from "../../components/OrbitLoader/OrbitLoader"; + +export type CreateAgentToolOutput = + | OperationStartedResponse + | OperationPendingResponse + | OperationInProgressResponse + | AgentPreviewResponse + | AgentSavedResponse + | ClarificationNeededResponse + | ErrorResponse; + +function parseOutput(output: unknown): CreateAgentToolOutput | null { + if (!output) return null; + if (typeof output === "string") { + const trimmed = output.trim(); + if (!trimmed) return null; + try { + return parseOutput(JSON.parse(trimmed) as unknown); + } catch { + return null; + } + } + if (typeof output === "object") { + const type = (output as { type?: unknown }).type; + if ( + type === ResponseType.operation_started || + type === ResponseType.operation_pending || + type === ResponseType.operation_in_progress || + type === ResponseType.agent_preview || + type === ResponseType.agent_saved || + type === ResponseType.clarification_needed || + type === ResponseType.error + ) { + return output as CreateAgentToolOutput; + } + if ("operation_id" in output && "tool_name" in output) + return output as OperationStartedResponse | OperationPendingResponse; + if ("tool_call_id" in output) return output as OperationInProgressResponse; + if ("agent_json" in output && "agent_name" in output) + return output as AgentPreviewResponse; + if ("agent_id" in output && "library_agent_id" in output) + return output as AgentSavedResponse; + if ("questions" in output) return output as ClarificationNeededResponse; + if ("error" in output || "details" in output) + return output as ErrorResponse; + } + return null; +} + +export function getCreateAgentToolOutput( + part: unknown, +): CreateAgentToolOutput | null { + if (!part || typeof part !== "object") return null; + return parseOutput((part as { output?: unknown }).output); +} + +export function isOperationStartedOutput( + output: CreateAgentToolOutput, +): output is OperationStartedResponse { + return ( + output.type === ResponseType.operation_started || + ("operation_id" in output && "tool_name" in output) + ); +} + +export function isOperationPendingOutput( + 
output: CreateAgentToolOutput, +): output is OperationPendingResponse { + return output.type === ResponseType.operation_pending; +} + +export function isOperationInProgressOutput( + output: CreateAgentToolOutput, +): output is OperationInProgressResponse { + return ( + output.type === ResponseType.operation_in_progress || + "tool_call_id" in output + ); +} + +export function isAgentPreviewOutput( + output: CreateAgentToolOutput, +): output is AgentPreviewResponse { + return output.type === ResponseType.agent_preview || "agent_json" in output; +} + +export function isAgentSavedOutput( + output: CreateAgentToolOutput, +): output is AgentSavedResponse { + return ( + output.type === ResponseType.agent_saved || "agent_page_link" in output + ); +} + +export function isClarificationNeededOutput( + output: CreateAgentToolOutput, +): output is ClarificationNeededResponse { + return ( + output.type === ResponseType.clarification_needed || "questions" in output + ); +} + +export function isErrorOutput( + output: CreateAgentToolOutput, +): output is ErrorResponse { + return output.type === ResponseType.error || "error" in output; +} + +export function getAnimationText(part: { + state: ToolUIPart["state"]; + input?: unknown; + output?: unknown; +}): string { + switch (part.state) { + case "input-streaming": + case "input-available": + return "Creating a new agent"; + case "output-available": { + const output = parseOutput(part.output); + if (!output) return "Creating a new agent"; + if (isOperationStartedOutput(output)) return "Agent creation started"; + if (isOperationPendingOutput(output)) return "Agent creation in progress"; + if (isOperationInProgressOutput(output)) + return "Agent creation already in progress"; + if (isAgentSavedOutput(output)) return `Saved "${output.agent_name}"`; + if (isAgentPreviewOutput(output)) return `Preview "${output.agent_name}"`; + if (isClarificationNeededOutput(output)) return "Needs clarification"; + return "Error creating agent"; + } + case "output-error": + return "Error creating agent"; + default: + return "Creating a new agent"; + } +} + +export function ToolIcon({ + isStreaming, + isError, +}: { + isStreaming?: boolean; + isError?: boolean; +}) { + if (isError) { + return ( + + ); + } + if (isStreaming) { + return ; + } + return ; +} + +export function AccordionIcon() { + return ; +} + +export function formatMaybeJson(value: unknown): string { + if (typeof value === "string") return value; + try { + return JSON.stringify(value, null, 2); + } catch { + return String(value); + } +} + +export function truncateText(text: string, maxChars: number): string { + const trimmed = text.trim(); + if (trimmed.length <= maxChars) return trimmed; + return `${trimmed.slice(0, maxChars).trimEnd()}…`; +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/EditAgent/EditAgent.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/EditAgent/EditAgent.tsx new file mode 100644 index 0000000000..3beb9e7e1e --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/EditAgent/EditAgent.tsx @@ -0,0 +1,234 @@ +"use client"; + +import { WarningDiamondIcon } from "@phosphor-icons/react"; +import type { ToolUIPart } from "ai"; +import { useCopilotChatActions } from "../../components/CopilotChatActionsProvider/useCopilotChatActions"; +import { MorphingTextAnimation } from "../../components/MorphingTextAnimation/MorphingTextAnimation"; +import { OrbitLoader } from "../../components/OrbitLoader/OrbitLoader"; +import { ProgressBar } from 
"../../components/ProgressBar/ProgressBar"; +import { + ContentCardDescription, + ContentCodeBlock, + ContentGrid, + ContentHint, + ContentLink, + ContentMessage, +} from "../../components/ToolAccordion/AccordionContent"; +import { ToolAccordion } from "../../components/ToolAccordion/ToolAccordion"; +import { useAsymptoticProgress } from "../../hooks/useAsymptoticProgress"; +import { + ClarificationQuestionsCard, + ClarifyingQuestion, +} from "../CreateAgent/components/ClarificationQuestionsCard"; +import { + AccordionIcon, + formatMaybeJson, + getAnimationText, + getEditAgentToolOutput, + isAgentPreviewOutput, + isAgentSavedOutput, + isClarificationNeededOutput, + isErrorOutput, + isOperationInProgressOutput, + isOperationPendingOutput, + isOperationStartedOutput, + ToolIcon, + truncateText, + type EditAgentToolOutput, +} from "./helpers"; + +export interface EditAgentToolPart { + type: string; + toolCallId: string; + state: ToolUIPart["state"]; + input?: unknown; + output?: unknown; +} + +interface Props { + part: EditAgentToolPart; +} + +function getAccordionMeta(output: EditAgentToolOutput): { + icon: React.ReactNode; + title: string; + titleClassName?: string; + description?: string; +} { + const icon = ; + + if (isAgentSavedOutput(output)) { + return { icon, title: output.agent_name }; + } + if (isAgentPreviewOutput(output)) { + return { + icon, + title: output.agent_name, + description: `${output.node_count} block${output.node_count === 1 ? "" : "s"}`, + }; + } + if (isClarificationNeededOutput(output)) { + const questions = output.questions ?? []; + return { + icon, + title: "Needs clarification", + description: `${questions.length} question${questions.length === 1 ? "" : "s"}`, + }; + } + if ( + isOperationStartedOutput(output) || + isOperationPendingOutput(output) || + isOperationInProgressOutput(output) + ) { + return { icon: , title: "Editing agent" }; + } + return { + icon: ( + + ), + title: "Error", + titleClassName: "text-red-500", + }; +} + +export function EditAgentTool({ part }: Props) { + const text = getAnimationText(part); + const { onSend } = useCopilotChatActions(); + const isStreaming = + part.state === "input-streaming" || part.state === "input-available"; + + const output = getEditAgentToolOutput(part); + const isError = + part.state === "output-error" || (!!output && isErrorOutput(output)); + const isOperating = + !!output && + (isOperationStartedOutput(output) || + isOperationPendingOutput(output) || + isOperationInProgressOutput(output)); + const progress = useAsymptoticProgress(isOperating); + const hasExpandableContent = + part.state === "output-available" && + !!output && + (isOperationStartedOutput(output) || + isOperationPendingOutput(output) || + isOperationInProgressOutput(output) || + isAgentPreviewOutput(output) || + isAgentSavedOutput(output) || + isClarificationNeededOutput(output) || + isErrorOutput(output)); + + function handleClarificationAnswers(answers: Record) { + const questions = + output && isClarificationNeededOutput(output) + ? (output.questions ?? []) + : []; + + const contextMessage = questions + .map((q) => { + const answer = answers[q.keyword] || ""; + return `> ${q.question}\n\n${answer}`; + }) + .join("\n\n"); + + onSend( + `**Here are my answers:**\n\n${contextMessage}\n\nPlease proceed with editing the agent.`, + ); + } + + return ( +
+
+ + +
+ + {hasExpandableContent && output && ( + + {isOperating && ( + + + + This could take a few minutes, grab a coffee ☕ + + + )} + + {isAgentSavedOutput(output) && ( + + {output.message} +
+ + Open in library + + + Open in builder + +
+ + {truncateText( + formatMaybeJson({ agent_id: output.agent_id }), + 800, + )} + +
+ )} + + {isAgentPreviewOutput(output) && ( + + {output.message} + {output.description?.trim() && ( + + {output.description} + + )} + + {truncateText(formatMaybeJson(output.agent_json), 1600)} + + + )} + + {isClarificationNeededOutput(output) && ( + { + const item: ClarifyingQuestion = { + question: q.question, + keyword: q.keyword, + }; + const example = + typeof q.example === "string" && q.example.trim() + ? q.example.trim() + : null; + if (example) item.example = example; + return item; + })} + message={output.message} + onSubmitAnswers={handleClarificationAnswers} + /> + )} + + {isErrorOutput(output) && ( + + {output.message} + {output.error && ( + + {formatMaybeJson(output.error)} + + )} + {output.details && ( + + {formatMaybeJson(output.details)} + + )} + + )} +
+ )} +
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/EditAgent/helpers.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/EditAgent/helpers.tsx new file mode 100644 index 0000000000..a0db50cddc --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/EditAgent/helpers.tsx @@ -0,0 +1,188 @@ +import type { AgentPreviewResponse } from "@/app/api/__generated__/models/agentPreviewResponse"; +import type { AgentSavedResponse } from "@/app/api/__generated__/models/agentSavedResponse"; +import type { ClarificationNeededResponse } from "@/app/api/__generated__/models/clarificationNeededResponse"; +import type { ErrorResponse } from "@/app/api/__generated__/models/errorResponse"; +import type { OperationInProgressResponse } from "@/app/api/__generated__/models/operationInProgressResponse"; +import type { OperationPendingResponse } from "@/app/api/__generated__/models/operationPendingResponse"; +import type { OperationStartedResponse } from "@/app/api/__generated__/models/operationStartedResponse"; +import { ResponseType } from "@/app/api/__generated__/models/responseType"; +import { + NotePencilIcon, + PencilLineIcon, + WarningDiamondIcon, +} from "@phosphor-icons/react"; +import type { ToolUIPart } from "ai"; +import { OrbitLoader } from "../../components/OrbitLoader/OrbitLoader"; + +export type EditAgentToolOutput = + | OperationStartedResponse + | OperationPendingResponse + | OperationInProgressResponse + | AgentPreviewResponse + | AgentSavedResponse + | ClarificationNeededResponse + | ErrorResponse; + +function parseOutput(output: unknown): EditAgentToolOutput | null { + if (!output) return null; + if (typeof output === "string") { + const trimmed = output.trim(); + if (!trimmed) return null; + try { + return parseOutput(JSON.parse(trimmed) as unknown); + } catch { + return null; + } + } + if (typeof output === "object") { + const type = (output as { type?: unknown }).type; + if ( + type === ResponseType.operation_started || + type === ResponseType.operation_pending || + type === ResponseType.operation_in_progress || + type === ResponseType.agent_preview || + type === ResponseType.agent_saved || + type === ResponseType.clarification_needed || + type === ResponseType.error + ) { + return output as EditAgentToolOutput; + } + if ("operation_id" in output && "tool_name" in output) + return output as OperationStartedResponse | OperationPendingResponse; + if ("tool_call_id" in output) return output as OperationInProgressResponse; + if ("agent_json" in output && "agent_name" in output) + return output as AgentPreviewResponse; + if ("agent_id" in output && "library_agent_id" in output) + return output as AgentSavedResponse; + if ("questions" in output) return output as ClarificationNeededResponse; + if ("error" in output || "details" in output) + return output as ErrorResponse; + } + return null; +} + +export function getEditAgentToolOutput( + part: unknown, +): EditAgentToolOutput | null { + if (!part || typeof part !== "object") return null; + return parseOutput((part as { output?: unknown }).output); +} + +export function isOperationStartedOutput( + output: EditAgentToolOutput, +): output is OperationStartedResponse { + return ( + output.type === ResponseType.operation_started || + ("operation_id" in output && "tool_name" in output) + ); +} + +export function isOperationPendingOutput( + output: EditAgentToolOutput, +): output is OperationPendingResponse { + return output.type === ResponseType.operation_pending; +} + +export function 
isOperationInProgressOutput( + output: EditAgentToolOutput, +): output is OperationInProgressResponse { + return ( + output.type === ResponseType.operation_in_progress || + "tool_call_id" in output + ); +} + +export function isAgentPreviewOutput( + output: EditAgentToolOutput, +): output is AgentPreviewResponse { + return output.type === ResponseType.agent_preview || "agent_json" in output; +} + +export function isAgentSavedOutput( + output: EditAgentToolOutput, +): output is AgentSavedResponse { + return ( + output.type === ResponseType.agent_saved || "agent_page_link" in output + ); +} + +export function isClarificationNeededOutput( + output: EditAgentToolOutput, +): output is ClarificationNeededResponse { + return ( + output.type === ResponseType.clarification_needed || "questions" in output + ); +} + +export function isErrorOutput( + output: EditAgentToolOutput, +): output is ErrorResponse { + return output.type === ResponseType.error || "error" in output; +} + +export function getAnimationText(part: { + state: ToolUIPart["state"]; + input?: unknown; + output?: unknown; +}): string { + switch (part.state) { + case "input-streaming": + case "input-available": + return "Editing the agent"; + case "output-available": { + const output = parseOutput(part.output); + if (!output) return "Editing the agent"; + if (isOperationStartedOutput(output)) return "Agent update started"; + if (isOperationPendingOutput(output)) return "Agent update in progress"; + if (isOperationInProgressOutput(output)) + return "Agent update already in progress"; + if (isAgentSavedOutput(output)) return `Saved "${output.agent_name}"`; + if (isAgentPreviewOutput(output)) return `Preview "${output.agent_name}"`; + if (isClarificationNeededOutput(output)) return "Needs clarification"; + return "Error editing agent"; + } + case "output-error": + return "Error editing agent"; + default: + return "Editing the agent"; + } +} + +export function ToolIcon({ + isStreaming, + isError, +}: { + isStreaming?: boolean; + isError?: boolean; +}) { + if (isError) { + return ( + + ); + } + if (isStreaming) { + return ; + } + return ( + + ); +} + +export function AccordionIcon() { + return ; +} + +export function formatMaybeJson(value: unknown): string { + if (typeof value === "string") return value; + try { + return JSON.stringify(value, null, 2); + } catch { + return String(value); + } +} + +export function truncateText(text: string, maxChars: number): string { + const trimmed = text.trim(); + if (trimmed.length <= maxChars) return trimmed; + return `${trimmed.slice(0, maxChars).trimEnd()}…`; +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/FindAgents/FindAgents.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/FindAgents/FindAgents.tsx new file mode 100644 index 0000000000..4f0068b2c5 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/FindAgents/FindAgents.tsx @@ -0,0 +1,127 @@ +"use client"; + +import { ToolUIPart } from "ai"; +import { MorphingTextAnimation } from "../../components/MorphingTextAnimation/MorphingTextAnimation"; +import { + ContentBadge, + ContentCard, + ContentCardDescription, + ContentCardHeader, + ContentCardTitle, + ContentGrid, + ContentLink, +} from "../../components/ToolAccordion/AccordionContent"; +import { ToolAccordion } from "../../components/ToolAccordion/ToolAccordion"; +import { + AccordionIcon, + getAgentHref, + getAnimationText, + getFindAgentsOutput, + getSourceLabelFromToolType, + isAgentsFoundOutput, + isErrorOutput, + ToolIcon, +} 
from "./helpers"; + +export interface FindAgentsToolPart { + type: string; + toolCallId: string; + state: ToolUIPart["state"]; + input?: unknown; + output?: unknown; +} + +interface Props { + part: FindAgentsToolPart; +} + +export function FindAgentsTool({ part }: Props) { + const text = getAnimationText(part); + const output = getFindAgentsOutput(part); + const isStreaming = + part.state === "input-streaming" || part.state === "input-available"; + const isError = + part.state === "output-error" || (!!output && isErrorOutput(output)); + + const query = + typeof part.input === "object" && part.input !== null + ? String((part.input as { query?: unknown }).query ?? "").trim() + : ""; + + const agentsFoundOutput = + part.state === "output-available" && output && isAgentsFoundOutput(output) + ? output + : null; + + const hasAgents = + !!agentsFoundOutput && + agentsFoundOutput.agents.length > 0 && + (typeof agentsFoundOutput.count !== "number" || + agentsFoundOutput.count > 0); + const totalCount = agentsFoundOutput ? agentsFoundOutput.count : 0; + const { source } = getSourceLabelFromToolType(part.type); + const scopeText = + source === "library" + ? "in your library" + : source === "marketplace" + ? "in marketplace" + : ""; + const accordionDescription = `Found ${totalCount}${scopeText ? ` ${scopeText}` : ""}${ + query ? ` for "${query}"` : "" + }`; + + return ( +
+
+ + +
+ + {hasAgents && agentsFoundOutput && ( + } + title="Agent results" + description={accordionDescription} + > + + {agentsFoundOutput.agents.map((agent) => { + const href = getAgentHref(agent); + const agentSource = + agent.source === "library" + ? "Library" + : agent.source === "marketplace" + ? "Marketplace" + : null; + return ( + + Open : null + } + > +
+ {agent.name} + {agentSource && ( + {agentSource} + )} +
+ + {agent.description} + +
+
+ ); + })} +
+
+ )} +
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/FindAgents/helpers.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/FindAgents/helpers.tsx new file mode 100644 index 0000000000..f253947953 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/FindAgents/helpers.tsx @@ -0,0 +1,187 @@ +import type { AgentInfo } from "@/app/api/__generated__/models/agentInfo"; +import type { AgentsFoundResponse } from "@/app/api/__generated__/models/agentsFoundResponse"; +import type { ErrorResponse } from "@/app/api/__generated__/models/errorResponse"; +import type { NoResultsResponse } from "@/app/api/__generated__/models/noResultsResponse"; +import { ResponseType } from "@/app/api/__generated__/models/responseType"; +import { + FolderOpenIcon, + MagnifyingGlassIcon, + SquaresFourIcon, + StorefrontIcon, +} from "@phosphor-icons/react"; +import { ToolUIPart } from "ai"; + +export interface FindAgentInput { + query: string; +} + +export type FindAgentsOutput = + | AgentsFoundResponse + | NoResultsResponse + | ErrorResponse; + +export type FindAgentsToolType = + | "tool-find_agent" + | "tool-find_library_agent" + | (string & {}); + +function parseOutput(output: unknown): FindAgentsOutput | null { + if (!output) return null; + if (typeof output === "string") { + const trimmed = output.trim(); + if (!trimmed) return null; + try { + return parseOutput(JSON.parse(trimmed) as unknown); + } catch { + return null; + } + } + if (typeof output === "object") { + const type = (output as { type?: unknown }).type; + if ( + type === ResponseType.agents_found || + type === ResponseType.no_results || + type === ResponseType.error + ) { + return output as FindAgentsOutput; + } + if ("agents" in output && "count" in output) + return output as AgentsFoundResponse; + if ("suggestions" in output && !("error" in output)) + return output as NoResultsResponse; + if ("error" in output || "details" in output) + return output as ErrorResponse; + } + return null; +} + +export function getFindAgentsOutput(part: unknown): FindAgentsOutput | null { + if (!part || typeof part !== "object") return null; + return parseOutput((part as { output?: unknown }).output); +} + +export function isAgentsFoundOutput( + output: FindAgentsOutput, +): output is AgentsFoundResponse { + return output.type === ResponseType.agents_found || "agents" in output; +} + +export function isNoResultsOutput( + output: FindAgentsOutput, +): output is NoResultsResponse { + return ( + output.type === ResponseType.no_results || + ("suggestions" in output && !("error" in output)) + ); +} + +export function isErrorOutput( + output: FindAgentsOutput, +): output is ErrorResponse { + return output.type === ResponseType.error || "error" in output; +} + +export function getSourceLabelFromToolType(toolType?: FindAgentsToolType): { + source: "marketplace" | "library" | "unknown"; + label: string; +} { + if (toolType === "tool-find_library_agent") { + return { source: "library", label: "Library" }; + } + if (toolType === "tool-find_agent") { + return { source: "marketplace", label: "Marketplace" }; + } + return { source: "unknown", label: "Agents" }; +} + +export function getAnimationText(part: { + type?: FindAgentsToolType; + state: ToolUIPart["state"]; + input?: unknown; + output?: unknown; +}): string { + const { source } = getSourceLabelFromToolType(part.type); + const query = (part.input as FindAgentInput | undefined)?.query?.trim(); + + // Action phrase matching legacy ToolCallMessage + const actionPhrase 
= + source === "library" + ? "Looking for library agents" + : "Looking for agents in the marketplace"; + + const queryText = query ? ` matching "${query}"` : ""; + + switch (part.state) { + case "input-streaming": + case "input-available": + return `${actionPhrase}${queryText}`; + + case "output-available": { + const output = parseOutput(part.output); + if (!output) { + return `${actionPhrase}${queryText}`; + } + if (isNoResultsOutput(output)) { + return `No agents found${queryText}`; + } + if (isAgentsFoundOutput(output)) { + const count = output.count ?? output.agents?.length ?? 0; + return `Found ${count} agent${count === 1 ? "" : "s"}${queryText}`; + } + if (isErrorOutput(output)) { + return `Error finding agents${queryText}`; + } + return `${actionPhrase}${queryText}`; + } + + case "output-error": + return `Error finding agents${queryText}`; + + default: + return actionPhrase; + } +} + +export function getAgentHref(agent: AgentInfo): string | null { + if (agent.source === "library") { + return `/library/agents/${encodeURIComponent(agent.id)}`; + } + + const [creator, slug, ...rest] = agent.id.split("/"); + if (!creator || !slug || rest.length > 0) return null; + return `/marketplace/agent/${encodeURIComponent(creator)}/${encodeURIComponent(slug)}`; +} + +export function ToolIcon({ + toolType, + isStreaming, + isError, +}: { + toolType?: FindAgentsToolType; + isStreaming?: boolean; + isError?: boolean; +}) { + const { source } = getSourceLabelFromToolType(toolType); + const IconComponent = + source === "library" ? MagnifyingGlassIcon : SquaresFourIcon; + + return ( + + ); +} + +export function AccordionIcon({ toolType }: { toolType?: FindAgentsToolType }) { + const { source } = getSourceLabelFromToolType(toolType); + const IconComponent = source === "library" ? 
FolderOpenIcon : StorefrontIcon; + return ; +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/FindBlocks/FindBlocks.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/FindBlocks/FindBlocks.tsx new file mode 100644 index 0000000000..3684a2da14 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/FindBlocks/FindBlocks.tsx @@ -0,0 +1,92 @@ +"use client"; + +import { MorphingTextAnimation } from "../../components/MorphingTextAnimation/MorphingTextAnimation"; +import { ToolAccordion } from "../../components/ToolAccordion/ToolAccordion"; +import { + ContentCard, + ContentCardDescription, + ContentCardTitle, +} from "../../components/ToolAccordion/AccordionContent"; +import type { BlockListResponse } from "@/app/api/__generated__/models/blockListResponse"; +import type { BlockInfoSummary } from "@/app/api/__generated__/models/blockInfoSummary"; +import { ToolUIPart } from "ai"; +import { HorizontalScroll } from "@/app/(platform)/build/components/NewControlPanel/NewBlockMenu/HorizontalScroll"; +import { + AccordionIcon, + getAnimationText, + parseOutput, + ToolIcon, +} from "./helpers"; + +export interface FindBlockInput { + query: string; +} + +export type FindBlockOutput = BlockListResponse; + +export interface FindBlockToolPart { + type: string; + toolName?: string; + toolCallId: string; + state: ToolUIPart["state"]; + input?: FindBlockInput | unknown; + output?: string | FindBlockOutput | unknown; + title?: string; +} + +interface Props { + part: FindBlockToolPart; +} + +function BlockCard({ block }: { block: BlockInfoSummary }) { + return ( + + {block.name} + + {block.description} + + + ); +} + +export function FindBlocksTool({ part }: Props) { + const text = getAnimationText(part); + const isStreaming = + part.state === "input-streaming" || part.state === "input-available"; + const isError = part.state === "output-error"; + + const parsed = + part.state === "output-available" ? parseOutput(part.output) : null; + const hasBlocks = !!parsed && parsed.blocks.length > 0; + + const query = (part.input as FindBlockInput | undefined)?.query?.trim(); + const accordionDescription = parsed + ? `Found ${parsed.count} block${parsed.count === 1 ? "" : "s"}${query ? ` for "${query}"` : ""}` + : undefined; + + return ( +
+
+ + +
+ + {hasBlocks && parsed && ( + } + title="Block results" + description={accordionDescription} + > + + {parsed.blocks.map((block) => ( + + ))} + + + )} +
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/FindBlocks/helpers.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/FindBlocks/helpers.tsx new file mode 100644 index 0000000000..eaebe98ea5 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/FindBlocks/helpers.tsx @@ -0,0 +1,75 @@ +import type { BlockListResponse } from "@/app/api/__generated__/models/blockListResponse"; +import { ResponseType } from "@/app/api/__generated__/models/responseType"; +import { CubeIcon, PackageIcon } from "@phosphor-icons/react"; +import { FindBlockInput, FindBlockToolPart } from "./FindBlocks"; + +export function parseOutput(output: unknown): BlockListResponse | null { + if (!output) return null; + if (typeof output === "string") { + const trimmed = output.trim(); + if (!trimmed) return null; + try { + return parseOutput(JSON.parse(trimmed) as unknown); + } catch { + return null; + } + } + if (typeof output === "object") { + const type = (output as { type?: unknown }).type; + if (type === ResponseType.block_list || "blocks" in output) { + return output as BlockListResponse; + } + } + return null; +} + +export function getAnimationText(part: FindBlockToolPart): string { + const query = (part.input as FindBlockInput | undefined)?.query?.trim(); + const queryText = query ? ` matching "${query}"` : ""; + + switch (part.state) { + case "input-streaming": + case "input-available": + return `Searching for blocks${queryText}`; + + case "output-available": { + const parsed = parseOutput(part.output); + if (parsed) { + return `Found ${parsed.count} block${parsed.count === 1 ? "" : "s"}${queryText}`; + } + return `Searching for blocks${queryText}`; + } + + case "output-error": + return `Error finding blocks${queryText}`; + + default: + return "Searching for blocks"; + } +} + +export function ToolIcon({ + isStreaming, + isError, +}: { + isStreaming?: boolean; + isError?: boolean; +}) { + return ( + + ); +} + +export function AccordionIcon() { + return ; +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/RunAgent.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/RunAgent.tsx new file mode 100644 index 0000000000..51044848b9 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/RunAgent.tsx @@ -0,0 +1,93 @@ +"use client"; + +import type { ToolUIPart } from "ai"; +import { MorphingTextAnimation } from "../../components/MorphingTextAnimation/MorphingTextAnimation"; +import { ToolAccordion } from "../../components/ToolAccordion/ToolAccordion"; +import { ContentMessage } from "../../components/ToolAccordion/AccordionContent"; +import { + getAccordionMeta, + getAnimationText, + getRunAgentToolOutput, + isRunAgentAgentDetailsOutput, + isRunAgentErrorOutput, + isRunAgentExecutionStartedOutput, + isRunAgentNeedLoginOutput, + isRunAgentSetupRequirementsOutput, + ToolIcon, +} from "./helpers"; +import { ExecutionStartedCard } from "./components/ExecutionStartedCard/ExecutionStartedCard"; +import { AgentDetailsCard } from "./components/AgentDetailsCard/AgentDetailsCard"; +import { SetupRequirementsCard } from "./components/SetupRequirementsCard/SetupRequirementsCard"; +import { ErrorCard } from "./components/ErrorCard/ErrorCard"; + +export interface RunAgentToolPart { + type: string; + toolCallId: string; + state: ToolUIPart["state"]; + input?: unknown; + output?: unknown; +} + +interface Props { + part: RunAgentToolPart; +} + +export function RunAgentTool({ part }: 
Props) { + const text = getAnimationText(part); + const isStreaming = + part.state === "input-streaming" || part.state === "input-available"; + + const output = getRunAgentToolOutput(part); + const isError = + part.state === "output-error" || + (!!output && isRunAgentErrorOutput(output)); + const hasExpandableContent = + part.state === "output-available" && + !!output && + (isRunAgentExecutionStartedOutput(output) || + isRunAgentAgentDetailsOutput(output) || + isRunAgentSetupRequirementsOutput(output) || + isRunAgentNeedLoginOutput(output) || + isRunAgentErrorOutput(output)); + + return ( +
+
+ + +
+ + {hasExpandableContent && output && ( + + {isRunAgentExecutionStartedOutput(output) && ( + + )} + + {isRunAgentAgentDetailsOutput(output) && ( + + )} + + {isRunAgentSetupRequirementsOutput(output) && ( + + )} + + {isRunAgentNeedLoginOutput(output) && ( + {output.message} + )} + + {isRunAgentErrorOutput(output) && } + + )} +
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/AgentDetailsCard/AgentDetailsCard.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/AgentDetailsCard/AgentDetailsCard.tsx new file mode 100644 index 0000000000..f18568faec --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/AgentDetailsCard/AgentDetailsCard.tsx @@ -0,0 +1,116 @@ +"use client"; + +import type { AgentDetailsResponse } from "@/app/api/__generated__/models/agentDetailsResponse"; +import { Button } from "@/components/atoms/Button/Button"; +import { Text } from "@/components/atoms/Text/Text"; +import { FormRenderer } from "@/components/renderers/InputRenderer/FormRenderer"; +import { AnimatePresence, motion } from "framer-motion"; +import { useState } from "react"; +import { useCopilotChatActions } from "../../../../components/CopilotChatActionsProvider/useCopilotChatActions"; +import { ContentMessage } from "../../../../components/ToolAccordion/AccordionContent"; +import { buildInputSchema } from "./helpers"; + +interface Props { + output: AgentDetailsResponse; +} + +export function AgentDetailsCard({ output }: Props) { + const { onSend } = useCopilotChatActions(); + const [showInputForm, setShowInputForm] = useState(false); + const [inputValues, setInputValues] = useState>({}); + + function handleRunWithExamples() { + onSend( + `Run the agent "${output.agent.name}" with placeholder/example values so I can test it.`, + ); + } + + function handleRunWithInputs() { + const nonEmpty = Object.fromEntries( + Object.entries(inputValues).filter( + ([, v]) => v !== undefined && v !== null && v !== "", + ), + ); + onSend( + `Run the agent "${output.agent.name}" with these inputs: ${JSON.stringify(nonEmpty, null, 2)}`, + ); + setShowInputForm(false); + setInputValues({}); + } + + return ( +
+ + Run this agent with example values or your own inputs. + + +
+ + +
+ + + {showInputForm && buildInputSchema(output.agent.inputs) && ( + +
+ Enter your inputs + setInputValues(v.formData ?? {})} + uiSchema={{ + "ui:submitButtonOptions": { norender: true }, + }} + initialValues={inputValues} + formContext={{ + showHandles: false, + size: "small", + }} + /> +
+ + +
+
+
+ )} +
+
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/AgentDetailsCard/helpers.ts b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/AgentDetailsCard/helpers.ts new file mode 100644 index 0000000000..635b8d20d7 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/AgentDetailsCard/helpers.ts @@ -0,0 +1,8 @@ +import type { RJSFSchema } from "@rjsf/utils"; + +export function buildInputSchema(inputs: unknown): RJSFSchema | null { + if (!inputs || typeof inputs !== "object") return null; + const properties = inputs as RJSFSchema["properties"]; + if (!properties || Object.keys(properties).length === 0) return null; + return inputs as RJSFSchema; +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/ErrorCard/ErrorCard.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/ErrorCard/ErrorCard.tsx new file mode 100644 index 0000000000..7990428947 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/ErrorCard/ErrorCard.tsx @@ -0,0 +1,27 @@ +"use client"; + +import type { ErrorResponse } from "@/app/api/__generated__/models/errorResponse"; +import { + ContentCodeBlock, + ContentGrid, + ContentMessage, +} from "../../../../components/ToolAccordion/AccordionContent"; +import { formatMaybeJson } from "../../helpers"; + +interface Props { + output: ErrorResponse; +} + +export function ErrorCard({ output }: Props) { + return ( + + {output.message} + {output.error && ( + {formatMaybeJson(output.error)} + )} + {output.details && ( + {formatMaybeJson(output.details)} + )} + + ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/ExecutionStartedCard/ExecutionStartedCard.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/ExecutionStartedCard/ExecutionStartedCard.tsx new file mode 100644 index 0000000000..f98656e5ff --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/ExecutionStartedCard/ExecutionStartedCard.tsx @@ -0,0 +1,39 @@ +"use client"; + +import type { ExecutionStartedResponse } from "@/app/api/__generated__/models/executionStartedResponse"; +import { Button } from "@/components/atoms/Button/Button"; +import { useRouter } from "next/navigation"; +import { + ContentCard, + ContentCardDescription, + ContentCardSubtitle, + ContentCardTitle, + ContentGrid, +} from "../../../../components/ToolAccordion/AccordionContent"; + +interface Props { + output: ExecutionStartedResponse; +} + +export function ExecutionStartedCard({ output }: Props) { + const router = useRouter(); + + return ( + + + Execution started + {output.execution_id} + {output.message} + {output.library_agent_link && ( + + )} + + + ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/SetupRequirementsCard/SetupRequirementsCard.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/SetupRequirementsCard/SetupRequirementsCard.tsx new file mode 100644 index 0000000000..c6d116e62a --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/SetupRequirementsCard/SetupRequirementsCard.tsx @@ -0,0 +1,105 @@ +"use client"; + +import { useState } from "react"; +import { CredentialsGroupedView } from 
"@/components/contextual/CredentialsInput/components/CredentialsGroupedView/CredentialsGroupedView"; +import { Button } from "@/components/atoms/Button/Button"; +import type { CredentialsMetaInput } from "@/lib/autogpt-server-api/types"; +import type { SetupRequirementsResponse } from "@/app/api/__generated__/models/setupRequirementsResponse"; +import { useCopilotChatActions } from "../../../../components/CopilotChatActionsProvider/useCopilotChatActions"; +import { + ContentBadge, + ContentCardDescription, + ContentCardTitle, + ContentMessage, +} from "../../../../components/ToolAccordion/AccordionContent"; +import { coerceCredentialFields, coerceExpectedInputs } from "./helpers"; + +interface Props { + output: SetupRequirementsResponse; +} + +export function SetupRequirementsCard({ output }: Props) { + const { onSend } = useCopilotChatActions(); + + const [inputCredentials, setInputCredentials] = useState< + Record + >({}); + const [hasSent, setHasSent] = useState(false); + + const { credentialFields, requiredCredentials } = coerceCredentialFields( + output.setup_info.user_readiness?.missing_credentials, + ); + + const expectedInputs = coerceExpectedInputs( + (output.setup_info.requirements as Record)?.inputs, + ); + + function handleCredentialChange(key: string, value?: CredentialsMetaInput) { + setInputCredentials((prev) => ({ ...prev, [key]: value })); + } + + const isAllComplete = + credentialFields.length > 0 && + [...requiredCredentials].every((key) => !!inputCredentials[key]); + + function handleProceed() { + setHasSent(true); + onSend( + "I've configured the required credentials. Please check if everything is ready and proceed with running the agent.", + ); + } + + return ( +
+ {output.message} + + {credentialFields.length > 0 && ( +
+ + {isAllComplete && !hasSent && ( + + )} +
+ )} + + {expectedInputs.length > 0 && ( +
+ + Expected inputs + +
+ {expectedInputs.map((input) => ( +
+
+ + {input.title} + + + {input.required ? "Required" : "Optional"} + +
+ + {input.name} • {input.type} + {input.description ? ` \u2022 ${input.description}` : ""} + +
+ ))} +
+
+ )} +
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/SetupRequirementsCard/helpers.ts b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/SetupRequirementsCard/helpers.ts new file mode 100644 index 0000000000..6bb10751f0 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/components/SetupRequirementsCard/helpers.ts @@ -0,0 +1,116 @@ +import type { CredentialField } from "@/components/contextual/CredentialsInput/components/CredentialsGroupedView/helpers"; + +const VALID_CREDENTIAL_TYPES = new Set([ + "api_key", + "oauth2", + "user_password", + "host_scoped", +]); + +/** + * Transforms raw missing_credentials from SetupRequirementsResponse + * into CredentialField[] tuples compatible with CredentialsGroupedView. + * + * Each CredentialField is [key, schema] where schema matches + * BlockIOCredentialsSubSchema shape. + */ +export function coerceCredentialFields(rawMissingCredentials: unknown): { + credentialFields: CredentialField[]; + requiredCredentials: Set; +} { + const missing = + rawMissingCredentials && typeof rawMissingCredentials === "object" + ? (rawMissingCredentials as Record) + : {}; + + const credentialFields: CredentialField[] = []; + const requiredCredentials = new Set(); + + Object.entries(missing).forEach(([key, value]) => { + if (!value || typeof value !== "object") return; + const cred = value as Record; + + const provider = + typeof cred.provider === "string" ? cred.provider.trim() : ""; + if (!provider) return; + + const types = + Array.isArray(cred.types) && cred.types.length > 0 + ? cred.types + : typeof cred.type === "string" + ? [cred.type] + : []; + + const credentialTypes = types + .map((t) => (typeof t === "string" ? t.trim() : "")) + .filter((t) => VALID_CREDENTIAL_TYPES.has(t)); + + if (credentialTypes.length === 0) return; + + const scopes = Array.isArray(cred.scopes) + ? cred.scopes.filter((s): s is string => typeof s === "string") + : undefined; + + const schema = { + type: "object" as const, + properties: {}, + credentials_provider: [provider], + credentials_types: credentialTypes, + credentials_scopes: scopes, + }; + + credentialFields.push([key, schema]); + requiredCredentials.add(key); + }); + + return { credentialFields, requiredCredentials }; +} + +export function coerceExpectedInputs(rawInputs: unknown): Array<{ + name: string; + title: string; + type: string; + description?: string; + required: boolean; +}> { + if (!Array.isArray(rawInputs)) return []; + const results: Array<{ + name: string; + title: string; + type: string; + description?: string; + required: boolean; + }> = []; + + rawInputs.forEach((value, index) => { + if (!value || typeof value !== "object") return; + const input = value as Record; + + const name = + typeof input.name === "string" && input.name.trim() + ? input.name.trim() + : `input-${index}`; + const title = + typeof input.title === "string" && input.title.trim() + ? input.title.trim() + : name; + const type = typeof input.type === "string" ? input.type : "unknown"; + const description = + typeof input.description === "string" && input.description.trim() + ? 
input.description.trim() + : undefined; + const required = Boolean(input.required); + + const item: { + name: string; + title: string; + type: string; + description?: string; + required: boolean; + } = { name, title, type, required }; + if (description) item.description = description; + results.push(item); + }); + + return results; +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/helpers.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/helpers.tsx new file mode 100644 index 0000000000..0a117a71f2 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunAgent/helpers.tsx @@ -0,0 +1,248 @@ +import type { AgentDetailsResponse } from "@/app/api/__generated__/models/agentDetailsResponse"; +import type { ErrorResponse } from "@/app/api/__generated__/models/errorResponse"; +import type { ExecutionStartedResponse } from "@/app/api/__generated__/models/executionStartedResponse"; +import type { NeedLoginResponse } from "@/app/api/__generated__/models/needLoginResponse"; +import { ResponseType } from "@/app/api/__generated__/models/responseType"; +import type { SetupRequirementsResponse } from "@/app/api/__generated__/models/setupRequirementsResponse"; +import { + PlayIcon, + RocketLaunchIcon, + WarningDiamondIcon, +} from "@phosphor-icons/react"; +import type { ToolUIPart } from "ai"; +import { SpinnerLoader } from "../../components/SpinnerLoader/SpinnerLoader"; + +export interface RunAgentInput { + username_agent_slug?: string; + library_agent_id?: string; + inputs?: Record; + use_defaults?: boolean; + schedule_name?: string; + cron?: string; + timezone?: string; +} + +export type RunAgentToolOutput = + | SetupRequirementsResponse + | ExecutionStartedResponse + | AgentDetailsResponse + | NeedLoginResponse + | ErrorResponse; + +const RUN_AGENT_OUTPUT_TYPES = new Set([ + ResponseType.setup_requirements, + ResponseType.execution_started, + ResponseType.agent_details, + ResponseType.need_login, + ResponseType.error, +]); + +export function isRunAgentSetupRequirementsOutput( + output: RunAgentToolOutput, +): output is SetupRequirementsResponse { + return ( + output.type === ResponseType.setup_requirements || + ("setup_info" in output && typeof output.setup_info === "object") + ); +} + +export function isRunAgentExecutionStartedOutput( + output: RunAgentToolOutput, +): output is ExecutionStartedResponse { + return ( + output.type === ResponseType.execution_started || "execution_id" in output + ); +} + +export function isRunAgentAgentDetailsOutput( + output: RunAgentToolOutput, +): output is AgentDetailsResponse { + return output.type === ResponseType.agent_details || "agent" in output; +} + +export function isRunAgentNeedLoginOutput( + output: RunAgentToolOutput, +): output is NeedLoginResponse { + return output.type === ResponseType.need_login; +} + +export function isRunAgentErrorOutput( + output: RunAgentToolOutput, +): output is ErrorResponse { + return output.type === ResponseType.error || "error" in output; +} + +function parseOutput(output: unknown): RunAgentToolOutput | null { + if (!output) return null; + if (typeof output === "string") { + const trimmed = output.trim(); + if (!trimmed) return null; + try { + return parseOutput(JSON.parse(trimmed) as unknown); + } catch { + return null; + } + } + if (typeof output === "object") { + const type = (output as { type?: unknown }).type; + if (typeof type === "string" && RUN_AGENT_OUTPUT_TYPES.has(type)) { + return output as RunAgentToolOutput; + } + if ("execution_id" 
in output) return output as ExecutionStartedResponse; + if ("setup_info" in output) return output as SetupRequirementsResponse; + if ("agent" in output) return output as AgentDetailsResponse; + if ("error" in output || "details" in output) + return output as ErrorResponse; + if (type === ResponseType.need_login) return output as NeedLoginResponse; + } + return null; +} + +export function getRunAgentToolOutput( + part: unknown, +): RunAgentToolOutput | null { + if (!part || typeof part !== "object") return null; + return parseOutput((part as { output?: unknown }).output); +} + +function getAgentIdentifierText( + input: RunAgentInput | undefined, +): string | null { + if (!input) return null; + const slug = input.username_agent_slug?.trim(); + if (slug) return slug; + const libraryId = input.library_agent_id?.trim(); + if (libraryId) return `Library agent ${libraryId}`; + return null; +} + +export function getAnimationText(part: { + state: ToolUIPart["state"]; + input?: unknown; + output?: unknown; +}): string { + const input = part.input as RunAgentInput | undefined; + const agentIdentifier = getAgentIdentifierText(input); + const isSchedule = Boolean( + input?.schedule_name?.trim() || input?.cron?.trim(), + ); + const actionPhrase = isSchedule + ? "Scheduling the agent to run" + : "Running the agent"; + const identifierText = agentIdentifier ? ` "${agentIdentifier}"` : ""; + + switch (part.state) { + case "input-streaming": + case "input-available": + return `${actionPhrase}${identifierText}`; + case "output-available": { + const output = parseOutput(part.output); + if (!output) return `${actionPhrase}${identifierText}`; + if (isRunAgentExecutionStartedOutput(output)) { + return `Started "${output.graph_name}"`; + } + if (isRunAgentAgentDetailsOutput(output)) { + return `Agent inputs needed for "${output.agent.name}"`; + } + if (isRunAgentSetupRequirementsOutput(output)) { + return `Setup needed for "${output.setup_info.agent_name}"`; + } + if (isRunAgentNeedLoginOutput(output)) + return "Sign in required to run agent"; + return "Error running agent"; + } + case "output-error": + return "Error running agent"; + default: + return actionPhrase; + } +} + +export function ToolIcon({ + isStreaming, + isError, +}: { + isStreaming?: boolean; + isError?: boolean; +}) { + if (isError) { + return ( + + ); + } + if (isStreaming) { + return ; + } + return ; +} + +export function AccordionIcon() { + return ; +} + +export function formatMaybeJson(value: unknown): string { + if (typeof value === "string") return value; + try { + return JSON.stringify(value, null, 2); + } catch { + return String(value); + } +} + +export function getAccordionMeta(output: RunAgentToolOutput): { + icon: React.ReactNode; + title: string; + titleClassName?: string; + description?: string; +} { + const icon = ; + + if (isRunAgentExecutionStartedOutput(output)) { + const statusText = + typeof output.status === "string" && output.status.trim() + ? output.status.trim() + : "started"; + return { + icon: , + title: output.graph_name, + description: `Status: ${statusText}`, + }; + } + + if (isRunAgentAgentDetailsOutput(output)) { + return { + icon, + title: output.agent.name, + description: "Inputs required", + }; + } + + if (isRunAgentSetupRequirementsOutput(output)) { + const missingCredsCount = Object.keys( + (output.setup_info.user_readiness?.missing_credentials ?? {}) as Record< + string, + unknown + >, + ).length; + return { + icon, + title: output.setup_info.agent_name, + description: + missingCredsCount > 0 + ? 
`Missing ${missingCredsCount} credential${missingCredsCount === 1 ? "" : "s"}` + : output.message, + }; + } + + if (isRunAgentNeedLoginOutput(output)) { + return { icon, title: "Sign in required" }; + } + + return { + icon: ( + + ), + title: "Error", + titleClassName: "text-red-500", + }; +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunBlock/RunBlock.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunBlock/RunBlock.tsx new file mode 100644 index 0000000000..ded344efa2 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunBlock/RunBlock.tsx @@ -0,0 +1,76 @@ +"use client"; + +import type { ToolUIPart } from "ai"; +import { MorphingTextAnimation } from "../../components/MorphingTextAnimation/MorphingTextAnimation"; +import { ToolAccordion } from "../../components/ToolAccordion/ToolAccordion"; +import { BlockOutputCard } from "./components/BlockOutputCard/BlockOutputCard"; +import { ErrorCard } from "./components/ErrorCard/ErrorCard"; +import { SetupRequirementsCard } from "./components/SetupRequirementsCard/SetupRequirementsCard"; +import { + getAccordionMeta, + getAnimationText, + getRunBlockToolOutput, + isRunBlockBlockOutput, + isRunBlockErrorOutput, + isRunBlockSetupRequirementsOutput, + ToolIcon, +} from "./helpers"; + +export interface RunBlockToolPart { + type: string; + toolCallId: string; + state: ToolUIPart["state"]; + input?: unknown; + output?: unknown; +} + +interface Props { + part: RunBlockToolPart; +} + +export function RunBlockTool({ part }: Props) { + const text = getAnimationText(part); + const isStreaming = + part.state === "input-streaming" || part.state === "input-available"; + + const output = getRunBlockToolOutput(part); + const isError = + part.state === "output-error" || + (!!output && isRunBlockErrorOutput(output)); + const hasExpandableContent = + part.state === "output-available" && + !!output && + (isRunBlockBlockOutput(output) || + isRunBlockSetupRequirementsOutput(output) || + isRunBlockErrorOutput(output)); + + return ( +
+
+ + +
+ + {hasExpandableContent && output && ( + + {isRunBlockBlockOutput(output) && } + + {isRunBlockSetupRequirementsOutput(output) && ( + + )} + + {isRunBlockErrorOutput(output) && } + + )} +
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunBlock/components/BlockOutputCard/BlockOutputCard.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunBlock/components/BlockOutputCard/BlockOutputCard.tsx new file mode 100644 index 0000000000..4051927653 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/copilot/tools/RunBlock/components/BlockOutputCard/BlockOutputCard.tsx @@ -0,0 +1,133 @@ +"use client"; + +import React, { useState } from "react"; +import { getGetWorkspaceDownloadFileByIdUrl } from "@/app/api/__generated__/endpoints/workspace/workspace"; +import { Button } from "@/components/atoms/Button/Button"; +import type { BlockOutputResponse } from "@/app/api/__generated__/models/blockOutputResponse"; +import { + globalRegistry, + OutputItem, +} from "@/components/contextual/OutputRenderers"; +import type { OutputMetadata } from "@/components/contextual/OutputRenderers"; +import { + ContentBadge, + ContentCard, + ContentCardTitle, + ContentGrid, + ContentMessage, +} from "../../../../components/ToolAccordion/AccordionContent"; + +interface Props { + output: BlockOutputResponse; +} + +const COLLAPSED_LIMIT = 3; + +function isWorkspaceRef(value: unknown): value is string { + return typeof value === "string" && value.startsWith("workspace://"); +} + +function resolveForRenderer(value: unknown): { + value: unknown; + metadata?: OutputMetadata; +} { + if (!isWorkspaceRef(value)) return { value }; + + const withoutPrefix = value.replace("workspace://", ""); + const fileId = withoutPrefix.split("#")[0]; + const apiPath = getGetWorkspaceDownloadFileByIdUrl(fileId); + const url = `/api/proxy${apiPath}`; + + const hashIndex = value.indexOf("#"); + const mimeHint = + hashIndex !== -1 ? value.slice(hashIndex + 1) || undefined : undefined; + + const metadata: OutputMetadata = {}; + if (mimeHint) { + metadata.mimeType = mimeHint; + if (mimeHint.startsWith("image/")) metadata.type = "image"; + else if (mimeHint.startsWith("video/")) metadata.type = "video"; + } + + return { value: url, metadata }; +} + +function RenderOutputValue({ value }: { value: unknown }) { + const resolved = resolveForRenderer(value); + const renderer = globalRegistry.getRenderer( + resolved.value, + resolved.metadata, + ); + + if (renderer) { + return ( + + ); + } + + // Fallback for audio workspace refs + if ( + isWorkspaceRef(value) && + resolved.metadata?.mimeType?.startsWith("audio/") + ) { + return ( +

Started:{" "} - {moment(data.started_at).format("YYYY-MM-DD HH:mm:ss")} + {data.started_at + ? format(data.started_at, "yyyy-MM-dd HH:mm:ss") + : "—"}

{data.stats && (

diff --git a/autogpt_platform/frontend/src/app/api/chat/sessions/[sessionId]/stream/route.ts b/autogpt_platform/frontend/src/app/api/chat/sessions/[sessionId]/stream/route.ts index d63eed0ca2..6facf80c58 100644 --- a/autogpt_platform/frontend/src/app/api/chat/sessions/[sessionId]/stream/route.ts +++ b/autogpt_platform/frontend/src/app/api/chat/sessions/[sessionId]/stream/route.ts @@ -88,39 +88,27 @@ export async function POST( } /** - * Legacy GET endpoint for backward compatibility + * Resume an active stream for a session. + * + * Called by the AI SDK's `useChat(resume: true)` on page load. + * Proxies to the backend which checks for an active stream and either + * replays it (200 + SSE) or returns 204 No Content. */ export async function GET( - request: NextRequest, + _request: NextRequest, { params }: { params: Promise<{ sessionId: string }> }, ) { const { sessionId } = await params; - const searchParams = request.nextUrl.searchParams; - const message = searchParams.get("message"); - const isUserMessage = searchParams.get("is_user_message"); - - if (!message) { - return new Response("Missing message parameter", { status: 400 }); - } try { - // Get auth token from server-side session const token = await getServerAuthToken(); - // Build backend URL const backendUrl = environment.getAGPTServerBaseUrl(); const streamUrl = new URL( `/api/chat/sessions/${sessionId}/stream`, backendUrl, ); - streamUrl.searchParams.set("message", message); - // Pass is_user_message parameter if provided - if (isUserMessage !== null) { - streamUrl.searchParams.set("is_user_message", isUserMessage); - } - - // Forward request to backend with auth header const headers: Record = { Accept: "text/event-stream", "Cache-Control": "no-cache", @@ -136,6 +124,11 @@ export async function GET( headers, }); + // 204 = no active stream to resume + if (response.status === 204) { + return new Response(null, { status: 204 }); + } + if (!response.ok) { const error = await response.text(); return new Response(error, { @@ -144,17 +137,17 @@ export async function GET( }); } - // Return the SSE stream directly return new Response(response.body, { headers: { "Content-Type": "text/event-stream", "Cache-Control": "no-cache, no-transform", Connection: "keep-alive", "X-Accel-Buffering": "no", + "x-vercel-ai-ui-message-stream": "v1", }, }); } catch (error) { - console.error("SSE proxy error:", error); + console.error("Resume stream proxy error:", error); return new Response( JSON.stringify({ error: "Failed to connect to chat service", diff --git a/autogpt_platform/frontend/src/app/api/openapi.json b/autogpt_platform/frontend/src/app/api/openapi.json index ccf5ad3e34..172419d27e 100644 --- a/autogpt_platform/frontend/src/app/api/openapi.json +++ b/autogpt_platform/frontend/src/app/api/openapi.json @@ -1018,6 +1018,58 @@ } } }, + "/api/chat/schema/tool-responses": { + "get": { + "tags": ["v2", "chat", "chat"], + "summary": "[Dummy] Tool response type export for codegen", + "description": "This endpoint is not meant to be called. 
It exists solely to expose tool response models in the OpenAPI schema for frontend codegen.", + "operationId": "getV2[dummy] tool response type export for codegen", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "anyOf": [ + { "$ref": "#/components/schemas/AgentsFoundResponse" }, + { "$ref": "#/components/schemas/NoResultsResponse" }, + { "$ref": "#/components/schemas/AgentDetailsResponse" }, + { + "$ref": "#/components/schemas/SetupRequirementsResponse" + }, + { "$ref": "#/components/schemas/ExecutionStartedResponse" }, + { "$ref": "#/components/schemas/NeedLoginResponse" }, + { "$ref": "#/components/schemas/ErrorResponse" }, + { + "$ref": "#/components/schemas/InputValidationErrorResponse" + }, + { "$ref": "#/components/schemas/AgentOutputResponse" }, + { + "$ref": "#/components/schemas/UnderstandingUpdatedResponse" + }, + { "$ref": "#/components/schemas/AgentPreviewResponse" }, + { "$ref": "#/components/schemas/AgentSavedResponse" }, + { + "$ref": "#/components/schemas/ClarificationNeededResponse" + }, + { "$ref": "#/components/schemas/BlockListResponse" }, + { "$ref": "#/components/schemas/BlockOutputResponse" }, + { "$ref": "#/components/schemas/DocSearchResultsResponse" }, + { "$ref": "#/components/schemas/DocPageResponse" }, + { "$ref": "#/components/schemas/OperationStartedResponse" }, + { "$ref": "#/components/schemas/OperationPendingResponse" }, + { + "$ref": "#/components/schemas/OperationInProgressResponse" + } + ], + "title": "Response Getv2[Dummy] Tool Response Type Export For Codegen" + } + } + } + } + } + } + }, "/api/chat/sessions": { "get": { "tags": ["v2", "chat", "chat"], @@ -1182,9 +1234,9 @@ "/api/chat/sessions/{session_id}/stream": { "get": { "tags": ["v2", "chat", "chat"], - "summary": "Stream Chat Get", - "description": "Stream chat responses for a session (GET - legacy endpoint).\n\nStreams the AI/completion responses in real time over Server-Sent Events (SSE), including:\n - Text fragments as they are generated\n - Tool call UI elements (if invoked)\n - Tool execution results\n\nArgs:\n session_id: The chat session identifier to associate with the streamed messages.\n message: The user's new message to process.\n user_id: Optional authenticated user ID.\n is_user_message: Whether the message is a user message.\nReturns:\n StreamingResponse: SSE-formatted response chunks.", - "operationId": "getV2StreamChatGet", + "summary": "Resume Session Stream", + "description": "Resume an active stream for a session.\n\nCalled by the AI SDK's ``useChat(resume: true)`` on page load.\nChecks for an active (in-progress) task on the session and either replays\nthe full SSE stream or returns 204 No Content if nothing is running.\n\nArgs:\n session_id: The chat session identifier.\n user_id: Optional authenticated user ID.\n\nReturns:\n StreamingResponse (SSE) when an active stream exists,\n or 204 No Content when there is nothing to resume.", + "operationId": "getV2ResumeSessionStream", "security": [{ "HTTPBearerJWT": [] }], "parameters": [ { @@ -1192,27 +1244,6 @@ "in": "path", "required": true, "schema": { "type": "string", "title": "Session Id" } - }, - { - "name": "message", - "in": "query", - "required": true, - "schema": { - "type": "string", - "minLength": 1, - "maxLength": 10000, - "title": "Message" - } - }, - { - "name": "is_user_message", - "in": "query", - "required": false, - "schema": { - "type": "boolean", - "default": true, - "title": "Is User Message" - } } ], "responses": { @@ -6358,6 
+6389,75 @@ "required": ["new_balance", "transaction_key"], "title": "AddUserCreditsResponse" }, + "AgentDetails": { + "properties": { + "id": { "type": "string", "title": "Id" }, + "name": { "type": "string", "title": "Name" }, + "description": { "type": "string", "title": "Description" }, + "in_library": { + "type": "boolean", + "title": "In Library", + "default": false + }, + "inputs": { + "additionalProperties": true, + "type": "object", + "title": "Inputs", + "default": {} + }, + "credentials": { + "items": { "$ref": "#/components/schemas/CredentialsMetaInput" }, + "type": "array", + "title": "Credentials", + "default": [] + }, + "execution_options": { + "$ref": "#/components/schemas/ExecutionOptions" + }, + "trigger_info": { + "anyOf": [ + { "additionalProperties": true, "type": "object" }, + { "type": "null" } + ], + "title": "Trigger Info" + } + }, + "type": "object", + "required": ["id", "name", "description"], + "title": "AgentDetails", + "description": "Detailed agent information." + }, + "AgentDetailsResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "agent_details" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "agent": { "$ref": "#/components/schemas/AgentDetails" }, + "user_authenticated": { + "type": "boolean", + "title": "User Authenticated", + "default": false + }, + "graph_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Graph Id" + }, + "graph_version": { + "anyOf": [{ "type": "integer" }, { "type": "null" }], + "title": "Graph Version" + } + }, + "type": "object", + "required": ["message", "agent"], + "title": "AgentDetailsResponse", + "description": "Response for get_details action." 
+ }, "AgentExecutionStatus": { "type": "string", "enum": [ @@ -6371,6 +6471,224 @@ ], "title": "AgentExecutionStatus" }, + "AgentInfo": { + "properties": { + "id": { "type": "string", "title": "Id" }, + "name": { "type": "string", "title": "Name" }, + "description": { "type": "string", "title": "Description" }, + "source": { + "type": "string", + "title": "Source", + "description": "marketplace or library" + }, + "in_library": { + "type": "boolean", + "title": "In Library", + "default": false + }, + "creator": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Creator" + }, + "category": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Category" + }, + "rating": { + "anyOf": [{ "type": "number" }, { "type": "null" }], + "title": "Rating" + }, + "runs": { + "anyOf": [{ "type": "integer" }, { "type": "null" }], + "title": "Runs" + }, + "is_featured": { + "anyOf": [{ "type": "boolean" }, { "type": "null" }], + "title": "Is Featured" + }, + "status": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Status" + }, + "can_access_graph": { + "anyOf": [{ "type": "boolean" }, { "type": "null" }], + "title": "Can Access Graph" + }, + "has_external_trigger": { + "anyOf": [{ "type": "boolean" }, { "type": "null" }], + "title": "Has External Trigger" + }, + "new_output": { + "anyOf": [{ "type": "boolean" }, { "type": "null" }], + "title": "New Output" + }, + "graph_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Graph Id" + }, + "inputs": { + "anyOf": [ + { "additionalProperties": true, "type": "object" }, + { "type": "null" } + ], + "title": "Inputs", + "description": "Input schema for the agent, including field names, types, and defaults" + } + }, + "type": "object", + "required": ["id", "name", "description", "source"], + "title": "AgentInfo", + "description": "Information about an agent." + }, + "AgentOutputResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "agent_output" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "agent_name": { "type": "string", "title": "Agent Name" }, + "agent_id": { "type": "string", "title": "Agent Id" }, + "library_agent_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Library Agent Id" + }, + "library_agent_link": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Library Agent Link" + }, + "execution": { + "anyOf": [ + { "$ref": "#/components/schemas/ExecutionOutputInfo" }, + { "type": "null" } + ] + }, + "available_executions": { + "anyOf": [ + { + "items": { "additionalProperties": true, "type": "object" }, + "type": "array" + }, + { "type": "null" } + ], + "title": "Available Executions" + }, + "total_executions": { + "type": "integer", + "title": "Total Executions", + "default": 0 + } + }, + "type": "object", + "required": ["message", "agent_name", "agent_id"], + "title": "AgentOutputResponse", + "description": "Response for agent_output tool." 
+ }, + "AgentPreviewResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "agent_preview" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "agent_json": { + "additionalProperties": true, + "type": "object", + "title": "Agent Json" + }, + "agent_name": { "type": "string", "title": "Agent Name" }, + "description": { "type": "string", "title": "Description" }, + "node_count": { "type": "integer", "title": "Node Count" }, + "link_count": { + "type": "integer", + "title": "Link Count", + "default": 0 + } + }, + "type": "object", + "required": [ + "message", + "agent_json", + "agent_name", + "description", + "node_count" + ], + "title": "AgentPreviewResponse", + "description": "Response for previewing a generated agent before saving." + }, + "AgentSavedResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "agent_saved" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "agent_id": { "type": "string", "title": "Agent Id" }, + "agent_name": { "type": "string", "title": "Agent Name" }, + "library_agent_id": { "type": "string", "title": "Library Agent Id" }, + "library_agent_link": { + "type": "string", + "title": "Library Agent Link" + }, + "agent_page_link": { "type": "string", "title": "Agent Page Link" } + }, + "type": "object", + "required": [ + "message", + "agent_id", + "agent_name", + "library_agent_id", + "library_agent_link", + "agent_page_link" + ], + "title": "AgentSavedResponse", + "description": "Response when an agent is saved to the library." + }, + "AgentsFoundResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "agents_found" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "title": { + "type": "string", + "title": "Title", + "default": "Available Agents" + }, + "agents": { + "items": { "$ref": "#/components/schemas/AgentInfo" }, + "type": "array", + "title": "Agents" + }, + "count": { "type": "integer", "title": "Count" }, + "name": { + "type": "string", + "title": "Name", + "default": "agents_found" + } + }, + "type": "object", + "required": ["message", "agents", "count"], + "title": "AgentsFoundResponse", + "description": "Response for find_agent tool." 
+ }, "ApiResponse": { "properties": { "answer": { "type": "string", "title": "Answer" }, @@ -6691,6 +7009,120 @@ ], "title": "BlockInfo" }, + "BlockInfoSummary": { + "properties": { + "id": { "type": "string", "title": "Id" }, + "name": { "type": "string", "title": "Name" }, + "description": { "type": "string", "title": "Description" }, + "categories": { + "items": { "type": "string" }, + "type": "array", + "title": "Categories" + }, + "input_schema": { + "additionalProperties": true, + "type": "object", + "title": "Input Schema" + }, + "output_schema": { + "additionalProperties": true, + "type": "object", + "title": "Output Schema" + }, + "required_inputs": { + "items": { "$ref": "#/components/schemas/BlockInputFieldInfo" }, + "type": "array", + "title": "Required Inputs", + "description": "List of required input fields for this block" + } + }, + "type": "object", + "required": [ + "id", + "name", + "description", + "categories", + "input_schema", + "output_schema" + ], + "title": "BlockInfoSummary", + "description": "Summary of a block for search results." + }, + "BlockInputFieldInfo": { + "properties": { + "name": { "type": "string", "title": "Name" }, + "type": { "type": "string", "title": "Type" }, + "description": { + "type": "string", + "title": "Description", + "default": "" + }, + "required": { + "type": "boolean", + "title": "Required", + "default": false + }, + "default": { "anyOf": [{}, { "type": "null" }], "title": "Default" } + }, + "type": "object", + "required": ["name", "type"], + "title": "BlockInputFieldInfo", + "description": "Information about a block input field." + }, + "BlockListResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "block_list" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "blocks": { + "items": { "$ref": "#/components/schemas/BlockInfoSummary" }, + "type": "array", + "title": "Blocks" + }, + "count": { "type": "integer", "title": "Count" }, + "query": { "type": "string", "title": "Query" }, + "usage_hint": { + "type": "string", + "title": "Usage Hint", + "default": "To execute a block, call run_block with block_id set to the block's 'id' field and input_data containing the required fields from input_schema." + } + }, + "type": "object", + "required": ["message", "blocks", "count", "query"], + "title": "BlockListResponse", + "description": "Response for find_block tool." + }, + "BlockOutputResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "block_output" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "block_id": { "type": "string", "title": "Block Id" }, + "block_name": { "type": "string", "title": "Block Name" }, + "outputs": { + "additionalProperties": { "items": {}, "type": "array" }, + "type": "object", + "title": "Outputs" + }, + "success": { "type": "boolean", "title": "Success", "default": true } + }, + "type": "object", + "required": ["message", "block_id", "block_name", "outputs"], + "title": "BlockOutputResponse", + "description": "Response for run_block tool." 
+ }, "BlockResponse": { "properties": { "blocks": { @@ -6937,6 +7369,42 @@ "required": ["query", "conversation_history", "message_id"], "title": "ChatRequest" }, + "ClarificationNeededResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "clarification_needed" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "questions": { + "items": { "$ref": "#/components/schemas/ClarifyingQuestion" }, + "type": "array", + "title": "Questions" + } + }, + "type": "object", + "required": ["message"], + "title": "ClarificationNeededResponse", + "description": "Response when the LLM needs more information from the user." + }, + "ClarifyingQuestion": { + "properties": { + "question": { "type": "string", "title": "Question" }, + "keyword": { "type": "string", "title": "Keyword" }, + "example": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Example" + } + }, + "type": "object", + "required": ["question", "keyword"], + "title": "ClarifyingQuestion", + "description": "A question that needs user clarification." + }, "CountResponse": { "properties": { "all_blocks": { "type": "integer", "title": "All Blocks" }, @@ -7195,6 +7663,71 @@ "required": ["version_counts"], "title": "DeleteGraphResponse" }, + "DocPageResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "doc_page" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "title": { "type": "string", "title": "Title" }, + "path": { "type": "string", "title": "Path" }, + "content": { "type": "string", "title": "Content" }, + "doc_url": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Doc Url" + } + }, + "type": "object", + "required": ["message", "title", "path", "content"], + "title": "DocPageResponse", + "description": "Response for get_doc_page tool." + }, + "DocSearchResult": { + "properties": { + "title": { "type": "string", "title": "Title" }, + "path": { "type": "string", "title": "Path" }, + "section": { "type": "string", "title": "Section" }, + "snippet": { "type": "string", "title": "Snippet" }, + "score": { "type": "number", "title": "Score" }, + "doc_url": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Doc Url" + } + }, + "type": "object", + "required": ["title", "path", "section", "snippet", "score"], + "title": "DocSearchResult", + "description": "A single documentation search result." + }, + "DocSearchResultsResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "doc_search_results" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "results": { + "items": { "$ref": "#/components/schemas/DocSearchResult" }, + "type": "array", + "title": "Results" + }, + "count": { "type": "integer", "title": "Count" }, + "query": { "type": "string", "title": "Query" } + }, + "type": "object", + "required": ["message", "results", "count", "query"], + "title": "DocSearchResultsResponse", + "description": "Response for search_docs tool." 
+ }, "Document": { "properties": { "url": { "type": "string", "title": "Url" }, @@ -7204,6 +7737,34 @@ "required": ["url", "relevance_score"], "title": "Document" }, + "ErrorResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "error" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "error": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Error" + }, + "details": { + "anyOf": [ + { "additionalProperties": true, "type": "object" }, + { "type": "null" } + ], + "title": "Details" + } + }, + "type": "object", + "required": ["message"], + "title": "ErrorResponse", + "description": "Response for errors." + }, "ExecutionAnalyticsConfig": { "properties": { "available_models": { @@ -7380,6 +7941,85 @@ ], "title": "ExecutionAnalyticsResult" }, + "ExecutionOptions": { + "properties": { + "manual": { "type": "boolean", "title": "Manual", "default": true }, + "scheduled": { + "type": "boolean", + "title": "Scheduled", + "default": true + }, + "webhook": { "type": "boolean", "title": "Webhook", "default": false } + }, + "type": "object", + "title": "ExecutionOptions", + "description": "Available execution options for an agent." + }, + "ExecutionOutputInfo": { + "properties": { + "execution_id": { "type": "string", "title": "Execution Id" }, + "status": { "type": "string", "title": "Status" }, + "started_at": { + "anyOf": [ + { "type": "string", "format": "date-time" }, + { "type": "null" } + ], + "title": "Started At" + }, + "ended_at": { + "anyOf": [ + { "type": "string", "format": "date-time" }, + { "type": "null" } + ], + "title": "Ended At" + }, + "outputs": { + "additionalProperties": { "items": {}, "type": "array" }, + "type": "object", + "title": "Outputs" + }, + "inputs_summary": { + "anyOf": [ + { "additionalProperties": true, "type": "object" }, + { "type": "null" } + ], + "title": "Inputs Summary" + } + }, + "type": "object", + "required": ["execution_id", "status", "outputs"], + "title": "ExecutionOutputInfo", + "description": "Summary of a single execution's outputs." + }, + "ExecutionStartedResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "execution_started" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "execution_id": { "type": "string", "title": "Execution Id" }, + "graph_id": { "type": "string", "title": "Graph Id" }, + "graph_name": { "type": "string", "title": "Graph Name" }, + "library_agent_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Library Agent Id" + }, + "library_agent_link": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Library Agent Link" + }, + "status": { "type": "string", "title": "Status", "default": "QUEUED" } + }, + "type": "object", + "required": ["message", "execution_id", "graph_id", "graph_name"], + "title": "ExecutionStartedResponse", + "description": "Response for run/schedule actions." 
+ }, "Graph": { "properties": { "id": { "type": "string", "title": "Id" }, @@ -8131,6 +8771,43 @@ "required": ["provider", "host"], "title": "HostScopedCredentials" }, + "InputValidationErrorResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "input_validation_error" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "unrecognized_fields": { + "items": { "type": "string" }, + "type": "array", + "title": "Unrecognized Fields", + "description": "List of input field names that were not recognized" + }, + "inputs": { + "additionalProperties": true, + "type": "object", + "title": "Inputs", + "description": "The agent's valid input schema for reference" + }, + "graph_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Graph Id" + }, + "graph_version": { + "anyOf": [{ "type": "integer" }, { "type": "null" }], + "title": "Graph Version" + } + }, + "type": "object", + "required": ["message", "unrecognized_fields", "inputs"], + "title": "InputValidationErrorResponse", + "description": "Response when run_agent receives unknown input fields." + }, "LibraryAgent": { "properties": { "id": { "type": "string", "title": "Id" }, @@ -8649,6 +9326,54 @@ "required": ["agents", "pagination"], "title": "MyAgentsResponse" }, + "NeedLoginResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "need_login" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "agent_info": { + "anyOf": [ + { "additionalProperties": true, "type": "object" }, + { "type": "null" } + ], + "title": "Agent Info" + } + }, + "type": "object", + "required": ["message"], + "title": "NeedLoginResponse", + "description": "Response when login is needed." + }, + "NoResultsResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "no_results" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "suggestions": { + "items": { "type": "string" }, + "type": "array", + "title": "Suggestions", + "default": [] + }, + "name": { "type": "string", "title": "Name", "default": "no_results" } + }, + "type": "object", + "required": ["message"], + "title": "NoResultsResponse", + "description": "Response when no agents found." + }, "Node": { "properties": { "id": { "type": "string", "title": "Id" }, @@ -9058,6 +9783,66 @@ "title": "OperationCompleteRequest", "description": "Request model for external completion webhook." }, + "OperationInProgressResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "operation_in_progress" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "tool_call_id": { "type": "string", "title": "Tool Call Id" } + }, + "type": "object", + "required": ["message", "tool_call_id"], + "title": "OperationInProgressResponse", + "description": "Response when an operation is already in progress.\n\nReturned for idempotency when the same tool_call_id is requested again\nwhile the background task is still running." 
+ }, + "OperationPendingResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "operation_pending" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "operation_id": { "type": "string", "title": "Operation Id" }, + "tool_name": { "type": "string", "title": "Tool Name" } + }, + "type": "object", + "required": ["message", "operation_id", "tool_name"], + "title": "OperationPendingResponse", + "description": "Response stored in chat history while a long-running operation is executing.\n\nThis is persisted to the database so users see a pending state when they\nrefresh before the operation completes." + }, + "OperationStartedResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "operation_started" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "operation_id": { "type": "string", "title": "Operation Id" }, + "tool_name": { "type": "string", "title": "Tool Name" }, + "task_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Task Id" + } + }, + "type": "object", + "required": ["message", "operation_id", "tool_name"], + "title": "OperationStartedResponse", + "description": "Response when a long-running operation has been started in the background.\n\nThis is returned immediately to the client while the operation continues\nto execute. The user can close the tab and check back later.\n\nThe task_id can be used to reconnect to the SSE stream via\nGET /chat/tasks/{task_id}/stream?last_idx=0" + }, "Pagination": { "properties": { "total_items": { @@ -9689,6 +10474,38 @@ "required": ["credit_amount"], "title": "RequestTopUp" }, + "ResponseType": { + "type": "string", + "enum": [ + "agents_found", + "agent_details", + "setup_requirements", + "execution_started", + "need_login", + "error", + "no_results", + "agent_output", + "understanding_updated", + "agent_preview", + "agent_saved", + "clarification_needed", + "block_list", + "block_output", + "doc_search_results", + "doc_page", + "workspace_file_list", + "workspace_file_content", + "workspace_file_metadata", + "workspace_file_written", + "workspace_file_deleted", + "operation_started", + "operation_pending", + "operation_in_progress", + "input_validation_error" + ], + "title": "ResponseType", + "description": "Types of tool responses." + }, "ReviewItem": { "properties": { "node_exec_id": { @@ -9952,6 +10769,48 @@ "required": ["active_graph_version"], "title": "SetGraphActiveVersion" }, + "SetupInfo": { + "properties": { + "agent_id": { "type": "string", "title": "Agent Id" }, + "agent_name": { "type": "string", "title": "Agent Name" }, + "requirements": { + "additionalProperties": { "items": {}, "type": "array" }, + "type": "object", + "title": "Requirements" + }, + "user_readiness": { "$ref": "#/components/schemas/UserReadiness" } + }, + "type": "object", + "required": ["agent_id", "agent_name"], + "title": "SetupInfo", + "description": "Complete setup information." 
+ }, + "SetupRequirementsResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "setup_requirements" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "setup_info": { "$ref": "#/components/schemas/SetupInfo" }, + "graph_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Graph Id" + }, + "graph_version": { + "anyOf": [{ "type": "integer" }, { "type": "null" }], + "title": "Graph Version" + } + }, + "type": "object", + "required": ["message", "setup_info"], + "title": "SetupRequirementsResponse", + "description": "Response for validate action." + }, "ShareRequest": { "properties": {}, "type": "object", @@ -11348,6 +12207,33 @@ "required": ["name", "graph_id", "graph_version", "trigger_config"], "title": "TriggeredPresetSetupRequest" }, + "UnderstandingUpdatedResponse": { + "properties": { + "type": { + "$ref": "#/components/schemas/ResponseType", + "default": "understanding_updated" + }, + "message": { "type": "string", "title": "Message" }, + "session_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Session Id" + }, + "updated_fields": { + "items": { "type": "string" }, + "type": "array", + "title": "Updated Fields" + }, + "current_understanding": { + "additionalProperties": true, + "type": "object", + "title": "Current Understanding" + } + }, + "type": "object", + "required": ["message"], + "title": "UnderstandingUpdatedResponse", + "description": "Response for add_understanding tool." + }, "UnifiedSearchResponse": { "properties": { "results": { @@ -12226,6 +13112,29 @@ "required": ["provider", "username", "password"], "title": "UserPasswordCredentials" }, + "UserReadiness": { + "properties": { + "has_all_credentials": { + "type": "boolean", + "title": "Has All Credentials", + "default": false + }, + "missing_credentials": { + "additionalProperties": true, + "type": "object", + "title": "Missing Credentials", + "default": {} + }, + "ready_to_run": { + "type": "boolean", + "title": "Ready To Run", + "default": false + } + }, + "type": "object", + "title": "UserReadiness", + "description": "User readiness status." 
+ }, "UserTransaction": { "properties": { "transaction_key": { diff --git a/autogpt_platform/frontend/src/app/globals.css b/autogpt_platform/frontend/src/app/globals.css index 1f782f753b..dd1d17cde7 100644 --- a/autogpt_platform/frontend/src/app/globals.css +++ b/autogpt_platform/frontend/src/app/globals.css @@ -1,6 +1,7 @@ @tailwind base; @tailwind components; @tailwind utilities; +@source "../node_modules/streamdown/dist/*.js"; @layer base { :root { @@ -29,6 +30,14 @@ --chart-3: 197 37% 24%; --chart-4: 43 74% 66%; --chart-5: 27 87% 67%; + --sidebar-background: 0 0% 98%; + --sidebar-foreground: 240 5.3% 26.1%; + --sidebar-primary: 240 5.9% 10%; + --sidebar-primary-foreground: 0 0% 98%; + --sidebar-accent: 240 4.8% 95.9%; + --sidebar-accent-foreground: 240 5.9% 10%; + --sidebar-border: 220 13% 91%; + --sidebar-ring: 217.2 91.2% 59.8%; } .dark { @@ -56,6 +65,14 @@ --chart-3: 30 80% 55%; --chart-4: 280 65% 60%; --chart-5: 340 75% 55%; + --sidebar-background: 240 5.9% 10%; + --sidebar-foreground: 240 4.8% 95.9%; + --sidebar-primary: 224.3 76.3% 48%; + --sidebar-primary-foreground: 0 0% 100%; + --sidebar-accent: 240 3.7% 15.9%; + --sidebar-accent-foreground: 240 4.8% 95.9%; + --sidebar-border: 240 3.7% 15.9%; + --sidebar-ring: 217.2 91.2% 59.8%; } * { diff --git a/autogpt_platform/frontend/src/components/ai-elements/conversation.tsx b/autogpt_platform/frontend/src/components/ai-elements/conversation.tsx new file mode 100644 index 0000000000..92e940c715 --- /dev/null +++ b/autogpt_platform/frontend/src/components/ai-elements/conversation.tsx @@ -0,0 +1,109 @@ +"use client"; + +import { Button } from "@/components/ui/button"; +import { scrollbarStyles } from "@/components/styles/scrollbars"; +import { cn } from "@/lib/utils"; +import { ArrowDownIcon } from "lucide-react"; +import type { ComponentProps } from "react"; +import { useCallback } from "react"; +import { StickToBottom, useStickToBottomContext } from "use-stick-to-bottom"; + +export type ConversationProps = ComponentProps; + +export const Conversation = ({ className, ...props }: ConversationProps) => ( + +); + +export type ConversationContentProps = ComponentProps< + typeof StickToBottom.Content +>; + +export const ConversationContent = ({ + className, + ...props +}: ConversationContentProps) => ( + +); + +export type ConversationEmptyStateProps = ComponentProps<"div"> & { + title?: string; + description?: string; + icon?: React.ReactNode; +}; + +export const ConversationEmptyState = ({ + className, + title = "No messages yet", + description = "Start a conversation to see messages here", + icon, + children, + ...props +}: ConversationEmptyStateProps) => ( +

+ {children ?? ( + <> + {icon && ( +
{icon}
+ )} +
+

{title}

+ {description && ( +

+ {description} +

+ )} +
+ + )} +
+); + +export type ConversationScrollButtonProps = ComponentProps; + +export const ConversationScrollButton = ({ + className, + ...props +}: ConversationScrollButtonProps) => { + const { isAtBottom, scrollToBottom } = useStickToBottomContext(); + + const handleScrollToBottom = useCallback(() => { + scrollToBottom(); + }, [scrollToBottom]); + + return ( + !isAtBottom && ( + + ) + ); +}; diff --git a/autogpt_platform/frontend/src/components/ai-elements/message.tsx b/autogpt_platform/frontend/src/components/ai-elements/message.tsx new file mode 100644 index 0000000000..5cc330e57c --- /dev/null +++ b/autogpt_platform/frontend/src/components/ai-elements/message.tsx @@ -0,0 +1,338 @@ +"use client"; + +import { Button } from "@/components/ui/button"; +import { ButtonGroup, ButtonGroupText } from "@/components/ui/button-group"; +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from "@/components/ui/tooltip"; +import { cn } from "@/lib/utils"; +import { cjk } from "@streamdown/cjk"; +import { code } from "@streamdown/code"; +import { math } from "@streamdown/math"; +import { mermaid } from "@streamdown/mermaid"; +import type { UIMessage } from "ai"; +import { ChevronLeftIcon, ChevronRightIcon } from "lucide-react"; +import type { ComponentProps, HTMLAttributes, ReactElement } from "react"; +import { createContext, memo, useContext, useEffect, useState } from "react"; +import { Streamdown } from "streamdown"; + +export type MessageProps = HTMLAttributes & { + from: UIMessage["role"]; +}; + +export const Message = ({ className, from, ...props }: MessageProps) => ( +
+); + +export type MessageContentProps = HTMLAttributes; + +export const MessageContent = ({ + children, + className, + ...props +}: MessageContentProps) => ( +
+ {children} +
+); + +export type MessageActionsProps = ComponentProps<"div">; + +export const MessageActions = ({ + className, + children, + ...props +}: MessageActionsProps) => ( +
+ {children} +
+); + +export type MessageActionProps = ComponentProps & { + tooltip?: string; + label?: string; +}; + +export const MessageAction = ({ + tooltip, + children, + label, + variant = "ghost", + size = "icon-sm", + ...props +}: MessageActionProps) => { + const button = ( + + ); + + if (tooltip) { + return ( + + + {button} + +

{tooltip}

+
+
+
+ ); + } + + return button; +}; + +interface MessageBranchContextType { + currentBranch: number; + totalBranches: number; + goToPrevious: () => void; + goToNext: () => void; + branches: ReactElement[]; + setBranches: (branches: ReactElement[]) => void; +} + +const MessageBranchContext = createContext( + null, +); + +const useMessageBranch = () => { + const context = useContext(MessageBranchContext); + + if (!context) { + throw new Error("MessageBranch components must be used within"); + } + + return context; +}; + +export type MessageBranchProps = HTMLAttributes & { + defaultBranch?: number; + onBranchChange?: (branchIndex: number) => void; +}; + +export const MessageBranch = ({ + defaultBranch = 0, + onBranchChange, + className, + ...props +}: MessageBranchProps) => { + const [currentBranch, setCurrentBranch] = useState(defaultBranch); + const [branches, setBranches] = useState([]); + + const handleBranchChange = (newBranch: number) => { + setCurrentBranch(newBranch); + onBranchChange?.(newBranch); + }; + + const goToPrevious = () => { + const newBranch = + currentBranch > 0 ? currentBranch - 1 : branches.length - 1; + handleBranchChange(newBranch); + }; + + const goToNext = () => { + const newBranch = + currentBranch < branches.length - 1 ? currentBranch + 1 : 0; + handleBranchChange(newBranch); + }; + + const contextValue: MessageBranchContextType = { + currentBranch, + totalBranches: branches.length, + goToPrevious, + goToNext, + branches, + setBranches, + }; + + return ( + +
div]:pb-0", className)} + {...props} + /> + + ); +}; + +export type MessageBranchContentProps = HTMLAttributes; + +export const MessageBranchContent = ({ + children, + ...props +}: MessageBranchContentProps) => { + const { currentBranch, setBranches, branches } = useMessageBranch(); + const childrenArray = Array.isArray(children) ? children : [children]; + + // Use useEffect to update branches when they change + useEffect(() => { + if (branches.length !== childrenArray.length) { + setBranches(childrenArray); + } + }, [childrenArray, branches, setBranches]); + + return childrenArray.map((branch, index) => ( +
div]:pb-0", + index === currentBranch ? "block" : "hidden", + )} + key={branch.key} + {...props} + > + {branch} +
+ )); +}; + +export type MessageBranchSelectorProps = HTMLAttributes & { + from: UIMessage["role"]; +}; + +export const MessageBranchSelector = ({ + className, + from: _from, + ...props +}: MessageBranchSelectorProps) => { + const { totalBranches } = useMessageBranch(); + + // Don't render if there's only one branch + if (totalBranches <= 1) { + return null; + } + + return ( + *:not(:first-child)]:rounded-l-md [&>*:not(:last-child)]:rounded-r-md", + className, + )} + orientation="horizontal" + {...props} + /> + ); +}; + +export type MessageBranchPreviousProps = ComponentProps; + +export const MessageBranchPrevious = ({ + children, + ...props +}: MessageBranchPreviousProps) => { + const { goToPrevious, totalBranches } = useMessageBranch(); + + return ( + + ); +}; + +export type MessageBranchNextProps = ComponentProps; + +export const MessageBranchNext = ({ + children, + ...props +}: MessageBranchNextProps) => { + const { goToNext, totalBranches } = useMessageBranch(); + + return ( + + ); +}; + +export type MessageBranchPageProps = HTMLAttributes; + +export const MessageBranchPage = ({ + className, + ...props +}: MessageBranchPageProps) => { + const { currentBranch, totalBranches } = useMessageBranch(); + + return ( + + {currentBranch + 1} of {totalBranches} + + ); +}; + +export type MessageResponseProps = ComponentProps; + +export const MessageResponse = memo( + ({ className, ...props }: MessageResponseProps) => ( + *:first-child]:mt-0 [&>*:last-child]:mb-0 [&_pre]:!bg-white", + className, + )} + plugins={{ code, mermaid, math, cjk }} + {...props} + /> + ), + (prevProps, nextProps) => prevProps.children === nextProps.children, +); + +MessageResponse.displayName = "MessageResponse"; + +export type MessageToolbarProps = ComponentProps<"div">; + +export const MessageToolbar = ({ + className, + children, + ...props +}: MessageToolbarProps) => ( +
+ {children} +
+); diff --git a/autogpt_platform/frontend/src/components/atoms/OverflowText/OverflowText.tsx b/autogpt_platform/frontend/src/components/atoms/OverflowText/OverflowText.tsx index efc345f79c..b118cc5aa0 100644 --- a/autogpt_platform/frontend/src/components/atoms/OverflowText/OverflowText.tsx +++ b/autogpt_platform/frontend/src/components/atoms/OverflowText/OverflowText.tsx @@ -77,7 +77,7 @@ export function OverflowText(props: Props) { "block min-w-0 overflow-hidden text-ellipsis whitespace-nowrap", )} > - + {value} diff --git a/autogpt_platform/frontend/src/components/atoms/Text/Text.tsx b/autogpt_platform/frontend/src/components/atoms/Text/Text.tsx index 8bae184e5b..86c39b6436 100644 --- a/autogpt_platform/frontend/src/components/atoms/Text/Text.tsx +++ b/autogpt_platform/frontend/src/components/atoms/Text/Text.tsx @@ -1,4 +1,5 @@ import React from "react"; +import { cn } from "@/lib/utils"; import { As, Variant, variantElementMap, variants } from "./helpers"; type CustomProps = { @@ -22,7 +23,7 @@ export function Text({ }: TextProps) { const variantClasses = variants[size || variant] || variants.body; const Element = outerAs || variantElementMap[variant]; - const combinedClassName = `${variantClasses} ${className}`.trim(); + const combinedClassName = cn(variantClasses, className); return React.createElement( Element, diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/Chat.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/Chat.tsx deleted file mode 100644 index da454150bf..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/Chat.tsx +++ /dev/null @@ -1,114 +0,0 @@ -"use client"; - -import { useCopilotSessionId } from "@/app/(platform)/copilot/useCopilotSessionId"; -import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner"; -import { Text } from "@/components/atoms/Text/Text"; -import { cn } from "@/lib/utils"; -import { useEffect, useRef } from "react"; -import { ChatContainer } from "./components/ChatContainer/ChatContainer"; -import { ChatErrorState } from "./components/ChatErrorState/ChatErrorState"; -import { useChat } from "./useChat"; - -export interface ChatProps { - className?: string; - initialPrompt?: string; - onSessionNotFound?: () => void; - onStreamingChange?: (isStreaming: boolean) => void; -} - -export function Chat({ - className, - initialPrompt, - onSessionNotFound, - onStreamingChange, -}: ChatProps) { - const { urlSessionId } = useCopilotSessionId(); - const hasHandledNotFoundRef = useRef(false); - const { - session, - messages, - isLoading, - isCreating, - error, - isSessionNotFound, - sessionId, - createSession, - showLoader, - startPollingForOperation, - } = useChat({ urlSessionId }); - - // Extract active stream info for reconnection - const activeStream = ( - session as { - active_stream?: { - task_id: string; - last_message_id: string; - operation_id: string; - tool_name: string; - }; - } - )?.active_stream; - - useEffect(() => { - if (!onSessionNotFound) return; - if (!urlSessionId) return; - if (!isSessionNotFound || isLoading || isCreating) return; - if (hasHandledNotFoundRef.current) return; - hasHandledNotFoundRef.current = true; - onSessionNotFound(); - }, [ - onSessionNotFound, - urlSessionId, - isSessionNotFound, - isLoading, - isCreating, - ]); - - const shouldShowLoader = showLoader && (isLoading || isCreating); - - return ( -
- {/* Main Content */} -
- {/* Loading State */} - {shouldShowLoader && ( -
-
- - - Loading your chat... - -
-
- )} - - {/* Error State */} - {error && !isLoading && ( - - )} - - {/* Session Content */} - {sessionId && !isLoading && !error && ( - - )} -
-
- ); -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/SSE_RECONNECTION.md b/autogpt_platform/frontend/src/components/contextual/Chat/SSE_RECONNECTION.md deleted file mode 100644 index 9e78679f4e..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/SSE_RECONNECTION.md +++ /dev/null @@ -1,159 +0,0 @@ -# SSE Reconnection Contract for Long-Running Operations - -This document describes the client-side contract for handling SSE (Server-Sent Events) disconnections and reconnecting to long-running background tasks. - -## Overview - -When a user triggers a long-running operation (like agent generation), the backend: - -1. Spawns a background task that survives SSE disconnections -2. Returns an `operation_started` response with a `task_id` -3. Stores stream messages in Redis Streams for replay - -Clients can reconnect to the task stream at any time to receive missed messages. - -## Client-Side Flow - -### 1. Receiving Operation Started - -When you receive an `operation_started` tool response: - -```typescript -// The response includes a task_id for reconnection -{ - type: "operation_started", - tool_name: "generate_agent", - operation_id: "uuid-...", - task_id: "task-uuid-...", // <-- Store this for reconnection - message: "Operation started. You can close this tab." -} -``` - -### 2. Storing Task Info - -Use the chat store to track the active task: - -```typescript -import { useChatStore } from "./chat-store"; - -// When operation_started is received: -useChatStore.getState().setActiveTask(sessionId, { - taskId: response.task_id, - operationId: response.operation_id, - toolName: response.tool_name, - lastMessageId: "0", -}); -``` - -### 3. Reconnecting to a Task - -To reconnect (e.g., after page refresh or tab reopen): - -```typescript -const { reconnectToTask, getActiveTask } = useChatStore.getState(); - -// Check if there's an active task for this session -const activeTask = getActiveTask(sessionId); - -if (activeTask) { - // Reconnect to the task stream - await reconnectToTask( - sessionId, - activeTask.taskId, - activeTask.lastMessageId, // Resume from last position - (chunk) => { - // Handle incoming chunks - console.log("Received chunk:", chunk); - }, - ); -} -``` - -### 4. Tracking Message Position - -To enable precise replay, update the last message ID as chunks arrive: - -```typescript -const { updateTaskLastMessageId } = useChatStore.getState(); - -function handleChunk(chunk: StreamChunk) { - // If chunk has an index/id, track it - if (chunk.idx !== undefined) { - updateTaskLastMessageId(sessionId, String(chunk.idx)); - } -} -``` - -## API Endpoints - -### Task Stream Reconnection - -``` -GET /api/chat/tasks/{taskId}/stream?last_message_id={idx} -``` - -- `taskId`: The task ID from `operation_started` -- `last_message_id`: Last received message index (default: "0" for full replay) - -Returns: SSE stream of missed messages + live updates - -## Chunk Types - -The reconnected stream follows the same Vercel AI SDK protocol: - -| Type | Description | -| ----------------------- | ----------------------- | -| `start` | Message lifecycle start | -| `text-delta` | Streaming text content | -| `text-end` | Text block completed | -| `tool-output-available` | Tool result available | -| `finish` | Stream completed | -| `error` | Error occurred | - -## Error Handling - -If reconnection fails: - -1. Check if task still exists (may have expired - default TTL: 1 hour) -2. Fall back to polling the session for final state -3. 
Show appropriate UI message to user - -## Persistence Considerations - -For robust reconnection across browser restarts: - -```typescript -// Store in localStorage/sessionStorage -const ACTIVE_TASKS_KEY = "chat_active_tasks"; - -function persistActiveTask(sessionId: string, task: ActiveTaskInfo) { - const tasks = JSON.parse(localStorage.getItem(ACTIVE_TASKS_KEY) || "{}"); - tasks[sessionId] = task; - localStorage.setItem(ACTIVE_TASKS_KEY, JSON.stringify(tasks)); -} - -function loadPersistedTasks(): Record { - return JSON.parse(localStorage.getItem(ACTIVE_TASKS_KEY) || "{}"); -} -``` - -## Backend Configuration - -The following backend settings affect reconnection behavior: - -| Setting | Default | Description | -| ------------------- | ------- | ---------------------------------- | -| `stream_ttl` | 3600s | How long streams are kept in Redis | -| `stream_max_length` | 1000 | Max messages per stream | - -## Testing - -To test reconnection locally: - -1. Start a long-running operation (e.g., agent generation) -2. Note the `task_id` from the `operation_started` response -3. Close the browser tab -4. Reopen and call `reconnectToTask` with the saved `task_id` -5. Verify that missed messages are replayed - -See the main README for full local development setup. diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/chat-constants.ts b/autogpt_platform/frontend/src/components/contextual/Chat/chat-constants.ts deleted file mode 100644 index 8802de2155..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/chat-constants.ts +++ /dev/null @@ -1,16 +0,0 @@ -/** - * Constants for the chat system. - * - * Centralizes magic strings and values used across chat components. - */ - -// LocalStorage keys -export const STORAGE_KEY_ACTIVE_TASKS = "chat_active_tasks"; - -// Redis Stream IDs -export const INITIAL_MESSAGE_ID = "0"; -export const INITIAL_STREAM_ID = "0-0"; - -// TTL values (in milliseconds) -export const COMPLETED_STREAM_TTL_MS = 5 * 60 * 1000; // 5 minutes -export const ACTIVE_TASK_TTL_MS = 60 * 60 * 1000; // 1 hour diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/chat-store.ts b/autogpt_platform/frontend/src/components/contextual/Chat/chat-store.ts deleted file mode 100644 index 3083f65d2c..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/chat-store.ts +++ /dev/null @@ -1,501 +0,0 @@ -"use client"; - -import { create } from "zustand"; -import { - ACTIVE_TASK_TTL_MS, - COMPLETED_STREAM_TTL_MS, - INITIAL_STREAM_ID, - STORAGE_KEY_ACTIVE_TASKS, -} from "./chat-constants"; -import type { - ActiveStream, - StreamChunk, - StreamCompleteCallback, - StreamResult, - StreamStatus, -} from "./chat-types"; -import { executeStream, executeTaskReconnect } from "./stream-executor"; - -export interface ActiveTaskInfo { - taskId: string; - sessionId: string; - operationId: string; - toolName: string; - lastMessageId: string; - startedAt: number; -} - -/** Load active tasks from localStorage */ -function loadPersistedTasks(): Map { - if (typeof window === "undefined") return new Map(); - try { - const stored = localStorage.getItem(STORAGE_KEY_ACTIVE_TASKS); - if (!stored) return new Map(); - const parsed = JSON.parse(stored) as Record; - const now = Date.now(); - const tasks = new Map(); - // Filter out expired tasks - for (const [sessionId, task] of Object.entries(parsed)) { - if (now - task.startedAt < ACTIVE_TASK_TTL_MS) { - tasks.set(sessionId, task); - } - } - return tasks; - } catch { - return new Map(); - } -} - -/** Save active 
tasks to localStorage */ -function persistTasks(tasks: Map): void { - if (typeof window === "undefined") return; - try { - const obj: Record = {}; - for (const [sessionId, task] of tasks) { - obj[sessionId] = task; - } - localStorage.setItem(STORAGE_KEY_ACTIVE_TASKS, JSON.stringify(obj)); - } catch { - // Ignore storage errors - } -} - -interface ChatStoreState { - activeStreams: Map; - completedStreams: Map; - activeSessions: Set; - streamCompleteCallbacks: Set; - /** Active tasks for SSE reconnection - keyed by sessionId */ - activeTasks: Map; -} - -interface ChatStoreActions { - startStream: ( - sessionId: string, - message: string, - isUserMessage: boolean, - context?: { url: string; content: string }, - onChunk?: (chunk: StreamChunk) => void, - ) => Promise; - stopStream: (sessionId: string) => void; - subscribeToStream: ( - sessionId: string, - onChunk: (chunk: StreamChunk) => void, - skipReplay?: boolean, - ) => () => void; - getStreamStatus: (sessionId: string) => StreamStatus; - getCompletedStream: (sessionId: string) => StreamResult | undefined; - clearCompletedStream: (sessionId: string) => void; - isStreaming: (sessionId: string) => boolean; - registerActiveSession: (sessionId: string) => void; - unregisterActiveSession: (sessionId: string) => void; - isSessionActive: (sessionId: string) => boolean; - onStreamComplete: (callback: StreamCompleteCallback) => () => void; - /** Track active task for SSE reconnection */ - setActiveTask: ( - sessionId: string, - taskInfo: Omit, - ) => void; - /** Get active task for a session */ - getActiveTask: (sessionId: string) => ActiveTaskInfo | undefined; - /** Clear active task when operation completes */ - clearActiveTask: (sessionId: string) => void; - /** Reconnect to an existing task stream */ - reconnectToTask: ( - sessionId: string, - taskId: string, - lastMessageId?: string, - onChunk?: (chunk: StreamChunk) => void, - ) => Promise; - /** Update last message ID for a task (for tracking replay position) */ - updateTaskLastMessageId: (sessionId: string, lastMessageId: string) => void; -} - -type ChatStore = ChatStoreState & ChatStoreActions; - -function notifyStreamComplete( - callbacks: Set, - sessionId: string, -) { - for (const callback of callbacks) { - try { - callback(sessionId); - } catch (err) { - console.warn("[ChatStore] Stream complete callback error:", err); - } - } -} - -function cleanupExpiredStreams( - completedStreams: Map, -): Map { - const now = Date.now(); - const cleaned = new Map(completedStreams); - for (const [sessionId, result] of cleaned) { - if (now - result.completedAt > COMPLETED_STREAM_TTL_MS) { - cleaned.delete(sessionId); - } - } - return cleaned; -} - -/** - * Finalize a stream by moving it from activeStreams to completedStreams. - * Also handles cleanup and notifications. 
- */ -function finalizeStream( - sessionId: string, - stream: ActiveStream, - onChunk: ((chunk: StreamChunk) => void) | undefined, - get: () => ChatStoreState & ChatStoreActions, - set: (state: Partial) => void, -): void { - if (onChunk) stream.onChunkCallbacks.delete(onChunk); - - if (stream.status !== "streaming") { - const currentState = get(); - const finalActiveStreams = new Map(currentState.activeStreams); - let finalCompletedStreams = new Map(currentState.completedStreams); - - const storedStream = finalActiveStreams.get(sessionId); - if (storedStream === stream) { - const result: StreamResult = { - sessionId, - status: stream.status, - chunks: stream.chunks, - completedAt: Date.now(), - error: stream.error, - }; - finalCompletedStreams.set(sessionId, result); - finalActiveStreams.delete(sessionId); - finalCompletedStreams = cleanupExpiredStreams(finalCompletedStreams); - set({ - activeStreams: finalActiveStreams, - completedStreams: finalCompletedStreams, - }); - - if (stream.status === "completed" || stream.status === "error") { - notifyStreamComplete(currentState.streamCompleteCallbacks, sessionId); - } - } - } -} - -/** - * Clean up an existing stream for a session and move it to completed streams. - * Returns updated maps for both active and completed streams. - */ -function cleanupExistingStream( - sessionId: string, - activeStreams: Map, - completedStreams: Map, - callbacks: Set, -): { - activeStreams: Map; - completedStreams: Map; -} { - const newActiveStreams = new Map(activeStreams); - let newCompletedStreams = new Map(completedStreams); - - const existingStream = newActiveStreams.get(sessionId); - if (existingStream) { - existingStream.abortController.abort(); - const normalizedStatus = - existingStream.status === "streaming" - ? "completed" - : existingStream.status; - const result: StreamResult = { - sessionId, - status: normalizedStatus, - chunks: existingStream.chunks, - completedAt: Date.now(), - error: existingStream.error, - }; - newCompletedStreams.set(sessionId, result); - newActiveStreams.delete(sessionId); - newCompletedStreams = cleanupExpiredStreams(newCompletedStreams); - if (normalizedStatus === "completed" || normalizedStatus === "error") { - notifyStreamComplete(callbacks, sessionId); - } - } - - return { - activeStreams: newActiveStreams, - completedStreams: newCompletedStreams, - }; -} - -/** - * Create a new active stream with initial state. 
- */ -function createActiveStream( - sessionId: string, - onChunk?: (chunk: StreamChunk) => void, -): ActiveStream { - const abortController = new AbortController(); - const initialCallbacks = new Set<(chunk: StreamChunk) => void>(); - if (onChunk) initialCallbacks.add(onChunk); - - return { - sessionId, - abortController, - status: "streaming", - startedAt: Date.now(), - chunks: [], - onChunkCallbacks: initialCallbacks, - }; -} - -export const useChatStore = create((set, get) => ({ - activeStreams: new Map(), - completedStreams: new Map(), - activeSessions: new Set(), - streamCompleteCallbacks: new Set(), - activeTasks: loadPersistedTasks(), - - startStream: async function startStream( - sessionId, - message, - isUserMessage, - context, - onChunk, - ) { - const state = get(); - const callbacks = state.streamCompleteCallbacks; - - // Clean up any existing stream for this session - const { - activeStreams: newActiveStreams, - completedStreams: newCompletedStreams, - } = cleanupExistingStream( - sessionId, - state.activeStreams, - state.completedStreams, - callbacks, - ); - - // Create new stream - const stream = createActiveStream(sessionId, onChunk); - newActiveStreams.set(sessionId, stream); - set({ - activeStreams: newActiveStreams, - completedStreams: newCompletedStreams, - }); - - try { - await executeStream(stream, message, isUserMessage, context); - } finally { - finalizeStream(sessionId, stream, onChunk, get, set); - } - }, - - stopStream: function stopStream(sessionId) { - const state = get(); - const stream = state.activeStreams.get(sessionId); - if (!stream) return; - - stream.abortController.abort(); - stream.status = "completed"; - - const newActiveStreams = new Map(state.activeStreams); - let newCompletedStreams = new Map(state.completedStreams); - - const result: StreamResult = { - sessionId, - status: stream.status, - chunks: stream.chunks, - completedAt: Date.now(), - error: stream.error, - }; - newCompletedStreams.set(sessionId, result); - newActiveStreams.delete(sessionId); - newCompletedStreams = cleanupExpiredStreams(newCompletedStreams); - - set({ - activeStreams: newActiveStreams, - completedStreams: newCompletedStreams, - }); - - notifyStreamComplete(state.streamCompleteCallbacks, sessionId); - }, - - subscribeToStream: function subscribeToStream( - sessionId, - onChunk, - skipReplay = false, - ) { - const state = get(); - const stream = state.activeStreams.get(sessionId); - - if (stream) { - if (!skipReplay) { - for (const chunk of stream.chunks) { - onChunk(chunk); - } - } - - stream.onChunkCallbacks.add(onChunk); - - return function unsubscribe() { - stream.onChunkCallbacks.delete(onChunk); - }; - } - - return function noop() {}; - }, - - getStreamStatus: function getStreamStatus(sessionId) { - const { activeStreams, completedStreams } = get(); - - const active = activeStreams.get(sessionId); - if (active) return active.status; - - const completed = completedStreams.get(sessionId); - if (completed) return completed.status; - - return "idle"; - }, - - getCompletedStream: function getCompletedStream(sessionId) { - return get().completedStreams.get(sessionId); - }, - - clearCompletedStream: function clearCompletedStream(sessionId) { - const state = get(); - if (!state.completedStreams.has(sessionId)) return; - - const newCompletedStreams = new Map(state.completedStreams); - newCompletedStreams.delete(sessionId); - set({ completedStreams: newCompletedStreams }); - }, - - isStreaming: function isStreaming(sessionId) { - const stream = 
get().activeStreams.get(sessionId); - return stream?.status === "streaming"; - }, - - registerActiveSession: function registerActiveSession(sessionId) { - const state = get(); - if (state.activeSessions.has(sessionId)) return; - - const newActiveSessions = new Set(state.activeSessions); - newActiveSessions.add(sessionId); - set({ activeSessions: newActiveSessions }); - }, - - unregisterActiveSession: function unregisterActiveSession(sessionId) { - const state = get(); - if (!state.activeSessions.has(sessionId)) return; - - const newActiveSessions = new Set(state.activeSessions); - newActiveSessions.delete(sessionId); - set({ activeSessions: newActiveSessions }); - }, - - isSessionActive: function isSessionActive(sessionId) { - return get().activeSessions.has(sessionId); - }, - - onStreamComplete: function onStreamComplete(callback) { - const state = get(); - const newCallbacks = new Set(state.streamCompleteCallbacks); - newCallbacks.add(callback); - set({ streamCompleteCallbacks: newCallbacks }); - - return function unsubscribe() { - const currentState = get(); - const cleanedCallbacks = new Set(currentState.streamCompleteCallbacks); - cleanedCallbacks.delete(callback); - set({ streamCompleteCallbacks: cleanedCallbacks }); - }; - }, - - setActiveTask: function setActiveTask(sessionId, taskInfo) { - const state = get(); - const newActiveTasks = new Map(state.activeTasks); - newActiveTasks.set(sessionId, { - ...taskInfo, - sessionId, - startedAt: Date.now(), - }); - set({ activeTasks: newActiveTasks }); - persistTasks(newActiveTasks); - }, - - getActiveTask: function getActiveTask(sessionId) { - return get().activeTasks.get(sessionId); - }, - - clearActiveTask: function clearActiveTask(sessionId) { - const state = get(); - if (!state.activeTasks.has(sessionId)) return; - - const newActiveTasks = new Map(state.activeTasks); - newActiveTasks.delete(sessionId); - set({ activeTasks: newActiveTasks }); - persistTasks(newActiveTasks); - }, - - reconnectToTask: async function reconnectToTask( - sessionId, - taskId, - lastMessageId = INITIAL_STREAM_ID, - onChunk, - ) { - const state = get(); - const callbacks = state.streamCompleteCallbacks; - - // Clean up any existing stream for this session - const { - activeStreams: newActiveStreams, - completedStreams: newCompletedStreams, - } = cleanupExistingStream( - sessionId, - state.activeStreams, - state.completedStreams, - callbacks, - ); - - // Create new stream for reconnection - const stream = createActiveStream(sessionId, onChunk); - newActiveStreams.set(sessionId, stream); - set({ - activeStreams: newActiveStreams, - completedStreams: newCompletedStreams, - }); - - try { - await executeTaskReconnect(stream, taskId, lastMessageId); - } finally { - finalizeStream(sessionId, stream, onChunk, get, set); - - // Clear active task on completion - if (stream.status === "completed" || stream.status === "error") { - const taskState = get(); - if (taskState.activeTasks.has(sessionId)) { - const newActiveTasks = new Map(taskState.activeTasks); - newActiveTasks.delete(sessionId); - set({ activeTasks: newActiveTasks }); - persistTasks(newActiveTasks); - } - } - } - }, - - updateTaskLastMessageId: function updateTaskLastMessageId( - sessionId, - lastMessageId, - ) { - const state = get(); - const task = state.activeTasks.get(sessionId); - if (!task) return; - - const newActiveTasks = new Map(state.activeTasks); - newActiveTasks.set(sessionId, { - ...task, - lastMessageId, - }); - set({ activeTasks: newActiveTasks }); - persistTasks(newActiveTasks); - }, -})); 
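For reference, a minimal sketch (not part of this patch) of how a caller could drive the removed chat store's public API: start a stream, track replay position, and resume a background task after a reload. The `resumeOrStart` wrapper is hypothetical; only the store methods it calls (`startStream`, `getActiveTask`, `reconnectToTask`, `updateTaskLastMessageId`) come from the deleted code above.

```typescript
// Sketch only: illustrative consumer of the removed chat store, assuming the
// API deleted above. The resumeOrStart wrapper itself is hypothetical.
import { useChatStore } from "./chat-store";
import type { StreamChunk } from "./chat-types";

export async function resumeOrStart(
  sessionId: string,
  prompt: string,
): Promise<void> {
  const store = useChatStore.getState();

  function handleChunk(chunk: StreamChunk): void {
    // Track the last replayed index so a later reconnect can resume precisely.
    if (chunk.idx !== undefined) {
      store.updateTaskLastMessageId(sessionId, String(chunk.idx));
    }
  }

  const activeTask = store.getActiveTask(sessionId);
  if (activeTask) {
    // A persisted background task survived the disconnect: replay missed
    // chunks from the last known position, then continue receiving live ones.
    await store.reconnectToTask(
      sessionId,
      activeTask.taskId,
      activeTask.lastMessageId,
      handleChunk,
    );
    return;
  }

  // No pending task: start a fresh stream for the user's message.
  await store.startStream(sessionId, prompt, true, undefined, handleChunk);
}
```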
diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/chat-types.ts b/autogpt_platform/frontend/src/components/contextual/Chat/chat-types.ts deleted file mode 100644 index 34813e17fe..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/chat-types.ts +++ /dev/null @@ -1,163 +0,0 @@ -import type { ToolArguments, ToolResult } from "@/types/chat"; - -export type StreamStatus = "idle" | "streaming" | "completed" | "error"; - -export interface StreamChunk { - type: - | "stream_start" - | "text_chunk" - | "text_ended" - | "tool_call" - | "tool_call_start" - | "tool_response" - | "login_needed" - | "need_login" - | "credentials_needed" - | "error" - | "usage" - | "stream_end"; - taskId?: string; - timestamp?: string; - content?: string; - message?: string; - code?: string; - details?: Record; - tool_id?: string; - tool_name?: string; - arguments?: ToolArguments; - result?: ToolResult; - success?: boolean; - idx?: number; - session_id?: string; - agent_info?: { - graph_id: string; - name: string; - trigger_type: string; - }; - provider?: string; - provider_name?: string; - credential_type?: string; - scopes?: string[]; - title?: string; - [key: string]: unknown; -} - -export type VercelStreamChunk = - | { type: "start"; messageId: string; taskId?: string } - | { type: "finish" } - | { type: "text-start"; id: string } - | { type: "text-delta"; id: string; delta: string } - | { type: "text-end"; id: string } - | { type: "tool-input-start"; toolCallId: string; toolName: string } - | { - type: "tool-input-available"; - toolCallId: string; - toolName: string; - input: Record; - } - | { - type: "tool-output-available"; - toolCallId: string; - toolName?: string; - output: unknown; - success?: boolean; - } - | { - type: "usage"; - promptTokens: number; - completionTokens: number; - totalTokens: number; - } - | { - type: "error"; - errorText: string; - code?: string; - details?: Record; - }; - -export interface ActiveStream { - sessionId: string; - abortController: AbortController; - status: StreamStatus; - startedAt: number; - chunks: StreamChunk[]; - error?: Error; - onChunkCallbacks: Set<(chunk: StreamChunk) => void>; -} - -export interface StreamResult { - sessionId: string; - status: StreamStatus; - chunks: StreamChunk[]; - completedAt: number; - error?: Error; -} - -export type StreamCompleteCallback = (sessionId: string) => void; - -// Type guards for message types - -/** - * Check if a message has a toolId property. - */ -export function hasToolId( - msg: T, -): msg is T & { toolId: string } { - return ( - "toolId" in msg && - typeof (msg as Record).toolId === "string" - ); -} - -/** - * Check if a message has an operationId property. - */ -export function hasOperationId( - msg: T, -): msg is T & { operationId: string } { - return ( - "operationId" in msg && - typeof (msg as Record).operationId === "string" - ); -} - -/** - * Check if a message has a toolCallId property. - */ -export function hasToolCallId( - msg: T, -): msg is T & { toolCallId: string } { - return ( - "toolCallId" in msg && - typeof (msg as Record).toolCallId === "string" - ); -} - -/** - * Check if a message is an operation message type. - */ -export function isOperationMessage( - msg: T, -): msg is T & { - type: "operation_started" | "operation_pending" | "operation_in_progress"; -} { - return ( - msg.type === "operation_started" || - msg.type === "operation_pending" || - msg.type === "operation_in_progress" - ); -} - -/** - * Get the tool ID from a message if available. 
- * Checks toolId, operationId, and toolCallId properties. - */ -export function getToolIdFromMessage( - msg: T, -): string | undefined { - const record = msg as Record; - if (typeof record.toolId === "string") return record.toolId; - if (typeof record.operationId === "string") return record.operationId; - if (typeof record.toolCallId === "string") return record.toolCallId; - return undefined; -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/AIChatBubble/AIChatBubble.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/AIChatBubble/AIChatBubble.tsx deleted file mode 100644 index f5d56fcb15..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/AIChatBubble/AIChatBubble.tsx +++ /dev/null @@ -1,15 +0,0 @@ -import { cn } from "@/lib/utils"; -import { ReactNode } from "react"; - -export interface AIChatBubbleProps { - children: ReactNode; - className?: string; -} - -export function AIChatBubble({ children, className }: AIChatBubbleProps) { - return ( -
- {children} -
- ); -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/AgentCarouselMessage/AgentCarouselMessage.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/AgentCarouselMessage/AgentCarouselMessage.tsx deleted file mode 100644 index 582b24de5e..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/AgentCarouselMessage/AgentCarouselMessage.tsx +++ /dev/null @@ -1,119 +0,0 @@ -import { Button } from "@/components/atoms/Button/Button"; -import { Card } from "@/components/atoms/Card/Card"; -import { Text } from "@/components/atoms/Text/Text"; -import { cn } from "@/lib/utils"; -import { ArrowRight, List, Robot } from "@phosphor-icons/react"; -import Image from "next/image"; - -export interface Agent { - id: string; - name: string; - description: string; - version?: number; - image_url?: string; -} - -export interface AgentCarouselMessageProps { - agents: Agent[]; - totalCount?: number; - onSelectAgent?: (agentId: string) => void; - className?: string; -} - -export function AgentCarouselMessage({ - agents, - totalCount, - onSelectAgent, - className, -}: AgentCarouselMessageProps) { - const displayCount = totalCount ?? agents.length; - - return ( -
- {/* Header */} -
-
- -
-
- - Found {displayCount} {displayCount === 1 ? "Agent" : "Agents"} - - - Select an agent to view details or run it - -
-
- - {/* Agent Cards */} -
- {agents.map((agent) => ( - -
-
- {agent.image_url ? ( - {`${agent.name} - ) : ( -
- -
- )} -
-
-
- - {agent.name} - - {agent.version && ( - - v{agent.version} - - )} -
- - {agent.description} - - {onSelectAgent && ( - - )} -
-
-
- ))} -
- - {totalCount && totalCount > agents.length && ( - - Showing {agents.length} of {totalCount} results - - )} -
- ); -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/AgentInputsSetup/AgentInputsSetup.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/AgentInputsSetup/AgentInputsSetup.tsx deleted file mode 100644 index 3ef71eca09..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/AgentInputsSetup/AgentInputsSetup.tsx +++ /dev/null @@ -1,246 +0,0 @@ -"use client"; - -import { Button } from "@/components/atoms/Button/Button"; -import { Card } from "@/components/atoms/Card/Card"; -import { Text } from "@/components/atoms/Text/Text"; -import { CredentialsInput } from "@/components/contextual/CredentialsInput/CredentialsInput"; -import { RunAgentInputs } from "@/components/contextual/RunAgentInputs/RunAgentInputs"; - -import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; -import { - BlockIOCredentialsSubSchema, - BlockIOSubSchema, -} from "@/lib/autogpt-server-api/types"; -import { cn, isEmpty } from "@/lib/utils"; -import { PlayIcon, WarningIcon } from "@phosphor-icons/react"; -import { useMemo } from "react"; -import { useAgentInputsSetup } from "./useAgentInputsSetup"; - -type LibraryAgentInputSchemaProperties = LibraryAgent["input_schema"] extends { - properties: infer P; -} - ? P extends Record - ? P - : Record - : Record; - -type LibraryAgentCredentialsInputSchemaProperties = - LibraryAgent["credentials_input_schema"] extends { - properties: infer P; - } - ? P extends Record - ? P - : Record - : Record; - -interface Props { - agentName?: string; - inputSchema: LibraryAgentInputSchemaProperties | Record; - credentialsSchema?: - | LibraryAgentCredentialsInputSchemaProperties - | Record; - message: string; - requiredFields?: string[]; - onRun: ( - inputs: Record, - credentials: Record, - ) => void; - onCancel?: () => void; - className?: string; -} - -export function AgentInputsSetup({ - agentName, - inputSchema, - credentialsSchema, - message, - requiredFields, - onRun, - onCancel, - className, -}: Props) { - const { inputValues, setInputValue, credentialsValues, setCredentialsValue } = - useAgentInputsSetup(); - - const inputSchemaObj = useMemo(() => { - if (!inputSchema) return { properties: {}, required: [] }; - if ("properties" in inputSchema && "type" in inputSchema) { - return inputSchema as { - properties: Record; - required?: string[]; - }; - } - return { properties: inputSchema as Record, required: [] }; - }, [inputSchema]); - - const credentialsSchemaObj = useMemo(() => { - if (!credentialsSchema) return { properties: {}, required: [] }; - if ("properties" in credentialsSchema && "type" in credentialsSchema) { - return credentialsSchema as { - properties: Record; - required?: string[]; - }; - } - return { - properties: credentialsSchema as Record, - required: [], - }; - }, [credentialsSchema]); - - const agentInputFields = useMemo(() => { - const properties = inputSchemaObj.properties || {}; - return Object.fromEntries( - Object.entries(properties).filter( - ([_, subSchema]: [string, any]) => !subSchema.hidden, - ), - ); - }, [inputSchemaObj]); - - const agentCredentialsInputFields = useMemo(() => { - return credentialsSchemaObj.properties || {}; - }, [credentialsSchemaObj]); - - const inputFields = Object.entries(agentInputFields); - const credentialFields = Object.entries(agentCredentialsInputFields); - - const defaultsFromSchema = useMemo(() => { - const defaults: Record = {}; - Object.entries(agentInputFields).forEach(([key, schema]) => { - if ("default" in schema && 
schema.default !== undefined) { - defaults[key] = schema.default; - } - }); - return defaults; - }, [agentInputFields]); - - const defaultsFromCredentialsSchema = useMemo(() => { - const defaults: Record = {}; - Object.entries(agentCredentialsInputFields).forEach(([key, schema]) => { - if ("default" in schema && schema.default !== undefined) { - defaults[key] = schema.default; - } - }); - return defaults; - }, [agentCredentialsInputFields]); - - const mergedInputValues = useMemo(() => { - return { ...defaultsFromSchema, ...inputValues }; - }, [defaultsFromSchema, inputValues]); - - const mergedCredentialsValues = useMemo(() => { - return { ...defaultsFromCredentialsSchema, ...credentialsValues }; - }, [defaultsFromCredentialsSchema, credentialsValues]); - - const allRequiredInputsAreSet = useMemo(() => { - const requiredInputs = new Set( - requiredFields || (inputSchemaObj.required as string[]) || [], - ); - const nonEmptyInputs = new Set( - Object.keys(mergedInputValues).filter( - (k) => !isEmpty(mergedInputValues[k]), - ), - ); - const missing = [...requiredInputs].filter( - (input) => !nonEmptyInputs.has(input), - ); - return missing.length === 0; - }, [inputSchemaObj.required, mergedInputValues, requiredFields]); - - const allCredentialsAreSet = useMemo(() => { - const requiredCredentials = new Set( - (credentialsSchemaObj.required as string[]) || [], - ); - if (requiredCredentials.size === 0) { - return true; - } - const missing = [...requiredCredentials].filter((key) => { - const cred = mergedCredentialsValues[key]; - return !cred || !cred.id; - }); - return missing.length === 0; - }, [credentialsSchemaObj.required, mergedCredentialsValues]); - - const canRun = allRequiredInputsAreSet && allCredentialsAreSet; - - function handleRun() { - if (canRun) { - onRun(mergedInputValues, mergedCredentialsValues); - } - } - - return ( - -
-
- -
-
- - {agentName ? `Configure ${agentName}` : "Agent Configuration"} - - - {message} - - - {inputFields.length > 0 && ( -
- {inputFields.map(([key, inputSubSchema]) => ( - setInputValue(key, value)} - /> - ))} -
- )} - - {credentialFields.length > 0 && ( -
- {credentialFields.map(([key, schema]) => { - const requiredCredentials = new Set( - (credentialsSchemaObj.required as string[]) || [], - ); - return ( - - setCredentialsValue(key, value) - } - siblingInputs={mergedInputValues} - isOptional={!requiredCredentials.has(key)} - /> - ); - })} -
- )} - -
- - {onCancel && ( - - )} -
-
-
-
- ); -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/AgentInputsSetup/useAgentInputsSetup.ts b/autogpt_platform/frontend/src/components/contextual/Chat/components/AgentInputsSetup/useAgentInputsSetup.ts deleted file mode 100644 index e36a3f3c5d..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/AgentInputsSetup/useAgentInputsSetup.ts +++ /dev/null @@ -1,38 +0,0 @@ -import type { CredentialsMetaInput } from "@/lib/autogpt-server-api/types"; -import { useState } from "react"; - -export function useAgentInputsSetup() { - const [inputValues, setInputValues] = useState>({}); - const [credentialsValues, setCredentialsValues] = useState< - Record - >({}); - - function setInputValue(key: string, value: any) { - setInputValues((prev) => ({ - ...prev, - [key]: value, - })); - } - - function setCredentialsValue(key: string, value?: CredentialsMetaInput) { - if (value) { - setCredentialsValues((prev) => ({ - ...prev, - [key]: value, - })); - } else { - setCredentialsValues((prev) => { - const next = { ...prev }; - delete next[key]; - return next; - }); - } - } - - return { - inputValues, - setInputValue, - credentialsValues, - setCredentialsValue, - }; -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/AuthPromptWidget/AuthPromptWidget.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/AuthPromptWidget/AuthPromptWidget.tsx deleted file mode 100644 index b2cf92ec56..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/AuthPromptWidget/AuthPromptWidget.tsx +++ /dev/null @@ -1,120 +0,0 @@ -"use client"; - -import { Button } from "@/components/atoms/Button/Button"; -import { cn } from "@/lib/utils"; -import { ShieldIcon, SignInIcon, UserPlusIcon } from "@phosphor-icons/react"; -import { useRouter } from "next/navigation"; - -export interface AuthPromptWidgetProps { - message: string; - sessionId: string; - agentInfo?: { - graph_id: string; - name: string; - trigger_type: string; - }; - returnUrl?: string; - className?: string; -} - -export function AuthPromptWidget({ - message, - sessionId, - agentInfo, - returnUrl = "/copilot/chat", - className, -}: AuthPromptWidgetProps) { - const router = useRouter(); - - function handleSignIn() { - if (typeof window !== "undefined") { - localStorage.setItem("pending_chat_session", sessionId); - if (agentInfo) { - localStorage.setItem("pending_agent_setup", JSON.stringify(agentInfo)); - } - } - const returnUrlWithSession = `${returnUrl}?session_id=${sessionId}`; - const encodedReturnUrl = encodeURIComponent(returnUrlWithSession); - router.push(`/login?returnUrl=${encodedReturnUrl}`); - } - - function handleSignUp() { - if (typeof window !== "undefined") { - localStorage.setItem("pending_chat_session", sessionId); - if (agentInfo) { - localStorage.setItem("pending_agent_setup", JSON.stringify(agentInfo)); - } - } - const returnUrlWithSession = `${returnUrl}?session_id=${sessionId}`; - const encodedReturnUrl = encodeURIComponent(returnUrlWithSession); - router.push(`/signup?returnUrl=${encodedReturnUrl}`); - } - - return ( -
-
-
-
- -
-
-
- Authentication Required -
-
- Sign in to set up and manage agents -
-
-
- -
-
{message}
- {agentInfo && ( -
-
- Ready to set up:{" "} - {agentInfo.name} -
-
- Type:{" "} - {agentInfo.trigger_type} -
-
- )} -
- -
- - -
- -
- Your chat session will be preserved after signing in -
-
-
- ); -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/ChatContainer.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/ChatContainer.tsx deleted file mode 100644 index fbf2d5d143..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/ChatContainer.tsx +++ /dev/null @@ -1,130 +0,0 @@ -import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessionDetailResponse"; -import { Button } from "@/components/atoms/Button/Button"; -import { Text } from "@/components/atoms/Text/Text"; -import { Dialog } from "@/components/molecules/Dialog/Dialog"; -import { cn } from "@/lib/utils"; -import { GlobeHemisphereEastIcon } from "@phosphor-icons/react"; -import { useEffect } from "react"; -import { ChatInput } from "../ChatInput/ChatInput"; -import { MessageList } from "../MessageList/MessageList"; -import { useChatContainer } from "./useChatContainer"; - -export interface ChatContainerProps { - sessionId: string | null; - initialMessages: SessionDetailResponse["messages"]; - initialPrompt?: string; - className?: string; - onStreamingChange?: (isStreaming: boolean) => void; - onOperationStarted?: () => void; - /** Active stream info from the server for reconnection */ - activeStream?: { - taskId: string; - lastMessageId: string; - operationId: string; - toolName: string; - }; -} - -export function ChatContainer({ - sessionId, - initialMessages, - initialPrompt, - className, - onStreamingChange, - onOperationStarted, - activeStream, -}: ChatContainerProps) { - const { - messages, - streamingChunks, - isStreaming, - stopStreaming, - isRegionBlockedModalOpen, - sendMessageWithContext, - handleRegionModalOpenChange, - handleRegionModalClose, - } = useChatContainer({ - sessionId, - initialMessages, - initialPrompt, - onOperationStarted, - activeStream, - }); - - useEffect(() => { - onStreamingChange?.(isStreaming); - }, [isStreaming, onStreamingChange]); - - return ( -
- - - - Service unavailable - -
- } - controlled={{ - isOpen: isRegionBlockedModalOpen, - set: handleRegionModalOpenChange, - }} - onClose={handleRegionModalClose} - styling={{ maxWidth: 550, width: "100%", minWidth: "auto" }} - > - -
- - The Autogpt AI model is not available in your region or your - connection is blocking it. Please try again with a different - connection. - -
- -
-
-
- - {/* Messages - Scrollable */} -
-
- -
-
- - {/* Input - Fixed at bottom */} -
-
- -
-
- ); -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/createStreamEventDispatcher.ts b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/createStreamEventDispatcher.ts deleted file mode 100644 index af3b3329b7..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/createStreamEventDispatcher.ts +++ /dev/null @@ -1,89 +0,0 @@ -import { toast } from "sonner"; -import type { StreamChunk } from "../../chat-types"; -import type { HandlerDependencies } from "./handlers"; -import { - getErrorDisplayMessage, - handleError, - handleLoginNeeded, - handleStreamEnd, - handleTextChunk, - handleTextEnded, - handleToolCallStart, - handleToolResponse, - isRegionBlockedError, -} from "./handlers"; - -export function createStreamEventDispatcher( - deps: HandlerDependencies, -): (chunk: StreamChunk) => void { - return function dispatchStreamEvent(chunk: StreamChunk): void { - if ( - chunk.type === "text_chunk" || - chunk.type === "tool_call_start" || - chunk.type === "tool_response" || - chunk.type === "login_needed" || - chunk.type === "need_login" || - chunk.type === "error" - ) { - deps.hasResponseRef.current = true; - } - - switch (chunk.type) { - case "stream_start": - // Store task ID for SSE reconnection - if (chunk.taskId && deps.onActiveTaskStarted) { - deps.onActiveTaskStarted({ - taskId: chunk.taskId, - operationId: chunk.taskId, - toolName: "chat", - toolCallId: "chat_stream", - }); - } - break; - - case "text_chunk": - handleTextChunk(chunk, deps); - break; - - case "text_ended": - handleTextEnded(chunk, deps); - break; - - case "tool_call_start": - handleToolCallStart(chunk, deps); - break; - - case "tool_response": - handleToolResponse(chunk, deps); - break; - - case "login_needed": - case "need_login": - handleLoginNeeded(chunk, deps); - break; - - case "stream_end": - // Note: "finish" type from backend gets normalized to "stream_end" by normalizeStreamChunk - handleStreamEnd(chunk, deps); - break; - - case "error": - const isRegionBlocked = isRegionBlockedError(chunk); - handleError(chunk, deps); - // Show toast at dispatcher level to avoid circular dependencies - if (!isRegionBlocked) { - toast.error("Chat Error", { - description: getErrorDisplayMessage(chunk), - }); - } - break; - - case "usage": - // TODO: Handle usage for display - break; - - default: - console.warn("Unknown stream chunk type:", chunk); - } - }; -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/handlers.ts b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/handlers.ts deleted file mode 100644 index 5aec5b9818..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/handlers.ts +++ /dev/null @@ -1,362 +0,0 @@ -import type { Dispatch, MutableRefObject, SetStateAction } from "react"; -import { StreamChunk } from "../../useChatStream"; -import type { ChatMessageData } from "../ChatMessage/useChatMessage"; -import { - extractCredentialsNeeded, - extractInputsNeeded, - parseToolResponse, -} from "./helpers"; - -function isToolCallMessage( - message: ChatMessageData, -): message is Extract { - return message.type === "tool_call"; -} - -export interface HandlerDependencies { - setHasTextChunks: Dispatch>; - setStreamingChunks: Dispatch>; - streamingChunksRef: MutableRefObject; - hasResponseRef: MutableRefObject; - textFinalizedRef: MutableRefObject; - streamEndedRef: 
MutableRefObject; - setMessages: Dispatch>; - setIsStreamingInitiated: Dispatch>; - setIsRegionBlockedModalOpen: Dispatch>; - sessionId: string; - onOperationStarted?: () => void; - onActiveTaskStarted?: (taskInfo: { - taskId: string; - operationId: string; - toolName: string; - toolCallId: string; - }) => void; -} - -export function isRegionBlockedError(chunk: StreamChunk): boolean { - if (chunk.code === "MODEL_NOT_AVAILABLE_REGION") return true; - const message = chunk.message || chunk.content; - if (typeof message !== "string") return false; - return message.toLowerCase().includes("not available in your region"); -} - -export function getUserFriendlyErrorMessage( - code: string | undefined, -): string | undefined { - switch (code) { - case "TASK_EXPIRED": - return "This operation has expired. Please try again."; - case "TASK_NOT_FOUND": - return "Could not find the requested operation."; - case "ACCESS_DENIED": - return "You do not have access to this operation."; - case "QUEUE_OVERFLOW": - return "Connection was interrupted. Please refresh to continue."; - case "MODEL_NOT_AVAILABLE_REGION": - return "This model is not available in your region."; - default: - return undefined; - } -} - -export function handleTextChunk(chunk: StreamChunk, deps: HandlerDependencies) { - if (!chunk.content) return; - deps.setHasTextChunks(true); - deps.setStreamingChunks((prev) => { - const updated = [...prev, chunk.content!]; - deps.streamingChunksRef.current = updated; - return updated; - }); -} - -export function handleTextEnded( - _chunk: StreamChunk, - deps: HandlerDependencies, -) { - if (deps.textFinalizedRef.current) { - return; - } - - const completedText = deps.streamingChunksRef.current.join(""); - if (completedText.trim()) { - deps.textFinalizedRef.current = true; - - deps.setMessages((prev) => { - const exists = prev.some( - (msg) => - msg.type === "message" && - msg.role === "assistant" && - msg.content === completedText, - ); - if (exists) return prev; - - const assistantMessage: ChatMessageData = { - type: "message", - role: "assistant", - content: completedText, - timestamp: new Date(), - }; - return [...prev, assistantMessage]; - }); - } - deps.setStreamingChunks([]); - deps.streamingChunksRef.current = []; - deps.setHasTextChunks(false); -} - -export function handleToolCallStart( - chunk: StreamChunk, - deps: HandlerDependencies, -) { - // Use deterministic fallback instead of Date.now() to ensure same ID on replay - const toolId = - chunk.tool_id || - `tool-${deps.sessionId}-${chunk.idx ?? "unknown"}-${chunk.tool_name || "unknown"}`; - - const toolCallMessage: Extract = { - type: "tool_call", - toolId, - toolName: chunk.tool_name || "Executing", - arguments: chunk.arguments || {}, - timestamp: new Date(), - }; - - function updateToolCallMessages(prev: ChatMessageData[]) { - const existingIndex = prev.findIndex(function findToolCallIndex(msg) { - return isToolCallMessage(msg) && msg.toolId === toolCallMessage.toolId; - }); - if (existingIndex === -1) { - return [...prev, toolCallMessage]; - } - const nextMessages = [...prev]; - const existing = nextMessages[existingIndex]; - if (!isToolCallMessage(existing)) return prev; - const nextArguments = - toolCallMessage.arguments && - Object.keys(toolCallMessage.arguments).length > 0 - ? 
toolCallMessage.arguments - : existing.arguments; - nextMessages[existingIndex] = { - ...existing, - toolName: toolCallMessage.toolName || existing.toolName, - arguments: nextArguments, - timestamp: toolCallMessage.timestamp, - }; - return nextMessages; - } - - deps.setMessages(updateToolCallMessages); -} - -const TOOL_RESPONSE_TYPES = new Set([ - "tool_response", - "operation_started", - "operation_pending", - "operation_in_progress", - "execution_started", - "agent_carousel", - "clarification_needed", -]); - -function hasResponseForTool( - messages: ChatMessageData[], - toolId: string, -): boolean { - return messages.some((msg) => { - if (!TOOL_RESPONSE_TYPES.has(msg.type)) return false; - const msgToolId = - (msg as { toolId?: string }).toolId || - (msg as { toolCallId?: string }).toolCallId; - return msgToolId === toolId; - }); -} - -export function handleToolResponse( - chunk: StreamChunk, - deps: HandlerDependencies, -) { - let toolName = chunk.tool_name || "unknown"; - if (!chunk.tool_name || chunk.tool_name === "unknown") { - deps.setMessages((prev) => { - const matchingToolCall = [...prev] - .reverse() - .find( - (msg) => msg.type === "tool_call" && msg.toolId === chunk.tool_id, - ); - if (matchingToolCall && matchingToolCall.type === "tool_call") { - toolName = matchingToolCall.toolName; - } - return prev; - }); - } - const responseMessage = parseToolResponse( - chunk.result!, - chunk.tool_id!, - toolName, - new Date(), - ); - if (!responseMessage) { - let parsedResult: Record | null = null; - try { - parsedResult = - typeof chunk.result === "string" - ? JSON.parse(chunk.result) - : (chunk.result as Record); - } catch { - parsedResult = null; - } - if ( - (chunk.tool_name === "run_agent" || chunk.tool_name === "run_block") && - chunk.success && - parsedResult?.type === "setup_requirements" - ) { - const inputsMessage = extractInputsNeeded(parsedResult, chunk.tool_name); - if (inputsMessage) { - deps.setMessages((prev) => { - // Check for duplicate inputs_needed message - const exists = prev.some((msg) => msg.type === "inputs_needed"); - if (exists) return prev; - return [...prev, inputsMessage]; - }); - } - const credentialsMessage = extractCredentialsNeeded( - parsedResult, - chunk.tool_name, - ); - if (credentialsMessage) { - deps.setMessages((prev) => { - // Check for duplicate credentials_needed message - const exists = prev.some((msg) => msg.type === "credentials_needed"); - if (exists) return prev; - return [...prev, credentialsMessage]; - }); - } - } - return; - } - if (responseMessage.type === "operation_started") { - deps.onOperationStarted?.(); - const taskId = (responseMessage as { taskId?: string }).taskId; - if (taskId && deps.onActiveTaskStarted) { - deps.onActiveTaskStarted({ - taskId, - operationId: - (responseMessage as { operationId?: string }).operationId || "", - toolName: (responseMessage as { toolName?: string }).toolName || "", - toolCallId: (responseMessage as { toolId?: string }).toolId || "", - }); - } - } - - deps.setMessages((prev) => { - const toolCallIndex = prev.findIndex( - (msg) => msg.type === "tool_call" && msg.toolId === chunk.tool_id, - ); - if (hasResponseForTool(prev, chunk.tool_id!)) { - return prev; - } - if (toolCallIndex !== -1) { - const newMessages = [...prev]; - newMessages.splice(toolCallIndex + 1, 0, responseMessage); - return newMessages; - } - return [...prev, responseMessage]; - }); -} - -export function handleLoginNeeded( - chunk: StreamChunk, - deps: HandlerDependencies, -) { - const loginNeededMessage: ChatMessageData = { - 
type: "login_needed", - toolName: "login_needed", - message: chunk.message || "Please sign in to use chat and agent features", - sessionId: chunk.session_id || deps.sessionId, - agentInfo: chunk.agent_info, - timestamp: new Date(), - }; - deps.setMessages((prev) => { - // Check for duplicate login_needed message - const exists = prev.some((msg) => msg.type === "login_needed"); - if (exists) return prev; - return [...prev, loginNeededMessage]; - }); -} - -export function handleStreamEnd( - _chunk: StreamChunk, - deps: HandlerDependencies, -) { - if (deps.streamEndedRef.current) { - return; - } - deps.streamEndedRef.current = true; - - const completedContent = deps.streamingChunksRef.current.join(""); - if (!completedContent.trim() && !deps.hasResponseRef.current) { - deps.setMessages((prev) => { - const exists = prev.some( - (msg) => - msg.type === "message" && - msg.role === "assistant" && - msg.content === "No response received. Please try again.", - ); - if (exists) return prev; - return [ - ...prev, - { - type: "message", - role: "assistant", - content: "No response received. Please try again.", - timestamp: new Date(), - }, - ]; - }); - } - if (completedContent.trim() && !deps.textFinalizedRef.current) { - deps.textFinalizedRef.current = true; - - deps.setMessages((prev) => { - const exists = prev.some( - (msg) => - msg.type === "message" && - msg.role === "assistant" && - msg.content === completedContent, - ); - if (exists) return prev; - - const assistantMessage: ChatMessageData = { - type: "message", - role: "assistant", - content: completedContent, - timestamp: new Date(), - }; - return [...prev, assistantMessage]; - }); - } - deps.setStreamingChunks([]); - deps.streamingChunksRef.current = []; - deps.setHasTextChunks(false); - deps.setIsStreamingInitiated(false); -} - -export function handleError(chunk: StreamChunk, deps: HandlerDependencies) { - if (isRegionBlockedError(chunk)) { - deps.setIsRegionBlockedModalOpen(true); - } - deps.setIsStreamingInitiated(false); - deps.setHasTextChunks(false); - deps.setStreamingChunks([]); - deps.streamingChunksRef.current = []; - deps.textFinalizedRef.current = false; - deps.streamEndedRef.current = true; -} - -export function getErrorDisplayMessage(chunk: StreamChunk): string { - const friendlyMessage = getUserFriendlyErrorMessage(chunk.code); - if (friendlyMessage) { - return friendlyMessage; - } - return chunk.message || chunk.content || "An error occurred"; -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/helpers.ts b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/helpers.ts deleted file mode 100644 index f1e94cea17..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/helpers.ts +++ /dev/null @@ -1,607 +0,0 @@ -import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessionDetailResponse"; -import { SessionKey, sessionStorage } from "@/services/storage/session-storage"; -import type { ToolResult } from "@/types/chat"; -import type { ChatMessageData } from "../ChatMessage/useChatMessage"; - -export function processInitialMessages( - initialMessages: SessionDetailResponse["messages"], -): ChatMessageData[] { - const processedMessages: ChatMessageData[] = []; - const toolCallMap = new Map(); - - for (const msg of initialMessages) { - if (!isValidMessage(msg)) { - console.warn("Invalid message structure from backend:", msg); - continue; - } - - let content = String(msg.content || ""); - const 
role = String(msg.role || "assistant").toLowerCase(); - const toolCalls = msg.tool_calls; - const timestamp = msg.timestamp - ? new Date(msg.timestamp as string) - : undefined; - - if (role === "user") { - content = removePageContext(content); - if (!content.trim()) continue; - processedMessages.push({ - type: "message", - role: "user", - content, - timestamp, - }); - continue; - } - - if (role === "assistant") { - content = content - .replace(/[\s\S]*?<\/thinking>/gi, "") - .replace(/[\s\S]*?<\/internal_reasoning>/gi, "") - .trim(); - - if (toolCalls && isToolCallArray(toolCalls) && toolCalls.length > 0) { - for (const toolCall of toolCalls) { - const toolName = toolCall.function.name; - const toolId = toolCall.id; - toolCallMap.set(toolId, toolName); - - try { - const args = JSON.parse(toolCall.function.arguments || "{}"); - processedMessages.push({ - type: "tool_call", - toolId, - toolName, - arguments: args, - timestamp, - }); - } catch (err) { - console.warn("Failed to parse tool call arguments:", err); - processedMessages.push({ - type: "tool_call", - toolId, - toolName, - arguments: {}, - timestamp, - }); - } - } - if (content.trim()) { - processedMessages.push({ - type: "message", - role: "assistant", - content, - timestamp, - }); - } - } else if (content.trim()) { - processedMessages.push({ - type: "message", - role: "assistant", - content, - timestamp, - }); - } - continue; - } - - if (role === "tool") { - const toolCallId = (msg.tool_call_id as string) || ""; - const toolName = toolCallMap.get(toolCallId) || "unknown"; - const toolResponse = parseToolResponse( - content, - toolCallId, - toolName, - timestamp, - ); - if (toolResponse) { - processedMessages.push(toolResponse); - } - continue; - } - - if (content.trim()) { - processedMessages.push({ - type: "message", - role: role as "user" | "assistant" | "system", - content, - timestamp, - }); - } - } - - return processedMessages; -} - -export function hasSentInitialPrompt(sessionId: string): boolean { - try { - const sent = JSON.parse( - sessionStorage.get(SessionKey.CHAT_SENT_INITIAL_PROMPTS) || "{}", - ); - return sent[sessionId] === true; - } catch { - return false; - } -} - -export function markInitialPromptSent(sessionId: string): void { - try { - const sent = JSON.parse( - sessionStorage.get(SessionKey.CHAT_SENT_INITIAL_PROMPTS) || "{}", - ); - sent[sessionId] = true; - sessionStorage.set( - SessionKey.CHAT_SENT_INITIAL_PROMPTS, - JSON.stringify(sent), - ); - } catch { - // Ignore storage errors - } -} - -export function removePageContext(content: string): string { - // Remove "Page URL: ..." 
pattern at start of line (case insensitive, handles various formats) - let cleaned = content.replace(/^\s*Page URL:\s*[^\n\r]*/gim, ""); - - // Find "User Message:" marker at start of line to preserve the actual user message - const userMessageMatch = cleaned.match(/^\s*User Message:\s*([\s\S]*)$/im); - if (userMessageMatch) { - // If we found "User Message:", extract everything after it - cleaned = userMessageMatch[1]; - } else { - // If no "User Message:" marker, remove "Page Content:" and everything after it at start of line - cleaned = cleaned.replace(/^\s*Page Content:[\s\S]*$/gim, ""); - } - - // Clean up extra whitespace and newlines - cleaned = cleaned.replace(/\n\s*\n\s*\n+/g, "\n\n").trim(); - return cleaned; -} - -export function createUserMessage(content: string): ChatMessageData { - return { - type: "message", - role: "user", - content, - timestamp: new Date(), - }; -} - -export function filterAuthMessages( - messages: ChatMessageData[], -): ChatMessageData[] { - return messages.filter( - (msg) => msg.type !== "credentials_needed" && msg.type !== "login_needed", - ); -} - -export function isValidMessage(msg: unknown): msg is Record { - if (typeof msg !== "object" || msg === null) { - return false; - } - const m = msg as Record; - if (typeof m.role !== "string") { - return false; - } - if (m.content !== undefined && typeof m.content !== "string") { - return false; - } - return true; -} - -export function isToolCallArray(value: unknown): value is Array<{ - id: string; - type: string; - function: { name: string; arguments: string }; -}> { - if (!Array.isArray(value)) { - return false; - } - return value.every( - (item) => - typeof item === "object" && - item !== null && - "id" in item && - typeof item.id === "string" && - "type" in item && - typeof item.type === "string" && - "function" in item && - typeof item.function === "object" && - item.function !== null && - "name" in item.function && - typeof item.function.name === "string" && - "arguments" in item.function && - typeof item.function.arguments === "string", - ); -} - -export function isAgentArray(value: unknown): value is Array<{ - id: string; - name: string; - description: string; - version?: number; - image_url?: string; -}> { - if (!Array.isArray(value)) { - return false; - } - return value.every( - (item) => - typeof item === "object" && - item !== null && - "id" in item && - typeof item.id === "string" && - "name" in item && - typeof item.name === "string" && - "description" in item && - typeof item.description === "string" && - (!("version" in item) || typeof item.version === "number") && - (!("image_url" in item) || typeof item.image_url === "string"), - ); -} - -export function extractJsonFromErrorMessage( - message: string, -): Record | null { - try { - const start = message.indexOf("{"); - if (start === -1) { - return null; - } - let depth = 0; - let end = -1; - for (let i = start; i < message.length; i++) { - const ch = message[i]; - if (ch === "{") { - depth++; - } else if (ch === "}") { - depth--; - if (depth === 0) { - end = i; - break; - } - } - } - if (end === -1) { - return null; - } - const jsonStr = message.slice(start, end + 1); - return JSON.parse(jsonStr) as Record; - } catch { - return null; - } -} - -export function parseToolResponse( - result: ToolResult, - toolId: string, - toolName: string, - timestamp?: Date, -): ChatMessageData | null { - let parsedResult: Record | null = null; - try { - parsedResult = - typeof result === "string" - ? 
JSON.parse(result) - : (result as Record); - } catch { - parsedResult = null; - } - if (parsedResult && typeof parsedResult === "object") { - const responseType = parsedResult.type as string | undefined; - if (responseType === "no_results") { - return { - type: "tool_response", - toolId, - toolName, - result: (parsedResult.message as string) || "No results found", - success: true, - timestamp: timestamp || new Date(), - }; - } - if (responseType === "agent_carousel") { - const agentsData = parsedResult.agents; - if (isAgentArray(agentsData)) { - return { - type: "agent_carousel", - toolId, - toolName: "agent_carousel", - agents: agentsData, - totalCount: parsedResult.total_count as number | undefined, - timestamp: timestamp || new Date(), - }; - } else { - console.warn("Invalid agents array in agent_carousel response"); - } - } - if (responseType === "execution_started") { - return { - type: "execution_started", - toolId, - toolName: "execution_started", - executionId: (parsedResult.execution_id as string) || "", - agentName: (parsedResult.graph_name as string) || undefined, - message: parsedResult.message as string | undefined, - libraryAgentLink: parsedResult.library_agent_link as string | undefined, - timestamp: timestamp || new Date(), - }; - } - if (responseType === "clarification_needed") { - return { - type: "clarification_needed", - toolName, - questions: - (parsedResult.questions as Array<{ - question: string; - keyword: string; - example?: string; - }>) || [], - message: - (parsedResult.message as string) || - "I need more information to proceed.", - sessionId: (parsedResult.session_id as string) || "", - timestamp: timestamp || new Date(), - }; - } - if (responseType === "operation_started") { - return { - type: "operation_started", - toolName: (parsedResult.tool_name as string) || toolName, - toolId, - operationId: (parsedResult.operation_id as string) || "", - taskId: (parsedResult.task_id as string) || undefined, // For SSE reconnection - message: - (parsedResult.message as string) || - "Operation started. You can close this tab.", - timestamp: timestamp || new Date(), - }; - } - if (responseType === "operation_pending") { - return { - type: "operation_pending", - toolName: (parsedResult.tool_name as string) || toolName, - toolId, - operationId: (parsedResult.operation_id as string) || "", - message: - (parsedResult.message as string) || - "Operation in progress. Please wait...", - timestamp: timestamp || new Date(), - }; - } - if (responseType === "operation_in_progress") { - return { - type: "operation_in_progress", - toolName: (parsedResult.tool_name as string) || toolName, - toolCallId: (parsedResult.tool_call_id as string) || toolId, - message: - (parsedResult.message as string) || - "Operation already in progress. 
Please wait...", - timestamp: timestamp || new Date(), - }; - } - if (responseType === "need_login") { - return { - type: "login_needed", - toolName: "login_needed", - message: - (parsedResult.message as string) || - "Please sign in to use chat and agent features", - sessionId: (parsedResult.session_id as string) || "", - agentInfo: parsedResult.agent_info as - | { - graph_id: string; - name: string; - trigger_type: string; - } - | undefined, - timestamp: timestamp || new Date(), - }; - } - if (responseType === "setup_requirements") { - return null; - } - if (responseType === "understanding_updated") { - return { - type: "tool_response", - toolId, - toolName, - result: (parsedResult || result) as ToolResult, - success: true, - timestamp: timestamp || new Date(), - }; - } - } - return { - type: "tool_response", - toolId, - toolName, - result: parsedResult ? (parsedResult as ToolResult) : result, - success: true, - timestamp: timestamp || new Date(), - }; -} - -export function isUserReadiness( - value: unknown, -): value is { missing_credentials?: Record } { - return ( - typeof value === "object" && - value !== null && - (!("missing_credentials" in value) || - typeof (value as any).missing_credentials === "object") - ); -} - -export function isMissingCredentials( - value: unknown, -): value is Record> { - if (typeof value !== "object" || value === null) { - return false; - } - return Object.values(value).every((v) => typeof v === "object" && v !== null); -} - -export function isSetupInfo(value: unknown): value is { - user_readiness?: Record; - agent_name?: string; -} { - return ( - typeof value === "object" && - value !== null && - (!("user_readiness" in value) || - typeof (value as any).user_readiness === "object") && - (!("agent_name" in value) || typeof (value as any).agent_name === "string") - ); -} - -export function extractCredentialsNeeded( - parsedResult: Record, - toolName: string = "run_agent", -): ChatMessageData | null { - try { - const setupInfo = parsedResult?.setup_info as - | Record - | undefined; - const userReadiness = setupInfo?.user_readiness as - | Record - | undefined; - const missingCreds = userReadiness?.missing_credentials as - | Record> - | undefined; - if (missingCreds && Object.keys(missingCreds).length > 0) { - const agentName = (setupInfo?.agent_name as string) || "this block"; - const credentials = Object.values(missingCreds).map((credInfo) => { - // Normalize to array at boundary - prefer 'types' array, fall back to single 'type' - const typesArray = credInfo.types as - | Array<"api_key" | "oauth2" | "user_password" | "host_scoped"> - | undefined; - const singleType = - (credInfo.type as - | "api_key" - | "oauth2" - | "user_password" - | "host_scoped" - | undefined) || "api_key"; - const credentialTypes = - typesArray && typesArray.length > 0 ? typesArray : [singleType]; - - return { - provider: (credInfo.provider as string) || "unknown", - providerName: - (credInfo.provider_name as string) || - (credInfo.provider as string) || - "Unknown Provider", - credentialTypes, - title: - (credInfo.title as string) || - `${(credInfo.provider_name as string) || (credInfo.provider as string)} credentials`, - scopes: credInfo.scopes as string[] | undefined, - }; - }); - return { - type: "credentials_needed", - toolName, - credentials, - message: `To run ${agentName}, you need to add ${credentials.length === 1 ? 
"credentials" : `${credentials.length} credentials`}.`, - agentName, - timestamp: new Date(), - }; - } - return null; - } catch (err) { - console.error("Failed to extract credentials from setup info:", err); - return null; - } -} - -export function extractInputsNeeded( - parsedResult: Record, - toolName: string = "run_agent", -): ChatMessageData | null { - try { - const setupInfo = parsedResult?.setup_info as - | Record - | undefined; - const requirements = setupInfo?.requirements as - | Record - | undefined; - const inputs = requirements?.inputs as - | Array> - | undefined; - const credentials = requirements?.credentials as - | Array> - | undefined; - - if (!inputs || inputs.length === 0) { - return null; - } - - const agentName = (setupInfo?.agent_name as string) || "this agent"; - const agentId = parsedResult?.graph_id as string | undefined; - const graphVersion = parsedResult?.graph_version as number | undefined; - - const properties: Record = {}; - const requiredProps: string[] = []; - inputs.forEach((input) => { - const name = input.name as string; - if (name) { - properties[name] = { - title: input.name as string, - description: (input.description as string) || "", - type: (input.type as string) || "string", - default: input.default, - enum: input.options, - format: input.format, - }; - if ((input.required as boolean) === true) { - requiredProps.push(name); - } - } - }); - - const inputSchema: Record = { - type: "object", - properties, - }; - if (requiredProps.length > 0) { - inputSchema.required = requiredProps; - } - - const credentialsSchema: Record = {}; - if (credentials && credentials.length > 0) { - credentials.forEach((cred) => { - const id = cred.id as string; - if (id) { - const credentialTypes = Array.isArray(cred.types) - ? cred.types - : [(cred.type as string) || "api_key"]; - credentialsSchema[id] = { - type: "object", - properties: {}, - credentials_provider: [cred.provider as string], - credentials_types: credentialTypes, - credentials_scopes: cred.scopes as string[] | undefined, - }; - } - }); - } - - return { - type: "inputs_needed", - toolName, - agentName, - agentId, - graphVersion, - inputSchema, - credentialsSchema: - Object.keys(credentialsSchema).length > 0 - ? 
credentialsSchema - : undefined, - message: `Please provide the required inputs to run ${agentName}.`, - timestamp: new Date(), - }; - } catch (err) { - console.error("Failed to extract inputs from setup info:", err); - return null; - } -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/useChatContainer.ts b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/useChatContainer.ts deleted file mode 100644 index 248383df42..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/useChatContainer.ts +++ /dev/null @@ -1,517 +0,0 @@ -import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessionDetailResponse"; -import { useEffect, useMemo, useRef, useState } from "react"; -import { INITIAL_STREAM_ID } from "../../chat-constants"; -import { useChatStore } from "../../chat-store"; -import { toast } from "sonner"; -import { useChatStream } from "../../useChatStream"; -import { usePageContext } from "../../usePageContext"; -import type { ChatMessageData } from "../ChatMessage/useChatMessage"; -import { - getToolIdFromMessage, - hasToolId, - isOperationMessage, - type StreamChunk, -} from "../../chat-types"; -import { createStreamEventDispatcher } from "./createStreamEventDispatcher"; -import { - createUserMessage, - filterAuthMessages, - hasSentInitialPrompt, - markInitialPromptSent, - processInitialMessages, -} from "./helpers"; - -const TOOL_RESULT_TYPES = new Set([ - "tool_response", - "agent_carousel", - "execution_started", - "clarification_needed", -]); - -// Helper to generate deduplication key for a message -function getMessageKey(msg: ChatMessageData): string { - if (msg.type === "message") { - // Don't include timestamp - dedupe by role + content only - // This handles the case where local and server timestamps differ - // Server messages are authoritative, so duplicates from local state are filtered - return `msg:${msg.role}:${msg.content}`; - } else if (msg.type === "tool_call") { - return `toolcall:${msg.toolId}`; - } else if (TOOL_RESULT_TYPES.has(msg.type)) { - // Unified key for all tool result types - same toolId with different types - // (tool_response vs agent_carousel) should deduplicate to the same key - const toolId = getToolIdFromMessage(msg); - // If no toolId, fall back to content-based key to avoid empty key collisions - if (!toolId) { - return `toolresult:content:${JSON.stringify(msg).slice(0, 200)}`; - } - return `toolresult:${toolId}`; - } else if (isOperationMessage(msg)) { - const toolId = getToolIdFromMessage(msg) || ""; - return `op:${toolId}:${msg.toolName}`; - } else { - return `${msg.type}:${JSON.stringify(msg).slice(0, 100)}`; - } -} - -interface Args { - sessionId: string | null; - initialMessages: SessionDetailResponse["messages"]; - initialPrompt?: string; - onOperationStarted?: () => void; - /** Active stream info from the server for reconnection */ - activeStream?: { - taskId: string; - lastMessageId: string; - operationId: string; - toolName: string; - }; -} - -export function useChatContainer({ - sessionId, - initialMessages, - initialPrompt, - onOperationStarted, - activeStream, -}: Args) { - const [messages, setMessages] = useState([]); - const [streamingChunks, setStreamingChunks] = useState([]); - const [hasTextChunks, setHasTextChunks] = useState(false); - const [isStreamingInitiated, setIsStreamingInitiated] = useState(false); - const [isRegionBlockedModalOpen, setIsRegionBlockedModalOpen] = - useState(false); - 
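The `getMessageKey` helper above drives the hook's client-side deduplication: both the authoritative server history and locally appended messages are reduced to stable keys (role + content for text messages, `toolId` for tool activity) so the same logical message is never rendered twice. A minimal sketch of that merge strategy — using an assumed, simplified message shape and hypothetical `keyOf`/`mergeMessages` names, not the hook's real types:

```ts
// Sketch only: a simplified stand-in for the removed ChatMessageData union.
type SimpleChatMessage =
  | { type: "message"; role: "user" | "assistant"; content: string }
  | { type: "tool_call"; toolId: string }
  | { type: "tool_response"; toolId: string };

// Mirror of the key scheme: text messages key on role + content (timestamps are
// ignored because local and server timestamps differ); tool messages key on toolId.
function keyOf(msg: SimpleChatMessage): string {
  if (msg.type === "message") return `msg:${msg.role}:${msg.content}`;
  return `tool:${msg.toolId}`;
}

// Server messages are authoritative; only local messages whose key is unseen are appended.
function mergeMessages(
  server: SimpleChatMessage[],
  local: SimpleChatMessage[],
): SimpleChatMessage[] {
  const seen = new Set(server.map(keyOf));
  return [...server, ...local.filter((m) => !seen.has(keyOf(m)))];
}
```

Ignoring timestamps in the text-message key is deliberate: local and server copies of the same message carry different timestamps, and the server history is treated as the source of truth.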
const hasResponseRef = useRef(false); - const streamingChunksRef = useRef([]); - const textFinalizedRef = useRef(false); - const streamEndedRef = useRef(false); - const previousSessionIdRef = useRef(null); - const { - error, - sendMessage: sendStreamMessage, - stopStreaming, - } = useChatStream(); - const activeStreams = useChatStore((s) => s.activeStreams); - const subscribeToStream = useChatStore((s) => s.subscribeToStream); - const setActiveTask = useChatStore((s) => s.setActiveTask); - const getActiveTask = useChatStore((s) => s.getActiveTask); - const reconnectToTask = useChatStore((s) => s.reconnectToTask); - const isStreaming = isStreamingInitiated || hasTextChunks; - // Track whether we've already connected to this activeStream to avoid duplicate connections - const connectedActiveStreamRef = useRef(null); - // Track if component is mounted to prevent state updates after unmount - const isMountedRef = useRef(true); - // Track current dispatcher to prevent multiple dispatchers from adding messages - const currentDispatcherIdRef = useRef(0); - - // Set mounted flag - reset on every mount, cleanup on unmount - useEffect(function trackMountedState() { - isMountedRef.current = true; - return function cleanup() { - isMountedRef.current = false; - }; - }, []); - - // Callback to store active task info for SSE reconnection - function handleActiveTaskStarted(taskInfo: { - taskId: string; - operationId: string; - toolName: string; - toolCallId: string; - }) { - if (!sessionId) return; - setActiveTask(sessionId, { - taskId: taskInfo.taskId, - operationId: taskInfo.operationId, - toolName: taskInfo.toolName, - lastMessageId: INITIAL_STREAM_ID, - }); - } - - // Create dispatcher for stream events - stable reference for current sessionId - // Each dispatcher gets a unique ID to prevent stale dispatchers from updating state - function createDispatcher() { - if (!sessionId) return () => {}; - // Increment dispatcher ID - only the most recent dispatcher should update state - const dispatcherId = ++currentDispatcherIdRef.current; - - const baseDispatcher = createStreamEventDispatcher({ - setHasTextChunks, - setStreamingChunks, - streamingChunksRef, - hasResponseRef, - textFinalizedRef, - streamEndedRef, - setMessages, - setIsRegionBlockedModalOpen, - sessionId, - setIsStreamingInitiated, - onOperationStarted, - onActiveTaskStarted: handleActiveTaskStarted, - }); - - // Wrap dispatcher to check if it's still the current one - return function guardedDispatcher(chunk: StreamChunk) { - // Skip if component unmounted or this is a stale dispatcher - if (!isMountedRef.current) { - return; - } - if (dispatcherId !== currentDispatcherIdRef.current) { - return; - } - baseDispatcher(chunk); - }; - } - - useEffect( - function handleSessionChange() { - const isSessionChange = sessionId !== previousSessionIdRef.current; - - // Handle session change - reset state - if (isSessionChange) { - const prevSession = previousSessionIdRef.current; - if (prevSession) { - stopStreaming(prevSession); - } - previousSessionIdRef.current = sessionId; - connectedActiveStreamRef.current = null; - setMessages([]); - setStreamingChunks([]); - streamingChunksRef.current = []; - setHasTextChunks(false); - setIsStreamingInitiated(false); - hasResponseRef.current = false; - textFinalizedRef.current = false; - streamEndedRef.current = false; - } - - if (!sessionId) return; - - // Priority 1: Check if server told us there's an active stream (most authoritative) - if (activeStream) { - const streamKey = 
`${sessionId}:${activeStream.taskId}`; - - if (connectedActiveStreamRef.current === streamKey) { - return; - } - - // Skip if there's already an active stream for this session in the store - const existingStream = activeStreams.get(sessionId); - if (existingStream && existingStream.status === "streaming") { - connectedActiveStreamRef.current = streamKey; - return; - } - - connectedActiveStreamRef.current = streamKey; - - // Clear all state before reconnection to prevent duplicates - // Server's initialMessages is authoritative; local state will be rebuilt from SSE replay - setMessages([]); - setStreamingChunks([]); - streamingChunksRef.current = []; - setHasTextChunks(false); - textFinalizedRef.current = false; - streamEndedRef.current = false; - hasResponseRef.current = false; - - setIsStreamingInitiated(true); - setActiveTask(sessionId, { - taskId: activeStream.taskId, - operationId: activeStream.operationId, - toolName: activeStream.toolName, - lastMessageId: activeStream.lastMessageId, - }); - reconnectToTask( - sessionId, - activeStream.taskId, - activeStream.lastMessageId, - createDispatcher(), - ); - // Don't return cleanup here - the guarded dispatcher handles stale events - // and the stream will complete naturally. Cleanup would prematurely stop - // the stream when effect re-runs due to activeStreams changing. - return; - } - - // Only check localStorage/in-memory on session change - if (!isSessionChange) return; - - // Priority 2: Check localStorage for active task - const activeTask = getActiveTask(sessionId); - if (activeTask) { - // Clear all state before reconnection to prevent duplicates - // Server's initialMessages is authoritative; local state will be rebuilt from SSE replay - setMessages([]); - setStreamingChunks([]); - streamingChunksRef.current = []; - setHasTextChunks(false); - textFinalizedRef.current = false; - streamEndedRef.current = false; - hasResponseRef.current = false; - - setIsStreamingInitiated(true); - reconnectToTask( - sessionId, - activeTask.taskId, - activeTask.lastMessageId, - createDispatcher(), - ); - // Don't return cleanup here - the guarded dispatcher handles stale events - return; - } - - // Priority 3: Check for an in-memory active stream (same-tab scenario) - const inMemoryStream = activeStreams.get(sessionId); - if (!inMemoryStream || inMemoryStream.status !== "streaming") { - return; - } - - setIsStreamingInitiated(true); - const skipReplay = initialMessages.length > 0; - return subscribeToStream(sessionId, createDispatcher(), skipReplay); - }, - [ - sessionId, - stopStreaming, - activeStreams, - subscribeToStream, - onOperationStarted, - getActiveTask, - reconnectToTask, - activeStream, - setActiveTask, - ], - ); - - // Collect toolIds from completed tool results in initialMessages - // Used to filter out operation messages when their results arrive - const completedToolIds = useMemo(() => { - const processedInitial = processInitialMessages(initialMessages); - const ids = new Set(); - for (const msg of processedInitial) { - if ( - msg.type === "tool_response" || - msg.type === "agent_carousel" || - msg.type === "execution_started" - ) { - const toolId = hasToolId(msg) ? 
msg.toolId : undefined; - if (toolId) { - ids.add(toolId); - } - } - } - return ids; - }, [initialMessages]); - - // Clean up local operation messages when their completed results arrive from polling - // This effect runs when completedToolIds changes (i.e., when polling brings new results) - useEffect( - function cleanupCompletedOperations() { - if (completedToolIds.size === 0) return; - - setMessages((prev) => { - const filtered = prev.filter((msg) => { - if (isOperationMessage(msg)) { - const toolId = getToolIdFromMessage(msg); - if (toolId && completedToolIds.has(toolId)) { - return false; // Remove - operation completed - } - } - return true; - }); - // Only update state if something was actually filtered - return filtered.length === prev.length ? prev : filtered; - }); - }, - [completedToolIds], - ); - - // Combine initial messages from backend with local streaming messages, - // Server messages maintain correct order; only append truly new local messages - const allMessages = useMemo(() => { - const processedInitial = processInitialMessages(initialMessages); - - // Build a set of keys from server messages for deduplication - const serverKeys = new Set(); - for (const msg of processedInitial) { - serverKeys.add(getMessageKey(msg)); - } - - // Filter local messages: remove duplicates and completed operation messages - const newLocalMessages = messages.filter((msg) => { - // Remove operation messages for completed tools - if (isOperationMessage(msg)) { - const toolId = getToolIdFromMessage(msg); - if (toolId && completedToolIds.has(toolId)) { - return false; - } - } - // Remove messages that already exist in server data - const key = getMessageKey(msg); - return !serverKeys.has(key); - }); - - // Server messages first (correct order), then new local messages - const combined = [...processedInitial, ...newLocalMessages]; - - // Post-processing: Remove duplicate assistant messages that can occur during - // race conditions (e.g., rapid screen switching during SSE reconnection). 
- // Two assistant messages are considered duplicates if: - // - They are both text messages with role "assistant" - // - One message's content starts with the other's content (partial vs complete) - // - Or they have very similar content (>80% overlap at the start) - const deduplicated: ChatMessageData[] = []; - for (let i = 0; i < combined.length; i++) { - const current = combined[i]; - - // Check if this is an assistant text message - if (current.type !== "message" || current.role !== "assistant") { - deduplicated.push(current); - continue; - } - - // Look for duplicate assistant messages in the rest of the array - let dominated = false; - for (let j = 0; j < combined.length; j++) { - if (i === j) continue; - const other = combined[j]; - if (other.type !== "message" || other.role !== "assistant") continue; - - const currentContent = current.content || ""; - const otherContent = other.content || ""; - - // Skip empty messages - if (!currentContent.trim() || !otherContent.trim()) continue; - - // Check if current is a prefix of other (current is incomplete version) - if ( - otherContent.length > currentContent.length && - otherContent.startsWith(currentContent.slice(0, 100)) - ) { - // Current is a shorter/incomplete version of other - skip it - dominated = true; - break; - } - - // Check if messages are nearly identical (within a small difference) - // This catches cases where content differs only slightly - const minLen = Math.min(currentContent.length, otherContent.length); - const compareLen = Math.min(minLen, 200); // Compare first 200 chars - if ( - compareLen > 50 && - currentContent.slice(0, compareLen) === - otherContent.slice(0, compareLen) - ) { - // Same prefix - keep the longer one - if (otherContent.length > currentContent.length) { - dominated = true; - break; - } - } - } - - if (!dominated) { - deduplicated.push(current); - } - } - - return deduplicated; - }, [initialMessages, messages, completedToolIds]); - - async function sendMessage( - content: string, - isUserMessage: boolean = true, - context?: { url: string; content: string }, - ) { - if (!sessionId) return; - - setIsRegionBlockedModalOpen(false); - if (isUserMessage) { - const userMessage = createUserMessage(content); - setMessages((prev) => [...filterAuthMessages(prev), userMessage]); - } else { - setMessages((prev) => filterAuthMessages(prev)); - } - setStreamingChunks([]); - streamingChunksRef.current = []; - setHasTextChunks(false); - setIsStreamingInitiated(true); - hasResponseRef.current = false; - textFinalizedRef.current = false; - streamEndedRef.current = false; - - try { - await sendStreamMessage( - sessionId, - content, - createDispatcher(), - isUserMessage, - context, - ); - } catch (err) { - setIsStreamingInitiated(false); - if (err instanceof Error && err.name === "AbortError") return; - - const errorMessage = - err instanceof Error ? 
err.message : "Failed to send message"; - toast.error("Failed to send message", { - description: errorMessage, - }); - } - } - - function handleStopStreaming() { - stopStreaming(); - setStreamingChunks([]); - streamingChunksRef.current = []; - setHasTextChunks(false); - setIsStreamingInitiated(false); - } - - const { capturePageContext } = usePageContext(); - const sendMessageRef = useRef(sendMessage); - sendMessageRef.current = sendMessage; - - useEffect( - function handleInitialPrompt() { - if (!initialPrompt || !sessionId) return; - if (initialMessages.length > 0) return; - if (hasSentInitialPrompt(sessionId)) return; - - markInitialPromptSent(sessionId); - const context = capturePageContext(); - sendMessageRef.current(initialPrompt, true, context); - }, - [initialPrompt, sessionId, initialMessages.length, capturePageContext], - ); - - async function sendMessageWithContext( - content: string, - isUserMessage: boolean = true, - ) { - const context = capturePageContext(); - await sendMessage(content, isUserMessage, context); - } - - function handleRegionModalOpenChange(open: boolean) { - setIsRegionBlockedModalOpen(open); - } - - function handleRegionModalClose() { - setIsRegionBlockedModalOpen(false); - } - - return { - messages: allMessages, - streamingChunks, - isStreaming, - error, - isRegionBlockedModalOpen, - setIsRegionBlockedModalOpen, - sendMessageWithContext, - handleRegionModalOpenChange, - handleRegionModalClose, - sendMessage, - stopStreaming: handleStopStreaming, - }; -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatCredentialsSetup/ChatCredentialsSetup.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatCredentialsSetup/ChatCredentialsSetup.tsx deleted file mode 100644 index f0dfadd1f7..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatCredentialsSetup/ChatCredentialsSetup.tsx +++ /dev/null @@ -1,151 +0,0 @@ -import { Text } from "@/components/atoms/Text/Text"; -import { CredentialsInput } from "@/components/contextual/CredentialsInput/CredentialsInput"; -import type { BlockIOCredentialsSubSchema } from "@/lib/autogpt-server-api"; -import { cn } from "@/lib/utils"; -import { CheckIcon, RobotIcon, WarningIcon } from "@phosphor-icons/react"; -import { useEffect, useRef } from "react"; -import { useChatCredentialsSetup } from "./useChatCredentialsSetup"; - -export interface CredentialInfo { - provider: string; - providerName: string; - credentialTypes: Array< - "api_key" | "oauth2" | "user_password" | "host_scoped" - >; - title: string; - scopes?: string[]; -} - -interface Props { - credentials: CredentialInfo[]; - agentName?: string; - message: string; - onAllCredentialsComplete: () => void; - onCancel: () => void; - className?: string; -} - -function createSchemaFromCredentialInfo( - credential: CredentialInfo, -): BlockIOCredentialsSubSchema { - return { - type: "object", - properties: {}, - credentials_provider: [credential.provider], - credentials_types: credential.credentialTypes, - credentials_scopes: credential.scopes, - discriminator: undefined, - discriminator_mapping: undefined, - discriminator_values: undefined, - }; -} - -export function ChatCredentialsSetup({ - credentials, - agentName: _agentName, - message, - onAllCredentialsComplete, - onCancel: _onCancel, -}: Props) { - const { selectedCredentials, isAllComplete, handleCredentialSelect } = - useChatCredentialsSetup(credentials); - - // Track if we've already called completion to prevent double calls - const 
hasCalledCompleteRef = useRef(false); - - // Reset the completion flag when credentials change (new credential setup flow) - useEffect( - function resetCompletionFlag() { - hasCalledCompleteRef.current = false; - }, - [credentials], - ); - - // Auto-call completion when all credentials are configured - useEffect( - function autoCompleteWhenReady() { - if (isAllComplete && !hasCalledCompleteRef.current) { - hasCalledCompleteRef.current = true; - onAllCredentialsComplete(); - } - }, - [isAllComplete, onAllCredentialsComplete], - ); - - return ( -
-            Credentials Required
-            {message}
- {credentials.map((cred, index) => { - const schema = createSchemaFromCredentialInfo(cred); - const isSelected = !!selectedCredentials[cred.provider]; - - return ( -
-
- {isSelected ? ( - - ) : ( - - )} - - {cred.providerName} - -
- - - handleCredentialSelect(cred.provider, credMeta) - } - /> -
- ); - })} -
- ); -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatCredentialsSetup/useChatCredentialsSetup.ts b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatCredentialsSetup/useChatCredentialsSetup.ts deleted file mode 100644 index 6b4b26e834..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatCredentialsSetup/useChatCredentialsSetup.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { useState, useMemo } from "react"; -import type { CredentialInfo } from "./ChatCredentialsSetup"; -import type { CredentialsMetaInput } from "@/lib/autogpt-server-api"; - -export function useChatCredentialsSetup(credentials: CredentialInfo[]) { - const [selectedCredentials, setSelectedCredentials] = useState< - Record - >({}); - - // Check if all credentials are configured - const isAllComplete = useMemo( - function checkAllComplete() { - if (credentials.length === 0) return false; - return credentials.every((cred) => selectedCredentials[cred.provider]); - }, - [credentials, selectedCredentials], - ); - - function handleCredentialSelect( - provider: string, - credential?: CredentialsMetaInput, - ) { - if (credential) { - setSelectedCredentials((prev) => ({ - ...prev, - [provider]: credential, - })); - } - } - - return { - selectedCredentials, - isAllComplete, - handleCredentialSelect, - }; -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatErrorState/ChatErrorState.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatErrorState/ChatErrorState.tsx deleted file mode 100644 index bac13d1b0c..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatErrorState/ChatErrorState.tsx +++ /dev/null @@ -1,30 +0,0 @@ -import React from "react"; -import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard"; -import { cn } from "@/lib/utils"; - -export interface ChatErrorStateProps { - error: Error; - onRetry?: () => void; - className?: string; -} - -export function ChatErrorState({ - error, - onRetry, - className, -}: ChatErrorStateProps) { - return ( -
- -
- ); -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatLoader/ChatLoader.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatLoader/ChatLoader.tsx deleted file mode 100644 index 76cee8dbae..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatLoader/ChatLoader.tsx +++ /dev/null @@ -1,7 +0,0 @@ -export function ChatLoader() { - return ( -
-
-
- ); -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatLoadingState/ChatLoadingState.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatLoadingState/ChatLoadingState.tsx deleted file mode 100644 index c0cdb33c50..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatLoadingState/ChatLoadingState.tsx +++ /dev/null @@ -1,19 +0,0 @@ -import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner"; -import { cn } from "@/lib/utils"; - -export interface ChatLoadingStateProps { - message?: string; - className?: string; -} - -export function ChatLoadingState({ className }: ChatLoadingStateProps) { - return ( -
-
- -
-
- ); -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatMessage/ChatMessage.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatMessage/ChatMessage.tsx deleted file mode 100644 index 44dae40eb4..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatMessage/ChatMessage.tsx +++ /dev/null @@ -1,448 +0,0 @@ -"use client"; - -import { Button } from "@/components/atoms/Button/Button"; -import { useSupabase } from "@/lib/supabase/hooks/useSupabase"; -import { cn } from "@/lib/utils"; -import { - ArrowsClockwiseIcon, - CheckCircleIcon, - CheckIcon, -} from "@phosphor-icons/react"; -import { useRouter } from "next/navigation"; -import { useCallback, useState } from "react"; -import { AgentCarouselMessage } from "../AgentCarouselMessage/AgentCarouselMessage"; -import { AIChatBubble } from "../AIChatBubble/AIChatBubble"; -import { AuthPromptWidget } from "../AuthPromptWidget/AuthPromptWidget"; -import { ChatCredentialsSetup } from "../ChatCredentialsSetup/ChatCredentialsSetup"; -import { ClarificationQuestionsWidget } from "../ClarificationQuestionsWidget/ClarificationQuestionsWidget"; -import { ExecutionStartedMessage } from "../ExecutionStartedMessage/ExecutionStartedMessage"; -import { PendingOperationWidget } from "../PendingOperationWidget/PendingOperationWidget"; -import { MarkdownContent } from "../MarkdownContent/MarkdownContent"; -import { NoResultsMessage } from "../NoResultsMessage/NoResultsMessage"; -import { ToolCallMessage } from "../ToolCallMessage/ToolCallMessage"; -import { ToolResponseMessage } from "../ToolResponseMessage/ToolResponseMessage"; -import { UserChatBubble } from "../UserChatBubble/UserChatBubble"; -import { useChatMessage, type ChatMessageData } from "./useChatMessage"; - -function stripInternalReasoning(content: string): string { - const cleaned = content.replace( - /[\s\S]*?<\/internal_reasoning>/gi, - "", - ); - return cleaned.replace(/\n{3,}/g, "\n\n").trim(); -} - -function getDisplayContent(message: ChatMessageData, isUser: boolean): string { - if (message.type !== "message") return ""; - if (isUser) return message.content; - return stripInternalReasoning(message.content); -} - -export interface ChatMessageProps { - message: ChatMessageData; - messages?: ChatMessageData[]; - index?: number; - isStreaming?: boolean; - className?: string; - onDismissLogin?: () => void; - onDismissCredentials?: () => void; - onSendMessage?: (content: string, isUserMessage?: boolean) => void; - agentOutput?: ChatMessageData; - isFinalMessage?: boolean; -} - -export function ChatMessage({ - message, - messages = [], - index = -1, - isStreaming = false, - className, - onDismissCredentials, - onSendMessage, - agentOutput, - isFinalMessage = true, -}: ChatMessageProps) { - const { user } = useSupabase(); - const router = useRouter(); - const [copied, setCopied] = useState(false); - const { - isUser, - isToolCall, - isToolResponse, - isLoginNeeded, - isCredentialsNeeded, - isClarificationNeeded, - isOperationStarted, - isOperationPending, - isOperationInProgress, - } = useChatMessage(message); - const displayContent = getDisplayContent(message, isUser); - - const handleAllCredentialsComplete = useCallback( - function handleAllCredentialsComplete() { - // Send a user message that explicitly asks to retry the setup - // This ensures the LLM calls get_required_setup_info again and proceeds with execution - if (onSendMessage) { - onSendMessage( - "I've configured the required credentials. 
Please check if everything is ready and proceed with setting up the agent.", - ); - } - // Optionally dismiss the credentials prompt - if (onDismissCredentials) { - onDismissCredentials(); - } - }, - [onSendMessage, onDismissCredentials], - ); - - function handleCancelCredentials() { - // Dismiss the credentials prompt - if (onDismissCredentials) { - onDismissCredentials(); - } - } - - const handleCopy = useCallback( - async function handleCopy() { - if (message.type !== "message") return; - if (!displayContent) return; - - try { - await navigator.clipboard.writeText(displayContent); - setCopied(true); - setTimeout(() => setCopied(false), 2000); - } catch (error) { - console.error("Failed to copy:", error); - } - }, - [displayContent, message], - ); - - const handleTryAgain = useCallback(() => { - if (message.type !== "message" || !onSendMessage) return; - onSendMessage(message.content, message.role === "user"); - }, [message, onSendMessage]); - - const handleViewExecution = useCallback(() => { - if (message.type === "execution_started" && message.libraryAgentLink) { - router.push(message.libraryAgentLink); - } - }, [message, router]); - - // Render credentials needed messages - if (isCredentialsNeeded && message.type === "credentials_needed") { - return ( - - ); - } - - if (isClarificationNeeded && message.type === "clarification_needed") { - const hasUserReplyAfter = - index >= 0 && - messages - .slice(index + 1) - .some((m) => m.type === "message" && m.role === "user"); - - const handleClarificationAnswers = (answers: Record) => { - if (onSendMessage) { - // Iterate over questions (preserves original order) instead of answers - const contextMessage = message.questions - .map((q) => { - const answer = answers[q.keyword] || ""; - return `> ${q.question}\n\n${answer}`; - }) - .join("\n\n"); - - onSendMessage( - `**Here are my answers:**\n\n${contextMessage}\n\nPlease proceed with creating the agent.`, - ); - } - }; - - return ( - - ); - } - - // Render login needed messages - if (isLoginNeeded && message.type === "login_needed") { - // If user is already logged in, show success message instead of auth prompt - if (user) { - return ( -
-
-
-
-
- -
-
-

- Successfully Authenticated -

-

- You're now signed in and ready to continue -

-
-
-
-
-
- ); - } - - // Show auth prompt if not logged in - return ( -
- -
- ); - } - - // Render tool call messages - if (isToolCall && message.type === "tool_call") { - // Check if this tool call is currently streaming - // A tool call is streaming if: - // 1. isStreaming is true - // 2. This is the last tool_call message - // 3. There's no tool_response for this tool call yet - const isToolCallStreaming = - isStreaming && - index >= 0 && - (() => { - // Find the last tool_call index - let lastToolCallIndex = -1; - for (let i = messages.length - 1; i >= 0; i--) { - if (messages[i].type === "tool_call") { - lastToolCallIndex = i; - break; - } - } - // Check if this is the last tool_call and there's no response yet - if (index === lastToolCallIndex) { - // Check if there's a tool_response for this tool call - const hasResponse = messages - .slice(index + 1) - .some( - (msg) => - msg.type === "tool_response" && msg.toolId === message.toolId, - ); - return !hasResponse; - } - return false; - })(); - - return ( -
- -
- ); - } - - // Render no_results messages - use dedicated component, not ToolResponseMessage - if (message.type === "no_results") { - return ( -
- -
- ); - } - - // Render agent_carousel messages - use dedicated component, not ToolResponseMessage - if (message.type === "agent_carousel") { - return ( -
- -
- ); - } - - // Render execution_started messages - use dedicated component, not ToolResponseMessage - if (message.type === "execution_started") { - return ( -
- -
- ); - } - - // Render operation_started messages (long-running background operations) - if (isOperationStarted && message.type === "operation_started") { - return ( - - ); - } - - // Render operation_pending messages (operations in progress when refreshing) - if (isOperationPending && message.type === "operation_pending") { - return ( - - ); - } - - // Render operation_in_progress messages (duplicate request while operation running) - if (isOperationInProgress && message.type === "operation_in_progress") { - return ( - - ); - } - - // Render tool response messages (but skip agent_output if it's being rendered inside assistant message) - if (isToolResponse && message.type === "tool_response") { - return ( -
- -
- ); - } - - // Render regular chat messages - if (message.type === "message") { - return ( -
-
-
- {isUser ? ( - - - - ) : ( - - - {agentOutput && agentOutput.type === "tool_response" && ( -
- -
- )} -
- )} -
- {isUser && onSendMessage && ( - - )} - {!isUser && isFinalMessage && !isStreaming && ( - - )} -
-
-
-
- ); - } - - // Fallback for unknown message types - return null; -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatMessage/useChatMessage.ts b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatMessage/useChatMessage.ts deleted file mode 100644 index 6809497a93..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatMessage/useChatMessage.ts +++ /dev/null @@ -1,157 +0,0 @@ -import type { ToolArguments, ToolResult } from "@/types/chat"; -import { formatDistanceToNow } from "date-fns"; - -export type ChatMessageData = - | { - type: "message"; - role: "user" | "assistant" | "system"; - content: string; - timestamp?: string | Date; - } - | { - type: "tool_call"; - toolId: string; - toolName: string; - arguments?: ToolArguments; - timestamp?: string | Date; - } - | { - type: "tool_response"; - toolId: string; - toolName: string; - result: ToolResult; - success?: boolean; - timestamp?: string | Date; - } - | { - type: "login_needed"; - toolName: string; - message: string; - sessionId: string; - agentInfo?: { - graph_id: string; - name: string; - trigger_type: string; - }; - timestamp?: string | Date; - } - | { - type: "credentials_needed"; - toolName: string; - credentials: Array<{ - provider: string; - providerName: string; - credentialTypes: Array< - "api_key" | "oauth2" | "user_password" | "host_scoped" - >; - title: string; - scopes?: string[]; - }>; - message: string; - agentName?: string; - timestamp?: string | Date; - } - | { - type: "no_results"; - toolName: string; - message: string; - suggestions?: string[]; - sessionId?: string; - timestamp?: string | Date; - } - | { - type: "agent_carousel"; - toolId: string; - toolName: string; - agents: Array<{ - id: string; - name: string; - description: string; - version?: number; - image_url?: string; - }>; - totalCount?: number; - timestamp?: string | Date; - } - | { - type: "execution_started"; - toolId: string; - toolName: string; - executionId: string; - agentName?: string; - message?: string; - libraryAgentLink?: string; - timestamp?: string | Date; - } - | { - type: "inputs_needed"; - toolName: string; - agentName?: string; - agentId?: string; - graphVersion?: number; - inputSchema: Record; - credentialsSchema?: Record; - message: string; - timestamp?: string | Date; - } - | { - type: "clarification_needed"; - toolName: string; - questions: Array<{ - question: string; - keyword: string; - example?: string; - }>; - message: string; - sessionId: string; - timestamp?: string | Date; - } - | { - type: "operation_started"; - toolName: string; - toolId: string; - operationId: string; - taskId?: string; // For SSE reconnection - message: string; - timestamp?: string | Date; - } - | { - type: "operation_pending"; - toolName: string; - toolId: string; - operationId: string; - message: string; - timestamp?: string | Date; - } - | { - type: "operation_in_progress"; - toolName: string; - toolCallId: string; - message: string; - timestamp?: string | Date; - }; - -export function useChatMessage(message: ChatMessageData) { - const formattedTimestamp = message.timestamp - ? 
formatDistanceToNow(new Date(message.timestamp), { addSuffix: true }) - : "Just now"; - - return { - formattedTimestamp, - isUser: message.type === "message" && message.role === "user", - isAssistant: message.type === "message" && message.role === "assistant", - isSystem: message.type === "message" && message.role === "system", - isToolCall: message.type === "tool_call", - isToolResponse: message.type === "tool_response", - isLoginNeeded: message.type === "login_needed", - isCredentialsNeeded: message.type === "credentials_needed", - isNoResults: message.type === "no_results", - isAgentCarousel: message.type === "agent_carousel", - isExecutionStarted: message.type === "execution_started", - isInputsNeeded: message.type === "inputs_needed", - isClarificationNeeded: message.type === "clarification_needed", - isOperationStarted: message.type === "operation_started", - isOperationPending: message.type === "operation_pending", - isOperationInProgress: message.type === "operation_in_progress", - }; -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ExecutionStartedMessage/ExecutionStartedMessage.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/ExecutionStartedMessage/ExecutionStartedMessage.tsx deleted file mode 100644 index 1ac3b440e0..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/ExecutionStartedMessage/ExecutionStartedMessage.tsx +++ /dev/null @@ -1,90 +0,0 @@ -import { Button } from "@/components/atoms/Button/Button"; -import { Text } from "@/components/atoms/Text/Text"; -import { cn } from "@/lib/utils"; -import { ArrowSquareOut, CheckCircle, Play } from "@phosphor-icons/react"; - -export interface ExecutionStartedMessageProps { - executionId: string; - agentName?: string; - message?: string; - onViewExecution?: () => void; - className?: string; -} - -export function ExecutionStartedMessage({ - executionId, - agentName, - message = "Agent execution started successfully", - onViewExecution, - className, -}: ExecutionStartedMessageProps) { - return ( -
- {/* Icon & Header */} -
-
- -
-
- - Execution Started - - - {message} - -
-
- - {/* Details */} -
-
- {agentName && ( -
- - Agent: - - - {agentName} - -
- )} -
- - Execution ID: - - - {executionId.slice(0, 16)}... - -
-
-
- - {/* Action Buttons */} - {onViewExecution && ( -
- -
- )} - -
- - - Your agent is now running. You can monitor its progress in the monitor - page. - -
-
- ); -} diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/MarkdownContent/MarkdownContent.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/MarkdownContent/MarkdownContent.tsx deleted file mode 100644 index ecadbe938b..0000000000 --- a/autogpt_platform/frontend/src/components/contextual/Chat/components/MarkdownContent/MarkdownContent.tsx +++ /dev/null @@ -1,349 +0,0 @@ -"use client"; - -import { getGetWorkspaceDownloadFileByIdUrl } from "@/app/api/__generated__/endpoints/workspace/workspace"; -import { cn } from "@/lib/utils"; -import { EyeSlash } from "@phosphor-icons/react"; -import React, { useState } from "react"; -import ReactMarkdown from "react-markdown"; -import remarkGfm from "remark-gfm"; - -interface MarkdownContentProps { - content: string; - className?: string; -} - -interface CodeProps extends React.HTMLAttributes { - children?: React.ReactNode; - className?: string; -} - -interface ListProps extends React.HTMLAttributes { - children?: React.ReactNode; - className?: string; -} - -interface ListItemProps extends React.HTMLAttributes { - children?: React.ReactNode; - className?: string; -} - -interface InputProps extends React.InputHTMLAttributes { - type?: string; -} - -/** - * Converts a workspace:// URL to a proxy URL that routes through Next.js to the backend. - * workspace://abc123 -> /api/proxy/api/workspace/files/abc123/download - * - * Uses the generated API URL helper and routes through the Next.js proxy - * which handles authentication and proper backend routing. - */ -/** - * URL transformer for ReactMarkdown. - * Converts workspace:// URLs to proxy URLs that route through Next.js to the backend. - * workspace://abc123 -> /api/proxy/api/workspace/files/abc123/download - * - * This is needed because ReactMarkdown sanitizes URLs and only allows - * http, https, mailto, and tel protocols by default. - */ -function resolveWorkspaceUrl(src: string): string { - if (src.startsWith("workspace://")) { - // Strip MIME type fragment if present (e.g., workspace://abc123#video/mp4 → abc123) - const withoutPrefix = src.replace("workspace://", ""); - const fileId = withoutPrefix.split("#")[0]; - // Use the generated API URL helper to get the correct path - const apiPath = getGetWorkspaceDownloadFileByIdUrl(fileId); - // Route through the Next.js proxy (same pattern as customMutator for client-side) - return `/api/proxy${apiPath}`; - } - return src; -} - -/** - * Check if the image URL is a workspace file (AI cannot see these yet). - * After URL transformation, workspace files have URLs like /api/proxy/api/workspace/files/... - */ -function isWorkspaceImage(src: string | undefined): boolean { - return src?.includes("/workspace/files/") ?? false; -} - -/** - * Renders a workspace video with controls and an optional "AI cannot see" badge. - */ -function WorkspaceVideo({ - src, - aiCannotSee, -}: { - src: string; - aiCannotSee: boolean; -}) { - return ( - - - {aiCannotSee && ( - - - AI cannot see this video - - )} - - ); -} - -/** - * Custom image component that shows an indicator when the AI cannot see the image. - * Also handles the "video:" alt-text prefix convention to render