-
-
-
Require human approval
-
- The agent will pause and wait for your review before
- continuing
-
+ {shouldShowToggle ? (
+ <>
+ {showHITLToggle && (
+
+
+
+
+ Human-in-the-loop approval
+
+
+ The agent will pause at human-in-the-loop blocks and
+ wait for your review before continuing
+
+
+
+
-
-
-
+ )}
+ {showSensitiveActionToggle && (
+
+
+
+
+ Sensitive action approval
+
+
+ The agent will pause at sensitive action blocks and wait
+ for your review before continuing
+
+
+
+
+
+ )}
+ >
) : (
diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-run-draft-view.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-run-draft-view.tsx
index 53a7e8b860..0147c19a5c 100644
--- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-run-draft-view.tsx
+++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-run-draft-view.tsx
@@ -1,8 +1,15 @@
"use client";
-import React, { useCallback, useEffect, useMemo, useState } from "react";
+import React, {
+ useCallback,
+ useContext,
+ useEffect,
+ useMemo,
+ useState,
+} from "react";
import {
CredentialsMetaInput,
+ CredentialsType,
GraphExecutionID,
GraphMeta,
LibraryAgentPreset,
@@ -29,7 +36,11 @@ import {
} from "@/components/__legacy__/ui/icons";
import { Input } from "@/components/__legacy__/ui/input";
import { Button } from "@/components/atoms/Button/Button";
-import { CredentialsInput } from "@/components/contextual/CredentialsInput/CredentialsInput";
+import { CredentialsGroupedView } from "@/components/contextual/CredentialsInput/components/CredentialsGroupedView/CredentialsGroupedView";
+import {
+ findSavedCredentialByProviderAndType,
+ findSavedUserCredentialByProviderAndType,
+} from "@/components/contextual/CredentialsInput/components/CredentialsGroupedView/helpers";
import { InformationTooltip } from "@/components/molecules/InformationTooltip/InformationTooltip";
import {
useToast,
@@ -37,6 +48,7 @@ import {
} from "@/components/molecules/Toast/use-toast";
import { humanizeCronExpression } from "@/lib/cron-expression-utils";
import { cn, isEmpty } from "@/lib/utils";
+import { CredentialsProvidersContext } from "@/providers/agent-credentials/credentials-provider";
import { ClockIcon, CopyIcon, InfoIcon } from "@phosphor-icons/react";
import { CalendarClockIcon, Trash2Icon } from "lucide-react";
@@ -90,6 +102,7 @@ export function AgentRunDraftView({
const api = useBackendAPI();
const { toast } = useToast();
const toastOnFail = useToastOnFail();
+ const allProviders = useContext(CredentialsProvidersContext);
const [inputValues, setInputValues] = useState>({});
const [inputCredentials, setInputCredentials] = useState<
@@ -128,6 +141,77 @@ export function AgentRunDraftView({
() => graph.credentials_input_schema.properties,
[graph],
);
+ const credentialFields = useMemo(
+ function getCredentialFields() {
+ return Object.entries(agentCredentialsInputFields);
+ },
+ [agentCredentialsInputFields],
+ );
+ const requiredCredentials = useMemo(
+ function getRequiredCredentials() {
+ return new Set(
+ (graph.credentials_input_schema?.required as string[]) || [],
+ );
+ },
+ [graph.credentials_input_schema?.required],
+ );
+
+ useEffect(
+ function initializeDefaultCredentials() {
+ if (!allProviders) return;
+ if (!graph.credentials_input_schema?.properties) return;
+ if (requiredCredentials.size === 0) return;
+
+ setInputCredentials(function updateCredentials(currentCreds) {
+ const next = { ...currentCreds };
+ let didAdd = false;
+
+ for (const key of requiredCredentials) {
+ if (next[key]) continue;
+ const schema = graph.credentials_input_schema.properties[key];
+ if (!schema) continue;
+
+ const providerNames = schema.credentials_provider || [];
+ const credentialTypes = schema.credentials_types || [];
+ const requiredScopes = schema.credentials_scopes;
+
+ const userCredential = findSavedUserCredentialByProviderAndType(
+ providerNames,
+ credentialTypes,
+ requiredScopes,
+ allProviders,
+ );
+
+ const savedCredential =
+ userCredential ||
+ findSavedCredentialByProviderAndType(
+ providerNames,
+ credentialTypes,
+ requiredScopes,
+ allProviders,
+ );
+
+ if (!savedCredential) continue;
+
+ next[key] = {
+ id: savedCredential.id,
+ provider: savedCredential.provider,
+ type: savedCredential.type as CredentialsType,
+ title: savedCredential.title,
+ };
+ didAdd = true;
+ }
+
+ if (!didAdd) return currentCreds;
+ return next;
+ });
+ },
+ [
+ allProviders,
+ graph.credentials_input_schema?.properties,
+ requiredCredentials,
+ ],
+ );
const [allRequiredInputsAreSet, missingInputs] = useMemo(() => {
const nonEmptyInputs = new Set(
@@ -145,18 +229,35 @@ export function AgentRunDraftView({
);
return [isSuperset, difference];
}, [agentInputSchema.required, inputValues]);
- const [allCredentialsAreSet, missingCredentials] = useMemo(() => {
- const availableCredentials = new Set(Object.keys(inputCredentials));
- const allCredentials = new Set(Object.keys(agentCredentialsInputFields));
- // Backwards-compatible implementation of isSupersetOf and difference
- const isSuperset = Array.from(allCredentials).every((item) =>
- availableCredentials.has(item),
- );
- const difference = Array.from(allCredentials).filter(
- (item) => !availableCredentials.has(item),
- );
- return [isSuperset, difference];
- }, [agentCredentialsInputFields, inputCredentials]);
+ const [allCredentialsAreSet, missingCredentials] = useMemo(
+ function getCredentialStatus() {
+ const missing = Array.from(requiredCredentials).filter((key) => {
+ const cred = inputCredentials[key];
+ return !cred || !cred.id;
+ });
+ return [missing.length === 0, missing];
+ },
+ [requiredCredentials, inputCredentials],
+ );
+ function addChangedCredentials(prev: Set) {
+ const next = new Set(prev);
+ next.add("credentials");
+ return next;
+ }
+
+ function handleCredentialChange(key: string, value?: CredentialsMetaInput) {
+ setInputCredentials(function updateInputCredentials(currentCreds) {
+ const next = { ...currentCreds };
+ if (value === undefined) {
+ delete next[key];
+ return next;
+ }
+ next[key] = value;
+ return next;
+ });
+ setChangedPresetAttributes(addChangedCredentials);
+ }
+
const notifyMissingInputs = useCallback(
(needPresetName: boolean = true) => {
const allMissingFields = (
@@ -649,35 +750,6 @@ export function AgentRunDraftView({
>
)}
- {/* Credentials inputs */}
- {Object.entries(agentCredentialsInputFields).map(
- ([key, inputSubSchema]) => (
- {
- setInputCredentials((obj) => {
- const newObj = { ...obj };
- if (value === undefined) {
- delete newObj[key];
- return newObj;
- }
- return {
- ...obj,
- [key]: value,
- };
- });
- setChangedPresetAttributes((prev) =>
- prev.add("credentials"),
- );
- }}
- />
- ),
- )}
-
{/* Regular inputs */}
{Object.entries(agentInputFields).map(([key, inputSubSchema]) => (
))}
+
+ {/* Credentials inputs */}
+ {credentialFields.length > 0 && (
+
+ )}
diff --git a/autogpt_platform/frontend/src/app/(platform)/library/components/LibraryAgentCard/useLibraryAgentCard.ts b/autogpt_platform/frontend/src/app/(platform)/library/components/LibraryAgentCard/useLibraryAgentCard.ts
index 4232847226..87e9e9e9bc 100644
--- a/autogpt_platform/frontend/src/app/(platform)/library/components/LibraryAgentCard/useLibraryAgentCard.ts
+++ b/autogpt_platform/frontend/src/app/(platform)/library/components/LibraryAgentCard/useLibraryAgentCard.ts
@@ -8,6 +8,8 @@ import { useGetV2GetUserProfile } from "@/app/api/__generated__/endpoints/store/
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { okData } from "@/app/api/helpers";
import { useToast } from "@/components/molecules/Toast/use-toast";
+import { isLogoutInProgress } from "@/lib/autogpt-server-api/helpers";
+import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { updateFavoriteInQueries } from "./helpers";
interface Props {
@@ -23,10 +25,14 @@ export function useLibraryAgentCard({ agent }: Props) {
const { toast } = useToast();
const queryClient = getQueryClient();
const { mutateAsync: updateLibraryAgent } = usePatchV2UpdateLibraryAgent();
+ const { user, isLoggedIn } = useSupabase();
+ const logoutInProgress = isLogoutInProgress();
const { data: profile } = useGetV2GetUserProfile({
query: {
select: okData,
+ enabled: isLoggedIn && !!user && !logoutInProgress,
+ queryKey: ["/api/store/profile", user?.id],
},
});
diff --git a/autogpt_platform/frontend/src/app/(platform)/library/components/LibraryUploadAgentDialog/LibraryUploadAgentDialog.tsx b/autogpt_platform/frontend/src/app/(platform)/library/components/LibraryUploadAgentDialog/LibraryUploadAgentDialog.tsx
index 1a6999721e..436be6f15a 100644
--- a/autogpt_platform/frontend/src/app/(platform)/library/components/LibraryUploadAgentDialog/LibraryUploadAgentDialog.tsx
+++ b/autogpt_platform/frontend/src/app/(platform)/library/components/LibraryUploadAgentDialog/LibraryUploadAgentDialog.tsx
@@ -2,6 +2,7 @@
import { Button } from "@/components/atoms/Button/Button";
import { FileInput } from "@/components/atoms/FileInput/FileInput";
import { Input } from "@/components/atoms/Input/Input";
+import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import {
Form,
@@ -120,7 +121,7 @@ export default function LibraryUploadAgentDialog() {
>
{isUploading ? (
) : (
diff --git a/autogpt_platform/frontend/src/app/(platform)/login/actions.ts b/autogpt_platform/frontend/src/app/(platform)/login/actions.ts
index 936c879d69..447a25a41d 100644
--- a/autogpt_platform/frontend/src/app/(platform)/login/actions.ts
+++ b/autogpt_platform/frontend/src/app/(platform)/login/actions.ts
@@ -1,10 +1,11 @@
"use server";
+import { getHomepageRoute } from "@/lib/constants";
import BackendAPI from "@/lib/autogpt-server-api";
import { getServerSupabase } from "@/lib/supabase/server/getServerSupabase";
import { loginFormSchema } from "@/types/auth";
import * as Sentry from "@sentry/nextjs";
-import { shouldShowOnboarding } from "../../api/helpers";
+import { getOnboardingStatus } from "../../api/helpers";
export async function login(email: string, password: string) {
try {
@@ -36,11 +37,15 @@ export async function login(email: string, password: string) {
const api = new BackendAPI();
await api.createUser();
- const onboarding = await shouldShowOnboarding();
+ // Get onboarding status from backend (includes chat flag evaluated for this user)
+ const { shouldShowOnboarding, isChatEnabled } = await getOnboardingStatus();
+ const next = shouldShowOnboarding
+ ? "/onboarding"
+ : getHomepageRoute(isChatEnabled);
return {
success: true,
- onboarding,
+ next,
};
} catch (err) {
Sentry.captureException(err);
diff --git a/autogpt_platform/frontend/src/app/(platform)/login/useLoginPage.ts b/autogpt_platform/frontend/src/app/(platform)/login/useLoginPage.ts
index 656e1febc2..e64cc1858d 100644
--- a/autogpt_platform/frontend/src/app/(platform)/login/useLoginPage.ts
+++ b/autogpt_platform/frontend/src/app/(platform)/login/useLoginPage.ts
@@ -1,6 +1,8 @@
import { useToast } from "@/components/molecules/Toast/use-toast";
+import { getHomepageRoute } from "@/lib/constants";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { environment } from "@/services/environment";
+import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
import { loginFormSchema, LoginProvider } from "@/types/auth";
import { zodResolver } from "@hookform/resolvers/zod";
import { useRouter, useSearchParams } from "next/navigation";
@@ -20,15 +22,17 @@ export function useLoginPage() {
const [isGoogleLoading, setIsGoogleLoading] = useState(false);
const [showNotAllowedModal, setShowNotAllowedModal] = useState(false);
const isCloudEnv = environment.isCloud();
+ const isChatEnabled = useGetFlag(Flag.CHAT);
+ const homepageRoute = getHomepageRoute(isChatEnabled);
// Get redirect destination from 'next' query parameter
const nextUrl = searchParams.get("next");
useEffect(() => {
if (isLoggedIn && !isLoggingIn) {
- router.push(nextUrl || "/marketplace");
+ router.push(nextUrl || homepageRoute);
}
- }, [isLoggedIn, isLoggingIn, nextUrl, router]);
+ }, [homepageRoute, isLoggedIn, isLoggingIn, nextUrl, router]);
const form = useForm
>({
resolver: zodResolver(loginFormSchema),
@@ -93,13 +97,8 @@ export function useLoginPage() {
throw new Error(result.error || "Login failed");
}
- if (nextUrl) {
- router.replace(nextUrl);
- } else if (result.onboarding) {
- router.replace("/onboarding");
- } else {
- router.replace("/marketplace");
- }
+ // Prefer URL's next parameter, then use backend-determined route
+ router.replace(nextUrl || result.next || homepageRoute);
} catch (error) {
toast({
title:
diff --git a/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainMarketplacePage/__tests__/main.test.tsx b/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainMarketplacePage/__tests__/main.test.tsx
new file mode 100644
index 0000000000..bee227a7af
--- /dev/null
+++ b/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainMarketplacePage/__tests__/main.test.tsx
@@ -0,0 +1,15 @@
+import { expect, test } from "vitest";
+import { render, screen } from "@/tests/integrations/test-utils";
+import { MainMarkeplacePage } from "../MainMarketplacePage";
+import { server } from "@/mocks/mock-server";
+import { getDeleteV2DeleteStoreSubmissionMockHandler422 } from "@/app/api/__generated__/endpoints/store/store.msw";
+
+// Only for CI testing purpose, will remove it in future PR
+test("MainMarketplacePage", async () => {
+ server.use(getDeleteV2DeleteStoreSubmissionMockHandler422());
+
+ render();
+ expect(
+ await screen.findByText("Featured agents", { exact: false }),
+ ).toBeDefined();
+});
diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/page.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/page.tsx
index 260fbc0b52..979b113f55 100644
--- a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/page.tsx
+++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/page.tsx
@@ -3,12 +3,14 @@
import { useGetV2GetUserProfile } from "@/app/api/__generated__/endpoints/store/store";
import { ProfileInfoForm } from "@/components/__legacy__/ProfileInfoForm";
import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
+import { isLogoutInProgress } from "@/lib/autogpt-server-api/helpers";
import { ProfileDetails } from "@/lib/autogpt-server-api/types";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { ProfileLoading } from "./ProfileLoading";
export default function UserProfilePage() {
const { user } = useSupabase();
+ const logoutInProgress = isLogoutInProgress();
const {
data: profile,
@@ -18,7 +20,7 @@ export default function UserProfilePage() {
refetch,
} = useGetV2GetUserProfile({
query: {
- enabled: !!user,
+ enabled: !!user && !logoutInProgress,
select: (res) => {
if (res.status === 200) {
return {
diff --git a/autogpt_platform/frontend/src/app/(platform)/signup/actions.ts b/autogpt_platform/frontend/src/app/(platform)/signup/actions.ts
index 68f7ae10ec..0fbba54b8e 100644
--- a/autogpt_platform/frontend/src/app/(platform)/signup/actions.ts
+++ b/autogpt_platform/frontend/src/app/(platform)/signup/actions.ts
@@ -1,10 +1,11 @@
"use server";
+import { getHomepageRoute } from "@/lib/constants";
import { getServerSupabase } from "@/lib/supabase/server/getServerSupabase";
import { signupFormSchema } from "@/types/auth";
import * as Sentry from "@sentry/nextjs";
import { isWaitlistError, logWaitlistError } from "../../api/auth/utils";
-import { shouldShowOnboarding } from "../../api/helpers";
+import { getOnboardingStatus } from "../../api/helpers";
export async function signup(
email: string,
@@ -57,8 +58,11 @@ export async function signup(
await supabase.auth.setSession(data.session);
}
- const isOnboardingEnabled = await shouldShowOnboarding();
- const next = isOnboardingEnabled ? "/onboarding" : "/";
+ // Get onboarding status from backend (includes chat flag evaluated for this user)
+ const { shouldShowOnboarding, isChatEnabled } = await getOnboardingStatus();
+ const next = shouldShowOnboarding
+ ? "/onboarding"
+ : getHomepageRoute(isChatEnabled);
return { success: true, next };
} catch (err) {
diff --git a/autogpt_platform/frontend/src/app/(platform)/signup/useSignupPage.ts b/autogpt_platform/frontend/src/app/(platform)/signup/useSignupPage.ts
index e6d7c68aef..5fa8c2c159 100644
--- a/autogpt_platform/frontend/src/app/(platform)/signup/useSignupPage.ts
+++ b/autogpt_platform/frontend/src/app/(platform)/signup/useSignupPage.ts
@@ -1,6 +1,8 @@
import { useToast } from "@/components/molecules/Toast/use-toast";
+import { getHomepageRoute } from "@/lib/constants";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { environment } from "@/services/environment";
+import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
import { LoginProvider, signupFormSchema } from "@/types/auth";
import { zodResolver } from "@hookform/resolvers/zod";
import { useRouter, useSearchParams } from "next/navigation";
@@ -20,15 +22,17 @@ export function useSignupPage() {
const [isGoogleLoading, setIsGoogleLoading] = useState(false);
const [showNotAllowedModal, setShowNotAllowedModal] = useState(false);
const isCloudEnv = environment.isCloud();
+ const isChatEnabled = useGetFlag(Flag.CHAT);
+ const homepageRoute = getHomepageRoute(isChatEnabled);
// Get redirect destination from 'next' query parameter
const nextUrl = searchParams.get("next");
useEffect(() => {
if (isLoggedIn && !isSigningUp) {
- router.push(nextUrl || "/marketplace");
+ router.push(nextUrl || homepageRoute);
}
- }, [isLoggedIn, isSigningUp, nextUrl, router]);
+ }, [homepageRoute, isLoggedIn, isSigningUp, nextUrl, router]);
const form = useForm>({
resolver: zodResolver(signupFormSchema),
@@ -129,7 +133,7 @@ export function useSignupPage() {
}
// Prefer the URL's next parameter, then result.next (for onboarding), then default
- const redirectTo = nextUrl || result.next || "/";
+ const redirectTo = nextUrl || result.next || homepageRoute;
router.replace(redirectTo);
} catch (error) {
setIsLoading(false);
diff --git a/autogpt_platform/frontend/src/app/api/helpers.ts b/autogpt_platform/frontend/src/app/api/helpers.ts
index e9a708ba4c..c2104d231a 100644
--- a/autogpt_platform/frontend/src/app/api/helpers.ts
+++ b/autogpt_platform/frontend/src/app/api/helpers.ts
@@ -175,9 +175,12 @@ export async function resolveResponse<
return res.data;
}
-export async function shouldShowOnboarding() {
- const isEnabled = await resolveResponse(getV1IsOnboardingEnabled());
+export async function getOnboardingStatus() {
+ const status = await resolveResponse(getV1IsOnboardingEnabled());
const onboarding = await resolveResponse(getV1OnboardingState());
const isCompleted = onboarding.completedSteps.includes("CONGRATS");
- return isEnabled && !isCompleted;
+ return {
+ shouldShowOnboarding: status.is_onboarding_enabled && !isCompleted,
+ isChatEnabled: status.is_chat_enabled,
+ };
}
diff --git a/autogpt_platform/frontend/src/app/api/mutators/custom-mutator.ts b/autogpt_platform/frontend/src/app/api/mutators/custom-mutator.ts
index 4578ac03fe..3c9eda7785 100644
--- a/autogpt_platform/frontend/src/app/api/mutators/custom-mutator.ts
+++ b/autogpt_platform/frontend/src/app/api/mutators/custom-mutator.ts
@@ -4,12 +4,12 @@ import {
getServerAuthToken,
} from "@/lib/autogpt-server-api/helpers";
-import { transformDates } from "./date-transformer";
-import { environment } from "@/services/environment";
import {
IMPERSONATION_HEADER_NAME,
IMPERSONATION_STORAGE_KEY,
} from "@/lib/constants";
+import { environment } from "@/services/environment";
+import { transformDates } from "./date-transformer";
const FRONTEND_BASE_URL =
process.env.NEXT_PUBLIC_FRONTEND_BASE_URL || "http://localhost:3000";
diff --git a/autogpt_platform/frontend/src/app/api/openapi.json b/autogpt_platform/frontend/src/app/api/openapi.json
index fc4e737651..6692c30e72 100644
--- a/autogpt_platform/frontend/src/app/api/openapi.json
+++ b/autogpt_platform/frontend/src/app/api/openapi.json
@@ -1022,7 +1022,7 @@
"get": {
"tags": ["v2", "chat", "chat"],
"summary": "Get Session",
- "description": "Retrieve the details of a specific chat session.\n\nLooks up a chat session by ID for the given user (if authenticated) and returns all session data including messages.\n\nArgs:\n session_id: The unique identifier for the desired chat session.\n user_id: The optional authenticated user ID, or None for anonymous access.\n\nReturns:\n SessionDetailResponse: Details for the requested session; raises NotFoundError if not found.",
+ "description": "Retrieve the details of a specific chat session.\n\nLooks up a chat session by ID for the given user (if authenticated) and returns all session data including messages.\n\nArgs:\n session_id: The unique identifier for the desired chat session.\n user_id: The optional authenticated user ID, or None for anonymous access.\n\nReturns:\n SessionDetailResponse: Details for the requested session, or None if not found.",
"operationId": "getV2GetSession",
"security": [{ "HTTPBearerJWT": [] }],
"parameters": [
@@ -3339,7 +3339,7 @@
"get": {
"tags": ["v2", "library", "private"],
"summary": "List Library Agents",
- "description": "Get all agents in the user's library (both created and saved).\n\nArgs:\n user_id: ID of the authenticated user.\n search_term: Optional search term to filter agents by name/description.\n filter_by: List of filters to apply (favorites, created by user).\n sort_by: List of sorting criteria (created date, updated date).\n page: Page number to retrieve.\n page_size: Number of agents per page.\n\nReturns:\n A LibraryAgentResponse containing agents and pagination metadata.\n\nRaises:\n HTTPException: If a server/database error occurs.",
+ "description": "Get all agents in the user's library (both created and saved).",
"operationId": "getV2List library agents",
"security": [{ "HTTPBearerJWT": [] }],
"parameters": [
@@ -3394,7 +3394,7 @@
],
"responses": {
"200": {
- "description": "List of library agents",
+ "description": "Successful Response",
"content": {
"application/json": {
"schema": {
@@ -3413,17 +3413,13 @@
"schema": { "$ref": "#/components/schemas/HTTPValidationError" }
}
}
- },
- "500": {
- "description": "Server error",
- "content": { "application/json": {} }
}
}
},
"post": {
"tags": ["v2", "library", "private"],
"summary": "Add Marketplace Agent",
- "description": "Add an agent from the marketplace to the user's library.\n\nArgs:\n store_listing_version_id: ID of the store listing version to add.\n user_id: ID of the authenticated user.\n\nReturns:\n library_model.LibraryAgent: Agent added to the library\n\nRaises:\n HTTPException(404): If the listing version is not found.\n HTTPException(500): If a server/database error occurs.",
+ "description": "Add an agent from the marketplace to the user's library.",
"operationId": "postV2Add marketplace agent",
"security": [{ "HTTPBearerJWT": [] }],
"requestBody": {
@@ -3438,7 +3434,7 @@
},
"responses": {
"201": {
- "description": "Agent added successfully",
+ "description": "Successful Response",
"content": {
"application/json": {
"schema": { "$ref": "#/components/schemas/LibraryAgent" }
@@ -3448,7 +3444,6 @@
"401": {
"$ref": "#/components/responses/HTTP401NotAuthenticatedError"
},
- "404": { "description": "Store listing version not found" },
"422": {
"description": "Validation Error",
"content": {
@@ -3456,8 +3451,7 @@
"schema": { "$ref": "#/components/schemas/HTTPValidationError" }
}
}
- },
- "500": { "description": "Server error" }
+ }
}
}
},
@@ -3511,7 +3505,7 @@
"get": {
"tags": ["v2", "library", "private"],
"summary": "List Favorite Library Agents",
- "description": "Get all favorite agents in the user's library.\n\nArgs:\n user_id: ID of the authenticated user.\n page: Page number to retrieve.\n page_size: Number of agents per page.\n\nReturns:\n A LibraryAgentResponse containing favorite agents and pagination metadata.\n\nRaises:\n HTTPException: If a server/database error occurs.",
+ "description": "Get all favorite agents in the user's library.",
"operationId": "getV2List favorite library agents",
"security": [{ "HTTPBearerJWT": [] }],
"parameters": [
@@ -3563,10 +3557,6 @@
"schema": { "$ref": "#/components/schemas/HTTPValidationError" }
}
}
- },
- "500": {
- "description": "Server error",
- "content": { "application/json": {} }
}
}
}
@@ -3588,7 +3578,7 @@
],
"responses": {
"200": {
- "description": "Library agent found",
+ "description": "Successful Response",
"content": {
"application/json": {
"schema": {
@@ -3604,7 +3594,6 @@
"401": {
"$ref": "#/components/responses/HTTP401NotAuthenticatedError"
},
- "404": { "description": "Agent not found" },
"422": {
"description": "Validation Error",
"content": {
@@ -3620,7 +3609,7 @@
"delete": {
"tags": ["v2", "library", "private"],
"summary": "Delete Library Agent",
- "description": "Soft-delete the specified library agent.\n\nArgs:\n library_agent_id: ID of the library agent to delete.\n user_id: ID of the authenticated user.\n\nReturns:\n 204 No Content if successful.\n\nRaises:\n HTTPException(404): If the agent does not exist.\n HTTPException(500): If a server/database error occurs.",
+ "description": "Soft-delete the specified library agent.",
"operationId": "deleteV2Delete library agent",
"security": [{ "HTTPBearerJWT": [] }],
"parameters": [
@@ -3636,11 +3625,9 @@
"description": "Successful Response",
"content": { "application/json": { "schema": {} } }
},
- "204": { "description": "Agent deleted successfully" },
"401": {
"$ref": "#/components/responses/HTTP401NotAuthenticatedError"
},
- "404": { "description": "Agent not found" },
"422": {
"description": "Validation Error",
"content": {
@@ -3648,8 +3635,7 @@
"schema": { "$ref": "#/components/schemas/HTTPValidationError" }
}
}
- },
- "500": { "description": "Server error" }
+ }
}
},
"get": {
@@ -3690,7 +3676,7 @@
"patch": {
"tags": ["v2", "library", "private"],
"summary": "Update Library Agent",
- "description": "Update the library agent with the given fields.\n\nArgs:\n library_agent_id: ID of the library agent to update.\n payload: Fields to update (auto_update_version, is_favorite, etc.).\n user_id: ID of the authenticated user.\n\nRaises:\n HTTPException(500): If a server/database error occurs.",
+ "description": "Update the library agent with the given fields.",
"operationId": "patchV2Update library agent",
"security": [{ "HTTPBearerJWT": [] }],
"parameters": [
@@ -3713,7 +3699,7 @@
},
"responses": {
"200": {
- "description": "Agent updated successfully",
+ "description": "Successful Response",
"content": {
"application/json": {
"schema": { "$ref": "#/components/schemas/LibraryAgent" }
@@ -3730,8 +3716,7 @@
"schema": { "$ref": "#/components/schemas/HTTPValidationError" }
}
}
- },
- "500": { "description": "Server error" }
+ }
}
}
},
@@ -4540,8 +4525,7 @@
"content": {
"application/json": {
"schema": {
- "type": "boolean",
- "title": "Response Getv1Is Onboarding Enabled"
+ "$ref": "#/components/schemas/OnboardingStatusResponse"
}
}
}
@@ -4594,6 +4578,7 @@
"AGENT_NEW_RUN",
"AGENT_INPUT",
"CONGRATS",
+ "VISIT_COPILOT",
"MARKETPLACE_VISIT",
"BUILDER_OPEN"
],
@@ -5927,6 +5912,40 @@
}
}
},
+ "/api/workspace/files/{file_id}/download": {
+ "get": {
+ "tags": ["workspace"],
+ "summary": "Download file by ID",
+ "description": "Download a file by its ID.\n\nReturns the file content directly or redirects to a signed URL for GCS.",
+ "operationId": "getWorkspaceDownload file by id",
+ "security": [{ "HTTPBearerJWT": [] }],
+ "parameters": [
+ {
+ "name": "file_id",
+ "in": "path",
+ "required": true,
+ "schema": { "type": "string", "title": "File Id" }
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Successful Response",
+ "content": { "application/json": { "schema": {} } }
+ },
+ "401": {
+ "$ref": "#/components/responses/HTTP401NotAuthenticatedError"
+ },
+ "422": {
+ "description": "Validation Error",
+ "content": {
+ "application/json": {
+ "schema": { "$ref": "#/components/schemas/HTTPValidationError" }
+ }
+ }
+ }
+ }
+ }
+ },
"/health": {
"get": {
"tags": ["health"],
@@ -6383,6 +6402,11 @@
"title": "Has Human In The Loop",
"readOnly": true
},
+ "has_sensitive_action": {
+ "type": "boolean",
+ "title": "Has Sensitive Action",
+ "readOnly": true
+ },
"trigger_setup_info": {
"anyOf": [
{ "$ref": "#/components/schemas/GraphTriggerInfo" },
@@ -6399,6 +6423,7 @@
"output_schema",
"has_external_trigger",
"has_human_in_the_loop",
+ "has_sensitive_action",
"trigger_setup_info"
],
"title": "BaseGraph"
@@ -7629,6 +7654,11 @@
"title": "Has Human In The Loop",
"readOnly": true
},
+ "has_sensitive_action": {
+ "type": "boolean",
+ "title": "Has Sensitive Action",
+ "readOnly": true
+ },
"trigger_setup_info": {
"anyOf": [
{ "$ref": "#/components/schemas/GraphTriggerInfo" },
@@ -7652,6 +7682,7 @@
"output_schema",
"has_external_trigger",
"has_human_in_the_loop",
+ "has_sensitive_action",
"trigger_setup_info",
"credentials_input_schema"
],
@@ -7730,6 +7761,11 @@
"title": "Has Human In The Loop",
"readOnly": true
},
+ "has_sensitive_action": {
+ "type": "boolean",
+ "title": "Has Sensitive Action",
+ "readOnly": true
+ },
"trigger_setup_info": {
"anyOf": [
{ "$ref": "#/components/schemas/GraphTriggerInfo" },
@@ -7754,6 +7790,7 @@
"output_schema",
"has_external_trigger",
"has_human_in_the_loop",
+ "has_sensitive_action",
"trigger_setup_info",
"credentials_input_schema"
],
@@ -7762,8 +7799,14 @@
"GraphSettings": {
"properties": {
"human_in_the_loop_safe_mode": {
- "anyOf": [{ "type": "boolean" }, { "type": "null" }],
- "title": "Human In The Loop Safe Mode"
+ "type": "boolean",
+ "title": "Human In The Loop Safe Mode",
+ "default": true
+ },
+ "sensitive_action_safe_mode": {
+ "type": "boolean",
+ "title": "Sensitive Action Safe Mode",
+ "default": false
}
},
"type": "object",
@@ -7921,6 +7964,16 @@
"title": "Has External Trigger",
"description": "Whether the agent has an external trigger (e.g. webhook) node"
},
+ "has_human_in_the_loop": {
+ "type": "boolean",
+ "title": "Has Human In The Loop",
+ "description": "Whether the agent has human-in-the-loop blocks"
+ },
+ "has_sensitive_action": {
+ "type": "boolean",
+ "title": "Has Sensitive Action",
+ "description": "Whether the agent has sensitive action blocks"
+ },
"trigger_setup_info": {
"anyOf": [
{ "$ref": "#/components/schemas/GraphTriggerInfo" },
@@ -7967,6 +8020,8 @@
"output_schema",
"credentials_input_schema",
"has_external_trigger",
+ "has_human_in_the_loop",
+ "has_sensitive_action",
"new_output",
"can_access_graph",
"is_latest_version",
@@ -8708,6 +8763,19 @@
"title": "OAuthApplicationPublicInfo",
"description": "Public information about an OAuth application (for consent screen)"
},
+ "OnboardingStatusResponse": {
+ "properties": {
+ "is_onboarding_enabled": {
+ "type": "boolean",
+ "title": "Is Onboarding Enabled"
+ },
+ "is_chat_enabled": { "type": "boolean", "title": "Is Chat Enabled" }
+ },
+ "type": "object",
+ "required": ["is_onboarding_enabled", "is_chat_enabled"],
+ "title": "OnboardingStatusResponse",
+ "description": "Response for onboarding status check."
+ },
"OnboardingStep": {
"type": "string",
"enum": [
@@ -8718,6 +8786,7 @@
"AGENT_NEW_RUN",
"AGENT_INPUT",
"CONGRATS",
+ "VISIT_COPILOT",
"GET_RESULTS",
"MARKETPLACE_VISIT",
"MARKETPLACE_ADD_AGENT",
@@ -8773,6 +8842,12 @@
"title": "Node Exec Id",
"description": "Node execution ID (primary key)"
},
+ "node_id": {
+ "type": "string",
+ "title": "Node Id",
+ "description": "Node definition ID (for grouping)",
+ "default": ""
+ },
"user_id": {
"type": "string",
"title": "User Id",
@@ -8872,7 +8947,7 @@
"created_at"
],
"title": "PendingHumanReviewModel",
- "description": "Response model for pending human review data.\n\nRepresents a human review request that is awaiting user action.\nContains all necessary information for a user to review and approve\nor reject data from a Human-in-the-Loop block execution.\n\nAttributes:\n id: Unique identifier for the review record\n user_id: ID of the user who must perform the review\n node_exec_id: ID of the node execution that created this review\n graph_exec_id: ID of the graph execution containing the node\n graph_id: ID of the graph template being executed\n graph_version: Version number of the graph template\n payload: The actual data payload awaiting review\n instructions: Instructions or message for the reviewer\n editable: Whether the reviewer can edit the data\n status: Current review status (WAITING, APPROVED, or REJECTED)\n review_message: Optional message from the reviewer\n created_at: Timestamp when review was created\n updated_at: Timestamp when review was last modified\n reviewed_at: Timestamp when review was completed (if applicable)"
+ "description": "Response model for pending human review data.\n\nRepresents a human review request that is awaiting user action.\nContains all necessary information for a user to review and approve\nor reject data from a Human-in-the-Loop block execution.\n\nAttributes:\n id: Unique identifier for the review record\n user_id: ID of the user who must perform the review\n node_exec_id: ID of the node execution that created this review\n node_id: ID of the node definition (for grouping reviews from same node)\n graph_exec_id: ID of the graph execution containing the node\n graph_id: ID of the graph template being executed\n graph_version: Version number of the graph template\n payload: The actual data payload awaiting review\n instructions: Instructions or message for the reviewer\n editable: Whether the reviewer can edit the data\n status: Current review status (WAITING, APPROVED, or REJECTED)\n review_message: Optional message from the reviewer\n created_at: Timestamp when review was created\n updated_at: Timestamp when review was last modified\n reviewed_at: Timestamp when review was completed (if applicable)"
},
"PostmarkBounceEnum": {
"type": "integer",
@@ -9375,6 +9450,12 @@
],
"title": "Reviewed Data",
"description": "Optional edited data (ignored if approved=False)"
+ },
+ "auto_approve_future": {
+ "type": "boolean",
+ "title": "Auto Approve Future",
+ "description": "If true and this review is approved, future executions of this same block (node) will be automatically approved. This only affects approved reviews.",
+ "default": false
}
},
"type": "object",
@@ -9394,7 +9475,7 @@
"type": "object",
"required": ["reviews"],
"title": "ReviewRequest",
- "description": "Request model for processing ALL pending reviews for an execution.\n\nThis request must include ALL pending reviews for a graph execution.\nEach review will be either approved (with optional data modifications)\nor rejected (data ignored). The execution will resume only after ALL reviews are processed."
+ "description": "Request model for processing ALL pending reviews for an execution.\n\nThis request must include ALL pending reviews for a graph execution.\nEach review will be either approved (with optional data modifications)\nor rejected (data ignored). The execution will resume only after ALL reviews are processed.\n\nEach review item can individually specify whether to auto-approve future executions\nof the same block via the `auto_approve_future` field on ReviewItem."
},
"ReviewResponse": {
"properties": {
diff --git a/autogpt_platform/frontend/src/app/api/proxy/[...path]/route.ts b/autogpt_platform/frontend/src/app/api/proxy/[...path]/route.ts
index 293c406373..442bd77e0f 100644
--- a/autogpt_platform/frontend/src/app/api/proxy/[...path]/route.ts
+++ b/autogpt_platform/frontend/src/app/api/proxy/[...path]/route.ts
@@ -1,5 +1,6 @@
import {
ApiError,
+ getServerAuthToken,
makeAuthenticatedFileUpload,
makeAuthenticatedRequest,
} from "@/lib/autogpt-server-api/helpers";
@@ -15,6 +16,69 @@ function buildBackendUrl(path: string[], queryString: string): string {
return `${environment.getAGPTServerBaseUrl()}/${backendPath}${queryString}`;
}
+/**
+ * Check if this is a workspace file download request that needs binary response handling.
+ */
+function isWorkspaceDownloadRequest(path: string[]): boolean {
+ // Match pattern: api/workspace/files/{id}/download (5 segments)
+ return (
+ path.length == 5 &&
+ path[0] === "api" &&
+ path[1] === "workspace" &&
+ path[2] === "files" &&
+ path[path.length - 1] === "download"
+ );
+}
+
+/**
+ * Handle workspace file download requests with proper binary response streaming.
+ */
+async function handleWorkspaceDownload(
+ req: NextRequest,
+ backendUrl: string,
+): Promise<NextResponse> {
+ const token = await getServerAuthToken();
+
+  const headers: Record<string, string> = {};
+ if (token && token !== "no-token-found") {
+ headers["Authorization"] = `Bearer ${token}`;
+ }
+
+ const response = await fetch(backendUrl, {
+ method: "GET",
+ headers,
+ redirect: "follow", // Follow redirects to signed URLs
+ });
+
+ if (!response.ok) {
+ return NextResponse.json(
+ { error: `Failed to download file: ${response.statusText}` },
+ { status: response.status },
+ );
+ }
+
+ // Get the content type from the backend response
+ const contentType =
+ response.headers.get("Content-Type") || "application/octet-stream";
+ const contentDisposition = response.headers.get("Content-Disposition");
+
+ // Stream the response body
+  const responseHeaders: Record<string, string> = {
+ "Content-Type": contentType,
+ };
+
+ if (contentDisposition) {
+ responseHeaders["Content-Disposition"] = contentDisposition;
+ }
+
+ // Return the binary content
+ const arrayBuffer = await response.arrayBuffer();
+ return new NextResponse(arrayBuffer, {
+ status: 200,
+ headers: responseHeaders,
+ });
+}
+
async function handleJsonRequest(
req: NextRequest,
method: string,
@@ -180,6 +244,11 @@ async function handler(
};
try {
+ // Handle workspace file downloads separately (binary response)
+ if (method === "GET" && isWorkspaceDownloadRequest(path)) {
+ return await handleWorkspaceDownload(req, backendUrl);
+ }
+
if (method === "GET" || method === "DELETE") {
responseBody = await handleGetDeleteRequest(method, backendUrl, req);
} else if (contentType?.includes("application/json")) {
diff --git a/autogpt_platform/frontend/src/app/api/transcribe/route.ts b/autogpt_platform/frontend/src/app/api/transcribe/route.ts
new file mode 100644
index 0000000000..10c182cdfa
--- /dev/null
+++ b/autogpt_platform/frontend/src/app/api/transcribe/route.ts
@@ -0,0 +1,77 @@
+import { getServerAuthToken } from "@/lib/autogpt-server-api/helpers";
+import { NextRequest, NextResponse } from "next/server";
+
+const WHISPER_API_URL = "https://api.openai.com/v1/audio/transcriptions";
+const MAX_FILE_SIZE = 25 * 1024 * 1024; // 25MB - Whisper's limit
+
+function getExtensionFromMimeType(mimeType: string): string {
+ const subtype = mimeType.split("/")[1]?.split(";")[0];
+ return subtype || "webm";
+}
+
+export async function POST(request: NextRequest) {
+ const token = await getServerAuthToken();
+
+ if (!token || token === "no-token-found") {
+ return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
+ }
+
+ const apiKey = process.env.OPENAI_API_KEY;
+
+ if (!apiKey) {
+ return NextResponse.json(
+ { error: "OpenAI API key not configured" },
+ { status: 401 },
+ );
+ }
+
+ try {
+ const formData = await request.formData();
+ const audioFile = formData.get("audio");
+
+ if (!audioFile || !(audioFile instanceof Blob)) {
+ return NextResponse.json(
+ { error: "No audio file provided" },
+ { status: 400 },
+ );
+ }
+
+ if (audioFile.size > MAX_FILE_SIZE) {
+ return NextResponse.json(
+ { error: "File too large. Maximum size is 25MB." },
+ { status: 413 },
+ );
+ }
+
+ const ext = getExtensionFromMimeType(audioFile.type);
+ const whisperFormData = new FormData();
+ whisperFormData.append("file", audioFile, `recording.${ext}`);
+ whisperFormData.append("model", "whisper-1");
+
+ const response = await fetch(WHISPER_API_URL, {
+ method: "POST",
+ headers: {
+ Authorization: `Bearer ${apiKey}`,
+ },
+ body: whisperFormData,
+ });
+
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ console.error("Whisper API error:", errorData);
+ return NextResponse.json(
+ { error: errorData.error?.message || "Transcription failed" },
+ { status: response.status },
+ );
+ }
+
+ const result = await response.json();
+ return NextResponse.json({ text: result.text });
+ } catch (error) {
+ console.error("Transcription error:", error);
+ return NextResponse.json(
+ { error: "Failed to process audio" },
+ { status: 500 },
+ );
+ }
+}
diff --git a/autogpt_platform/frontend/src/app/globals.css b/autogpt_platform/frontend/src/app/globals.css
index 0625c26082..1f782f753b 100644
--- a/autogpt_platform/frontend/src/app/globals.css
+++ b/autogpt_platform/frontend/src/app/globals.css
@@ -141,52 +141,6 @@
}
}
-@keyframes shimmer {
- 0% {
- background-position: -200% 0;
- }
- 100% {
- background-position: 200% 0;
- }
-}
-
-@keyframes l3 {
- 25% {
- background-position:
- 0 0,
- 100% 100%,
- 100% calc(100% - 5px);
- }
- 50% {
- background-position:
- 0 100%,
- 100% 100%,
- 0 calc(100% - 5px);
- }
- 75% {
- background-position:
- 0 100%,
- 100% 0,
- 100% 5px;
- }
-}
-
-.loader {
- width: 80px;
- height: 70px;
- border: 5px solid rgb(241 245 249);
- padding: 0 8px;
- box-sizing: border-box;
- background:
- linear-gradient(rgb(15 23 42) 0 0) 0 0/8px 20px,
- linear-gradient(rgb(15 23 42) 0 0) 100% 0/8px 20px,
- radial-gradient(farthest-side, rgb(15 23 42) 90%, #0000) 0 5px/8px 8px
- content-box,
- transparent;
- background-repeat: no-repeat;
- animation: l3 2s infinite linear;
-}
-
input[type="number"]::-webkit-outer-spin-button,
input[type="number"]::-webkit-inner-spin-button {
-webkit-appearance: none;
diff --git a/autogpt_platform/frontend/src/app/page.tsx b/autogpt_platform/frontend/src/app/page.tsx
index b499a40d71..dbfab49469 100644
--- a/autogpt_platform/frontend/src/app/page.tsx
+++ b/autogpt_platform/frontend/src/app/page.tsx
@@ -1,5 +1,27 @@
-import { redirect } from "next/navigation";
+"use client";
+
+import { getHomepageRoute } from "@/lib/constants";
+import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
+import { useRouter } from "next/navigation";
+import { useEffect } from "react";
export default function Page() {
- redirect("/marketplace");
+ const isChatEnabled = useGetFlag(Flag.CHAT);
+ const router = useRouter();
+ const homepageRoute = getHomepageRoute(isChatEnabled);
+ const envEnabled = process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true";
+ const clientId = process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID;
+ const isLaunchDarklyConfigured = envEnabled && Boolean(clientId);
+ const isFlagReady =
+ !isLaunchDarklyConfigured || typeof isChatEnabled === "boolean";
+
+ useEffect(
+ function redirectToHomepage() {
+ if (!isFlagReady) return;
+ router.replace(homepageRoute);
+ },
+ [homepageRoute, isFlagReady, router],
+ );
+
+ return null;
}
diff --git a/autogpt_platform/frontend/src/app/providers.tsx b/autogpt_platform/frontend/src/app/providers.tsx
index 8ea199abc8..267814e7c2 100644
--- a/autogpt_platform/frontend/src/app/providers.tsx
+++ b/autogpt_platform/frontend/src/app/providers.tsx
@@ -6,28 +6,40 @@ import { BackendAPIProvider } from "@/lib/autogpt-server-api/context";
import { getQueryClient } from "@/lib/react-query/queryClient";
import CredentialsProvider from "@/providers/agent-credentials/credentials-provider";
import OnboardingProvider from "@/providers/onboarding/onboarding-provider";
+import {
+ PostHogPageViewTracker,
+ PostHogProvider,
+ PostHogUserTracker,
+} from "@/providers/posthog/posthog-provider";
import { LaunchDarklyProvider } from "@/services/feature-flags/feature-flag-provider";
import { QueryClientProvider } from "@tanstack/react-query";
import { ThemeProvider, ThemeProviderProps } from "next-themes";
import { NuqsAdapter } from "nuqs/adapters/next/app";
+import { Suspense } from "react";
export function Providers({ children, ...props }: ThemeProviderProps) {
const queryClient = getQueryClient();
return (
-
-
-
-
-
-
- {children}
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+ {children}
+
+
+
+
+
+
);
diff --git a/autogpt_platform/frontend/src/components/atoms/Badge/Badge.test.tsx b/autogpt_platform/frontend/src/components/atoms/Badge/Badge.test.tsx
deleted file mode 100644
index cd8531375b..0000000000
--- a/autogpt_platform/frontend/src/components/atoms/Badge/Badge.test.tsx
+++ /dev/null
@@ -1,81 +0,0 @@
-// import { render, screen } from "@testing-library/react";
-// import { describe, expect, it } from "vitest";
-// import { Badge } from "./Badge";
-
-// describe("Badge Component", () => {
-// it("renders badge with content", () => {
-// render(Success);
-
-// expect(screen.getByText("Success")).toBeInTheDocument();
-// });
-
-// it("applies correct variant styles", () => {
-// const { rerender } = render(Success);
-// let badge = screen.getByText("Success");
-// expect(badge).toHaveClass("bg-green-100", "text-green-800");
-
-// rerender(Error);
-// badge = screen.getByText("Error");
-// expect(badge).toHaveClass("bg-red-100", "text-red-800");
-
-// rerender(Info);
-// badge = screen.getByText("Info");
-// expect(badge).toHaveClass("bg-slate-100", "text-slate-800");
-// });
-
-// it("applies custom className", () => {
-// render(
-//
-// Success
-// ,
-// );
-
-// const badge = screen.getByText("Success");
-// expect(badge).toHaveClass("custom-class");
-// });
-
-// it("renders as span element", () => {
-// render(Success);
-
-// const badge = screen.getByText("Success");
-// expect(badge.tagName).toBe("SPAN");
-// });
-
-// it("renders children correctly", () => {
-// render(
-//
-// Custom Content
-// ,
-// );
-
-// expect(screen.getByText("Custom")).toBeInTheDocument();
-// expect(screen.getByText("Content")).toBeInTheDocument();
-// });
-
-// it("supports all badge variants", () => {
-// const variants = ["success", "error", "info"] as const;
-
-// variants.forEach((variant) => {
-// const { unmount } = render(
-//
-// {variant}
-// ,
-// );
-
-// expect(screen.getByTestId(`badge-${variant}`)).toBeInTheDocument();
-// unmount();
-// });
-// });
-
-// it("handles long text content", () => {
-// render(
-//
-// Very long text that should be handled properly by the component
-// ,
-// );
-
-// const badge = screen.getByText(/Very long text/);
-// expect(badge).toBeInTheDocument();
-// expect(badge).toHaveClass("overflow-hidden", "text-ellipsis");
-// });
-// });
diff --git a/autogpt_platform/frontend/src/components/atoms/Skeleton/Skeleton.tsx b/autogpt_platform/frontend/src/components/atoms/Skeleton/Skeleton.tsx
new file mode 100644
index 0000000000..4789e281ce
--- /dev/null
+++ b/autogpt_platform/frontend/src/components/atoms/Skeleton/Skeleton.tsx
@@ -0,0 +1,14 @@
+import { cn } from "@/lib/utils";
+
+interface Props extends React.HTMLAttributes<HTMLDivElement> {
+ className?: string;
+}
+
+export function Skeleton({ className, ...props }: Props) {
+ return (
+
+ );
+}
diff --git a/autogpt_platform/frontend/src/components/atoms/Skeleton/skeleton.stories.tsx b/autogpt_platform/frontend/src/components/atoms/Skeleton/skeleton.stories.tsx
index 04d87a6e0e..69bb7c3440 100644
--- a/autogpt_platform/frontend/src/components/atoms/Skeleton/skeleton.stories.tsx
+++ b/autogpt_platform/frontend/src/components/atoms/Skeleton/skeleton.stories.tsx
@@ -1,4 +1,4 @@
-import { Skeleton } from "@/components/__legacy__/ui/skeleton";
+import { Skeleton } from "./Skeleton";
import type { Meta, StoryObj } from "@storybook/nextjs";
const meta: Meta = {
diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/Chat.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/Chat.tsx
new file mode 100644
index 0000000000..ada8c26231
--- /dev/null
+++ b/autogpt_platform/frontend/src/components/contextual/Chat/Chat.tsx
@@ -0,0 +1,96 @@
+"use client";
+
+import { useCopilotSessionId } from "@/app/(platform)/copilot/useCopilotSessionId";
+import { useCopilotStore } from "@/app/(platform)/copilot/copilot-page-store";
+import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
+import { Text } from "@/components/atoms/Text/Text";
+import { cn } from "@/lib/utils";
+import { useEffect, useRef } from "react";
+import { ChatContainer } from "./components/ChatContainer/ChatContainer";
+import { ChatErrorState } from "./components/ChatErrorState/ChatErrorState";
+import { useChat } from "./useChat";
+
+export interface ChatProps {
+ className?: string;
+ initialPrompt?: string;
+ onSessionNotFound?: () => void;
+ onStreamingChange?: (isStreaming: boolean) => void;
+}
+
+export function Chat({
+ className,
+ initialPrompt,
+ onSessionNotFound,
+ onStreamingChange,
+}: ChatProps) {
+ const { urlSessionId } = useCopilotSessionId();
+ const hasHandledNotFoundRef = useRef(false);
+ const isSwitchingSession = useCopilotStore((s) => s.isSwitchingSession);
+ const {
+ messages,
+ isLoading,
+ isCreating,
+ error,
+ isSessionNotFound,
+ sessionId,
+ createSession,
+ showLoader,
+ startPollingForOperation,
+ } = useChat({ urlSessionId });
+
+ useEffect(() => {
+ if (!onSessionNotFound) return;
+ if (!urlSessionId) return;
+ if (!isSessionNotFound || isLoading || isCreating) return;
+ if (hasHandledNotFoundRef.current) return;
+ hasHandledNotFoundRef.current = true;
+ onSessionNotFound();
+ }, [
+ onSessionNotFound,
+ urlSessionId,
+ isSessionNotFound,
+ isLoading,
+ isCreating,
+ ]);
+
+ const shouldShowLoader =
+ (showLoader && (isLoading || isCreating)) || isSwitchingSession;
+
+ return (
+
+ {/* Main Content */}
+
+ {/* Loading State */}
+ {shouldShowLoader && (
+
+
+
+
+ {isSwitchingSession
+ ? "Switching chat..."
+ : "Loading your chat..."}
+
+
+
+ )}
+
+ {/* Error State */}
+ {error && !isLoading && !isSwitchingSession && (
+
+ )}
+
+ {/* Session Content */}
+ {sessionId && !isLoading && !error && !isSwitchingSession && (
+
+ )}
+
+
+ );
+}
diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/chat-store.ts b/autogpt_platform/frontend/src/components/contextual/Chat/chat-store.ts
new file mode 100644
index 0000000000..8229630e5d
--- /dev/null
+++ b/autogpt_platform/frontend/src/components/contextual/Chat/chat-store.ts
@@ -0,0 +1,289 @@
+"use client";
+
+import { create } from "zustand";
+import type {
+ ActiveStream,
+ StreamChunk,
+ StreamCompleteCallback,
+ StreamResult,
+ StreamStatus,
+} from "./chat-types";
+import { executeStream } from "./stream-executor";
+
+const COMPLETED_STREAM_TTL = 5 * 60 * 1000; // 5 minutes
+
+interface ChatStoreState {
+  activeStreams: Map<string, ActiveStream>;
+  completedStreams: Map<string, StreamResult>;
+  activeSessions: Set<string>;
+  streamCompleteCallbacks: Set<StreamCompleteCallback>;
+}
+
+interface ChatStoreActions {
+ startStream: (
+ sessionId: string,
+ message: string,
+ isUserMessage: boolean,
+ context?: { url: string; content: string },
+ onChunk?: (chunk: StreamChunk) => void,
+  ) => Promise<void>;
+ stopStream: (sessionId: string) => void;
+ subscribeToStream: (
+ sessionId: string,
+ onChunk: (chunk: StreamChunk) => void,
+ skipReplay?: boolean,
+ ) => () => void;
+ getStreamStatus: (sessionId: string) => StreamStatus;
+ getCompletedStream: (sessionId: string) => StreamResult | undefined;
+ clearCompletedStream: (sessionId: string) => void;
+ isStreaming: (sessionId: string) => boolean;
+ registerActiveSession: (sessionId: string) => void;
+ unregisterActiveSession: (sessionId: string) => void;
+ isSessionActive: (sessionId: string) => boolean;
+ onStreamComplete: (callback: StreamCompleteCallback) => () => void;
+}
+
+type ChatStore = ChatStoreState & ChatStoreActions;
+
+function notifyStreamComplete(
+  callbacks: Set<StreamCompleteCallback>,
+ sessionId: string,
+) {
+ for (const callback of callbacks) {
+ try {
+ callback(sessionId);
+ } catch (err) {
+ console.warn("[ChatStore] Stream complete callback error:", err);
+ }
+ }
+}
+
+function cleanupExpiredStreams(
+  completedStreams: Map<string, StreamResult>,
+): Map<string, StreamResult> {
+ const now = Date.now();
+ const cleaned = new Map(completedStreams);
+ for (const [sessionId, result] of cleaned) {
+ if (now - result.completedAt > COMPLETED_STREAM_TTL) {
+ cleaned.delete(sessionId);
+ }
+ }
+ return cleaned;
+}
+
+export const useChatStore = create<ChatStore>((set, get) => ({
+ activeStreams: new Map(),
+ completedStreams: new Map(),
+ activeSessions: new Set(),
+ streamCompleteCallbacks: new Set(),
+
+ startStream: async function startStream(
+ sessionId,
+ message,
+ isUserMessage,
+ context,
+ onChunk,
+ ) {
+ const state = get();
+ const newActiveStreams = new Map(state.activeStreams);
+ let newCompletedStreams = new Map(state.completedStreams);
+ const callbacks = state.streamCompleteCallbacks;
+
+ const existingStream = newActiveStreams.get(sessionId);
+ if (existingStream) {
+ existingStream.abortController.abort();
+ const normalizedStatus =
+ existingStream.status === "streaming"
+ ? "completed"
+ : existingStream.status;
+ const result: StreamResult = {
+ sessionId,
+ status: normalizedStatus,
+ chunks: existingStream.chunks,
+ completedAt: Date.now(),
+ error: existingStream.error,
+ };
+ newCompletedStreams.set(sessionId, result);
+ newActiveStreams.delete(sessionId);
+ newCompletedStreams = cleanupExpiredStreams(newCompletedStreams);
+ if (normalizedStatus === "completed" || normalizedStatus === "error") {
+ notifyStreamComplete(callbacks, sessionId);
+ }
+ }
+
+ const abortController = new AbortController();
+ const initialCallbacks = new Set<(chunk: StreamChunk) => void>();
+ if (onChunk) initialCallbacks.add(onChunk);
+
+ const stream: ActiveStream = {
+ sessionId,
+ abortController,
+ status: "streaming",
+ startedAt: Date.now(),
+ chunks: [],
+ onChunkCallbacks: initialCallbacks,
+ };
+
+ newActiveStreams.set(sessionId, stream);
+ set({
+ activeStreams: newActiveStreams,
+ completedStreams: newCompletedStreams,
+ });
+
+ try {
+ await executeStream(stream, message, isUserMessage, context);
+ } finally {
+ if (onChunk) stream.onChunkCallbacks.delete(onChunk);
+ if (stream.status !== "streaming") {
+ const currentState = get();
+ const finalActiveStreams = new Map(currentState.activeStreams);
+ let finalCompletedStreams = new Map(currentState.completedStreams);
+
+ const storedStream = finalActiveStreams.get(sessionId);
+ if (storedStream === stream) {
+ const result: StreamResult = {
+ sessionId,
+ status: stream.status,
+ chunks: stream.chunks,
+ completedAt: Date.now(),
+ error: stream.error,
+ };
+ finalCompletedStreams.set(sessionId, result);
+ finalActiveStreams.delete(sessionId);
+ finalCompletedStreams = cleanupExpiredStreams(finalCompletedStreams);
+ set({
+ activeStreams: finalActiveStreams,
+ completedStreams: finalCompletedStreams,
+ });
+ if (stream.status === "completed" || stream.status === "error") {
+ notifyStreamComplete(
+ currentState.streamCompleteCallbacks,
+ sessionId,
+ );
+ }
+ }
+ }
+ }
+ },
+
+ stopStream: function stopStream(sessionId) {
+ const state = get();
+ const stream = state.activeStreams.get(sessionId);
+ if (!stream) return;
+
+ stream.abortController.abort();
+ stream.status = "completed";
+
+ const newActiveStreams = new Map(state.activeStreams);
+ let newCompletedStreams = new Map(state.completedStreams);
+
+ const result: StreamResult = {
+ sessionId,
+ status: stream.status,
+ chunks: stream.chunks,
+ completedAt: Date.now(),
+ error: stream.error,
+ };
+ newCompletedStreams.set(sessionId, result);
+ newActiveStreams.delete(sessionId);
+ newCompletedStreams = cleanupExpiredStreams(newCompletedStreams);
+
+ set({
+ activeStreams: newActiveStreams,
+ completedStreams: newCompletedStreams,
+ });
+
+ notifyStreamComplete(state.streamCompleteCallbacks, sessionId);
+ },
+
+ subscribeToStream: function subscribeToStream(
+ sessionId,
+ onChunk,
+ skipReplay = false,
+ ) {
+ const state = get();
+ const stream = state.activeStreams.get(sessionId);
+
+ if (stream) {
+ if (!skipReplay) {
+ for (const chunk of stream.chunks) {
+ onChunk(chunk);
+ }
+ }
+
+ stream.onChunkCallbacks.add(onChunk);
+
+ return function unsubscribe() {
+ stream.onChunkCallbacks.delete(onChunk);
+ };
+ }
+
+ return function noop() {};
+ },
+
+ getStreamStatus: function getStreamStatus(sessionId) {
+ const { activeStreams, completedStreams } = get();
+
+ const active = activeStreams.get(sessionId);
+ if (active) return active.status;
+
+ const completed = completedStreams.get(sessionId);
+ if (completed) return completed.status;
+
+ return "idle";
+ },
+
+ getCompletedStream: function getCompletedStream(sessionId) {
+ return get().completedStreams.get(sessionId);
+ },
+
+ clearCompletedStream: function clearCompletedStream(sessionId) {
+ const state = get();
+ if (!state.completedStreams.has(sessionId)) return;
+
+ const newCompletedStreams = new Map(state.completedStreams);
+ newCompletedStreams.delete(sessionId);
+ set({ completedStreams: newCompletedStreams });
+ },
+
+ isStreaming: function isStreaming(sessionId) {
+ const stream = get().activeStreams.get(sessionId);
+ return stream?.status === "streaming";
+ },
+
+ registerActiveSession: function registerActiveSession(sessionId) {
+ const state = get();
+ if (state.activeSessions.has(sessionId)) return;
+
+ const newActiveSessions = new Set(state.activeSessions);
+ newActiveSessions.add(sessionId);
+ set({ activeSessions: newActiveSessions });
+ },
+
+ unregisterActiveSession: function unregisterActiveSession(sessionId) {
+ const state = get();
+ if (!state.activeSessions.has(sessionId)) return;
+
+ const newActiveSessions = new Set(state.activeSessions);
+ newActiveSessions.delete(sessionId);
+ set({ activeSessions: newActiveSessions });
+ },
+
+ isSessionActive: function isSessionActive(sessionId) {
+ return get().activeSessions.has(sessionId);
+ },
+
+ onStreamComplete: function onStreamComplete(callback) {
+ const state = get();
+ const newCallbacks = new Set(state.streamCompleteCallbacks);
+ newCallbacks.add(callback);
+ set({ streamCompleteCallbacks: newCallbacks });
+
+ return function unsubscribe() {
+ const currentState = get();
+ const cleanedCallbacks = new Set(currentState.streamCompleteCallbacks);
+ cleanedCallbacks.delete(callback);
+ set({ streamCompleteCallbacks: cleanedCallbacks });
+ };
+ },
+}));
diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/chat-types.ts b/autogpt_platform/frontend/src/components/contextual/Chat/chat-types.ts
new file mode 100644
index 0000000000..8c8aa7b704
--- /dev/null
+++ b/autogpt_platform/frontend/src/components/contextual/Chat/chat-types.ts
@@ -0,0 +1,94 @@
+import type { ToolArguments, ToolResult } from "@/types/chat";
+
+export type StreamStatus = "idle" | "streaming" | "completed" | "error";
+
+export interface StreamChunk {
+ type:
+ | "text_chunk"
+ | "text_ended"
+ | "tool_call"
+ | "tool_call_start"
+ | "tool_response"
+ | "login_needed"
+ | "need_login"
+ | "credentials_needed"
+ | "error"
+ | "usage"
+ | "stream_end";
+ timestamp?: string;
+ content?: string;
+ message?: string;
+ code?: string;
+  details?: Record<string, unknown>;
+ tool_id?: string;
+ tool_name?: string;
+ arguments?: ToolArguments;
+ result?: ToolResult;
+ success?: boolean;
+ idx?: number;
+ session_id?: string;
+ agent_info?: {
+ graph_id: string;
+ name: string;
+ trigger_type: string;
+ };
+ provider?: string;
+ provider_name?: string;
+ credential_type?: string;
+ scopes?: string[];
+ title?: string;
+ [key: string]: unknown;
+}
+
+export type VercelStreamChunk =
+ | { type: "start"; messageId: string }
+ | { type: "finish" }
+ | { type: "text-start"; id: string }
+ | { type: "text-delta"; id: string; delta: string }
+ | { type: "text-end"; id: string }
+ | { type: "tool-input-start"; toolCallId: string; toolName: string }
+ | {
+ type: "tool-input-available";
+ toolCallId: string;
+ toolName: string;
+      input: Record<string, unknown>;
+ }
+ | {
+ type: "tool-output-available";
+ toolCallId: string;
+ toolName?: string;
+ output: unknown;
+ success?: boolean;
+ }
+ | {
+ type: "usage";
+ promptTokens: number;
+ completionTokens: number;
+ totalTokens: number;
+ }
+ | {
+ type: "error";
+ errorText: string;
+ code?: string;
+      details?: Record<string, unknown>;
+ };
+
+export interface ActiveStream {
+ sessionId: string;
+ abortController: AbortController;
+ status: StreamStatus;
+ startedAt: number;
+ chunks: StreamChunk[];
+ error?: Error;
+ onChunkCallbacks: Set<(chunk: StreamChunk) => void>;
+}
+
+export interface StreamResult {
+ sessionId: string;
+ status: StreamStatus;
+ chunks: StreamChunk[];
+ completedAt: number;
+ error?: Error;
+}
+
+export type StreamCompleteCallback = (sessionId: string) => void;
diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/AIChatBubble/AIChatBubble.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/AIChatBubble/AIChatBubble.tsx
new file mode 100644
index 0000000000..f5d56fcb15
--- /dev/null
+++ b/autogpt_platform/frontend/src/components/contextual/Chat/components/AIChatBubble/AIChatBubble.tsx
@@ -0,0 +1,15 @@
+import { cn } from "@/lib/utils";
+import { ReactNode } from "react";
+
+export interface AIChatBubbleProps {
+ children: ReactNode;
+ className?: string;
+}
+
+export function AIChatBubble({ children, className }: AIChatBubbleProps) {
+ return (
+
+ {children}
+
+ );
+}
diff --git a/autogpt_platform/frontend/src/app/(platform)/chat/components/Chat/components/AgentCarouselMessage/AgentCarouselMessage.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/AgentCarouselMessage/AgentCarouselMessage.tsx
similarity index 100%
rename from autogpt_platform/frontend/src/app/(platform)/chat/components/Chat/components/AgentCarouselMessage/AgentCarouselMessage.tsx
rename to autogpt_platform/frontend/src/components/contextual/Chat/components/AgentCarouselMessage/AgentCarouselMessage.tsx
diff --git a/autogpt_platform/frontend/src/app/(platform)/chat/components/Chat/components/AgentInputsSetup/AgentInputsSetup.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/AgentInputsSetup/AgentInputsSetup.tsx
similarity index 100%
rename from autogpt_platform/frontend/src/app/(platform)/chat/components/Chat/components/AgentInputsSetup/AgentInputsSetup.tsx
rename to autogpt_platform/frontend/src/components/contextual/Chat/components/AgentInputsSetup/AgentInputsSetup.tsx
diff --git a/autogpt_platform/frontend/src/app/(platform)/chat/components/Chat/components/AgentInputsSetup/useAgentInputsSetup.ts b/autogpt_platform/frontend/src/components/contextual/Chat/components/AgentInputsSetup/useAgentInputsSetup.ts
similarity index 100%
rename from autogpt_platform/frontend/src/app/(platform)/chat/components/Chat/components/AgentInputsSetup/useAgentInputsSetup.ts
rename to autogpt_platform/frontend/src/components/contextual/Chat/components/AgentInputsSetup/useAgentInputsSetup.ts
diff --git a/autogpt_platform/frontend/src/app/(platform)/chat/components/Chat/components/AuthPromptWidget/AuthPromptWidget.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/AuthPromptWidget/AuthPromptWidget.tsx
similarity index 99%
rename from autogpt_platform/frontend/src/app/(platform)/chat/components/Chat/components/AuthPromptWidget/AuthPromptWidget.tsx
rename to autogpt_platform/frontend/src/components/contextual/Chat/components/AuthPromptWidget/AuthPromptWidget.tsx
index 33f02e660f..b2cf92ec56 100644
--- a/autogpt_platform/frontend/src/app/(platform)/chat/components/Chat/components/AuthPromptWidget/AuthPromptWidget.tsx
+++ b/autogpt_platform/frontend/src/components/contextual/Chat/components/AuthPromptWidget/AuthPromptWidget.tsx
@@ -21,7 +21,7 @@ export function AuthPromptWidget({
message,
sessionId,
agentInfo,
- returnUrl = "/chat",
+ returnUrl = "/copilot/chat",
className,
}: AuthPromptWidgetProps) {
const router = useRouter();
diff --git a/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/ChatContainer.tsx b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/ChatContainer.tsx
new file mode 100644
index 0000000000..dec221338a
--- /dev/null
+++ b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/ChatContainer.tsx
@@ -0,0 +1,130 @@
+import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessionDetailResponse";
+import { Button } from "@/components/atoms/Button/Button";
+import { Text } from "@/components/atoms/Text/Text";
+import { Dialog } from "@/components/molecules/Dialog/Dialog";
+import { useBreakpoint } from "@/lib/hooks/useBreakpoint";
+import { cn } from "@/lib/utils";
+import { GlobeHemisphereEastIcon } from "@phosphor-icons/react";
+import { useEffect } from "react";
+import { ChatInput } from "../ChatInput/ChatInput";
+import { MessageList } from "../MessageList/MessageList";
+import { useChatContainer } from "./useChatContainer";
+
+export interface ChatContainerProps {
+ sessionId: string | null;
+ initialMessages: SessionDetailResponse["messages"];
+ initialPrompt?: string;
+ className?: string;
+ onStreamingChange?: (isStreaming: boolean) => void;
+ onOperationStarted?: () => void;
+}
+
+export function ChatContainer({
+ sessionId,
+ initialMessages,
+ initialPrompt,
+ className,
+ onStreamingChange,
+ onOperationStarted,
+}: ChatContainerProps) {
+ const {
+ messages,
+ streamingChunks,
+ isStreaming,
+ stopStreaming,
+ isRegionBlockedModalOpen,
+ sendMessageWithContext,
+ handleRegionModalOpenChange,
+ handleRegionModalClose,
+ } = useChatContainer({
+ sessionId,
+ initialMessages,
+ initialPrompt,
+ onOperationStarted,
+ });
+
+ useEffect(() => {
+ onStreamingChange?.(isStreaming);
+ }, [isStreaming, onStreamingChange]);
+
+ const breakpoint = useBreakpoint();
+ const isMobile =
+ breakpoint === "base" || breakpoint === "sm" || breakpoint === "md";
+
+ return (
+
+
+ }
+ controlled={{
+ isOpen: isRegionBlockedModalOpen,
+ set: handleRegionModalOpenChange,
+ }}
+ onClose={handleRegionModalClose}
+ styling={{ maxWidth: 550, width: "100%", minWidth: "auto" }}
+ >
+
+
+
+ The Autogpt AI model is not available in your region or your
+ connection is blocking it. Please try again with a different
+ connection.
+
+
+
+
+
+
+
+ {/* Messages - Scrollable */}
+
+
+ {/* Input - Fixed at bottom */}
+
+
+ );
+}
diff --git a/autogpt_platform/frontend/src/app/(platform)/chat/components/Chat/components/ChatContainer/createStreamEventDispatcher.ts b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/createStreamEventDispatcher.ts
similarity index 53%
rename from autogpt_platform/frontend/src/app/(platform)/chat/components/Chat/components/ChatContainer/createStreamEventDispatcher.ts
rename to autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/createStreamEventDispatcher.ts
index 844f126d49..82e9b05e88 100644
--- a/autogpt_platform/frontend/src/app/(platform)/chat/components/Chat/components/ChatContainer/createStreamEventDispatcher.ts
+++ b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/createStreamEventDispatcher.ts
@@ -1,6 +1,6 @@
import { toast } from "sonner";
-import { StreamChunk } from "../../useChatStream";
-import type { HandlerDependencies } from "./useChatContainer.handlers";
+import type { StreamChunk } from "../../chat-types";
+import type { HandlerDependencies } from "./handlers";
import {
handleError,
handleLoginNeeded,
@@ -9,12 +9,30 @@ import {
handleTextEnded,
handleToolCallStart,
handleToolResponse,
-} from "./useChatContainer.handlers";
+ isRegionBlockedError,
+} from "./handlers";
export function createStreamEventDispatcher(
deps: HandlerDependencies,
): (chunk: StreamChunk) => void {
return function dispatchStreamEvent(chunk: StreamChunk): void {
+ if (
+ chunk.type === "text_chunk" ||
+ chunk.type === "tool_call_start" ||
+ chunk.type === "tool_response" ||
+ chunk.type === "login_needed" ||
+ chunk.type === "need_login" ||
+ chunk.type === "error"
+ ) {
+ if (!deps.hasResponseRef.current) {
+ console.info("[ChatStream] First response chunk:", {
+ type: chunk.type,
+ sessionId: deps.sessionId,
+ });
+ }
+ deps.hasResponseRef.current = true;
+ }
+
switch (chunk.type) {
case "text_chunk":
handleTextChunk(chunk, deps);
@@ -38,15 +56,23 @@ export function createStreamEventDispatcher(
break;
case "stream_end":
+ console.info("[ChatStream] Stream ended:", {
+ sessionId: deps.sessionId,
+ hasResponse: deps.hasResponseRef.current,
+ chunkCount: deps.streamingChunksRef.current.length,
+ });
handleStreamEnd(chunk, deps);
break;
case "error":
+ const isRegionBlocked = isRegionBlockedError(chunk);
handleError(chunk, deps);
// Show toast at dispatcher level to avoid circular dependencies
- toast.error("Chat Error", {
- description: chunk.message || chunk.content || "An error occurred",
- });
+ if (!isRegionBlocked) {
+ toast.error("Chat Error", {
+ description: chunk.message || chunk.content || "An error occurred",
+ });
+ }
break;
case "usage":
diff --git a/autogpt_platform/frontend/src/app/(platform)/chat/components/Chat/components/ChatContainer/useChatContainer.handlers.ts b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/handlers.ts
similarity index 57%
rename from autogpt_platform/frontend/src/app/(platform)/chat/components/Chat/components/ChatContainer/useChatContainer.handlers.ts
rename to autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/handlers.ts
index 064b847064..f3cac01f96 100644
--- a/autogpt_platform/frontend/src/app/(platform)/chat/components/Chat/components/ChatContainer/useChatContainer.handlers.ts
+++ b/autogpt_platform/frontend/src/components/contextual/Chat/components/ChatContainer/handlers.ts
@@ -7,13 +7,29 @@ import {
parseToolResponse,
} from "./helpers";
+function isToolCallMessage(
+ message: ChatMessageData,
+): message is Extract