From e640d36265bf8c73a694663fb27efdbfaaac4f9e Mon Sep 17 00:00:00 2001 From: Abhimanyu Yadav <122007096+Abhi1992002@users.noreply.github.com> Date: Thu, 18 Dec 2025 09:12:55 +0530 Subject: [PATCH 01/25] feat(frontend): Add special handling for AGENT block type in form and output handlers (#11595) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Agent blocks require different handling compared to standard blocks, particularly for: - Handle ID generation (using direct keys instead of generated IDs) - Form data storage structure (nested under `inputs` key) - Field ID parsing (filtering out schema path prefixes) This PR implements special handling for `BlockUIType.AGENT` throughout the form rendering and output handling components to ensure agents work correctly in the flow editor. ### Changes 🏗️ - **CustomNode.tsx**: Pass `uiType` prop to `OutputHandler` component - **FormCreator.tsx**: - Store agent form data in `hardcodedValues.inputs` instead of directly in `hardcodedValues` - Extract initial values from `hardcodedValues.inputs` for agent blocks - **OutputHandler.tsx**: - Accept `uiType` prop - Use direct key as handle ID for agents instead of `generateHandleId(key)` - **useMarketplaceAgentsContent.ts**: - Fetch full agent details using `getV2GetLibraryAgent` before adding to builder - Ensures agent schemas are properly populated (fixes issue where marketplace endpoint returns empty schemas) - **AnyOfField.tsx**: Generate handle IDs for agents by filtering out "root" and "properties" from schema path - **FieldTemplate.tsx**: Apply same handle ID generation logic for agent fields ### Checklist 📋 #### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] Add an agent block from marketplace and verify it renders correctly - [x] Connect inputs/outputs to/from an agent block and verify connections work - [x] Fill in form fields for an agent block and verify data persists correctly - [x] Verify agent blocks work in both new and existing flows - [x] Test that non-agent blocks still work as before (regression test) --- .../nodes/CustomNode/CustomNode.tsx | 6 +++++- .../FlowEditor/nodes/FormCreator.tsx | 19 +++++++++++++++++-- .../FlowEditor/nodes/OutputHandler.tsx | 7 ++++++- .../BlockMenuSearch/useBlockMenuSearch.ts | 8 +++++++- .../useMarketplaceAgentsContent.ts | 11 ++++++++++- .../fields/AnyOfField/AnyOfField.tsx | 10 +++++++++- .../templates/FieldTemplate.tsx | 10 +++++++++- 7 files changed, 63 insertions(+), 8 deletions(-) diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/CustomNode.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/CustomNode.tsx index 974cbe3754..52068f3acb 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/CustomNode.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/CustomNode.tsx @@ -106,7 +106,11 @@ export const CustomNode: React.FC> = React.memo( /> {data.uiType != BlockUIType.OUTPUT && ( - + )} diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/FormCreator.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/FormCreator.tsx index 315a52f553..cfee0bf89f 100644 --- 
a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/FormCreator.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/FormCreator.tsx @@ -20,17 +20,32 @@ export const FormCreator = React.memo( className?: string; }) => { const updateNodeData = useNodeStore((state) => state.updateNodeData); + const getHardCodedValues = useNodeStore( (state) => state.getHardCodedValues, ); + const handleChange = ({ formData }: any) => { if ("credentials" in formData && !formData.credentials?.id) { delete formData.credentials; } - updateNodeData(nodeId, { hardcodedValues: formData }); + + const updatedValues = + uiType === BlockUIType.AGENT + ? { + ...getHardCodedValues(nodeId), + inputs: formData, + } + : formData; + + updateNodeData(nodeId, { hardcodedValues: updatedValues }); }; - const initialValues = getHardCodedValues(nodeId); + const hardcodedValues = getHardCodedValues(nodeId); + const initialValues = + uiType === BlockUIType.AGENT + ? (hardcodedValues.inputs ?? {}) + : hardcodedValues; return (
diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/OutputHandler.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/OutputHandler.tsx index 9c032ac20f..ab3b648ba9 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/OutputHandler.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/OutputHandler.tsx @@ -14,13 +14,16 @@ import { import { useEdgeStore } from "@/app/(platform)/build/stores/edgeStore"; import { getTypeDisplayInfo } from "./helpers"; import { generateHandleId } from "../handlers/helpers"; +import { BlockUIType } from "../../types"; export const OutputHandler = ({ outputSchema, nodeId, + uiType, }: { outputSchema: RJSFSchema; nodeId: string; + uiType: BlockUIType; }) => { const { isOutputConnected } = useEdgeStore(); const properties = outputSchema?.properties || {}; @@ -79,7 +82,9 @@ export const OutputHandler = ({ diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/useBlockMenuSearch.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/useBlockMenuSearch.ts index 5e9007e617..3eb14d3ca9 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/useBlockMenuSearch.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/useBlockMenuSearch.ts @@ -7,6 +7,7 @@ import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; import { getV2GetSpecificAgent } from "@/app/api/__generated__/endpoints/store/store"; import { getGetV2ListLibraryAgentsQueryKey, + getV2GetLibraryAgent, usePostV2AddMarketplaceAgent, } from "@/app/api/__generated__/endpoints/library/library"; import { @@ -151,7 +152,12 @@ export const useBlockMenuSearch = () => { }); const libraryAgent = response.data as LibraryAgent; - addAgentToBuilder(libraryAgent); + + const { data: libraryAgentDetails } = await getV2GetLibraryAgent( + libraryAgent.id, + ); + + addAgentToBuilder(libraryAgentDetails as LibraryAgent); toast({ title: "Agent Added", diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MarketplaceAgentsContent/useMarketplaceAgentsContent.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MarketplaceAgentsContent/useMarketplaceAgentsContent.ts index 8ca3fe30f5..ff9b70b79a 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MarketplaceAgentsContent/useMarketplaceAgentsContent.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MarketplaceAgentsContent/useMarketplaceAgentsContent.ts @@ -1,6 +1,7 @@ import { getGetV2GetBuilderItemCountsQueryKey } from "@/app/api/__generated__/endpoints/default/default"; import { getGetV2ListLibraryAgentsQueryKey, + getV2GetLibraryAgent, usePostV2AddMarketplaceAgent, } from "@/app/api/__generated__/endpoints/library/library"; import { @@ -105,8 +106,16 @@ export const useMarketplaceAgentsContent = () => { }, }); + // Here, libraryAgent has empty input and output schemas. + // Not updating the endpoint because this endpoint is used elsewhere. + // TODO: Create a new endpoint for builder specific to marketplace agents. 
const libraryAgent = response.data as LibraryAgent; - addAgentToBuilder(libraryAgent); + + const { data: libraryAgentDetails } = await getV2GetLibraryAgent( + libraryAgent.id, + ); + + addAgentToBuilder(libraryAgentDetails as LibraryAgent); toast({ title: "Agent Added", diff --git a/autogpt_platform/frontend/src/components/renderers/input-renderer/fields/AnyOfField/AnyOfField.tsx b/autogpt_platform/frontend/src/components/renderers/input-renderer/fields/AnyOfField/AnyOfField.tsx index 7fb3d9c938..79fa15304d 100644 --- a/autogpt_platform/frontend/src/components/renderers/input-renderer/fields/AnyOfField/AnyOfField.tsx +++ b/autogpt_platform/frontend/src/components/renderers/input-renderer/fields/AnyOfField/AnyOfField.tsx @@ -23,6 +23,7 @@ import { TooltipTrigger, } from "@/components/atoms/Tooltip/BaseTooltip"; import { cn } from "@/lib/utils"; +import { BlockUIType } from "@/app/(platform)/build/components/types"; type TypeOption = { type: string; @@ -47,7 +48,14 @@ export const AnyOfField = ({ onBlur, onFocus, }: FieldProps) => { - const handleId = generateHandleId(idSchema.$id ?? ""); + const handleId = + formContext.uiType === BlockUIType.AGENT + ? (idSchema.$id ?? "") + .split("_") + .filter((p) => p !== "root" && p !== "properties" && p.length > 0) + .join("_") || "" + : generateHandleId(idSchema.$id ?? ""); + const updatedFormContexrt = { ...formContext, fromAnyOf: true }; const { nodeId, showHandles = true } = updatedFormContexrt; diff --git a/autogpt_platform/frontend/src/components/renderers/input-renderer/templates/FieldTemplate.tsx b/autogpt_platform/frontend/src/components/renderers/input-renderer/templates/FieldTemplate.tsx index a056782939..ebc8a1f038 100644 --- a/autogpt_platform/frontend/src/components/renderers/input-renderer/templates/FieldTemplate.tsx +++ b/autogpt_platform/frontend/src/components/renderers/input-renderer/templates/FieldTemplate.tsx @@ -58,7 +58,15 @@ const FieldTemplate: React.FC = ({ let handleId = null; if (!isArrayItem) { - handleId = generateHandleId(fieldId); + if (uiType === BlockUIType.AGENT) { + const parts = fieldId.split("_"); + const filtered = parts.filter( + (p) => p !== "root" && p !== "properties" && p.length > 0, + ); + handleId = filtered.join("_") || ""; + } else { + handleId = generateHandleId(fieldId); + } } else { handleId = arrayFieldHandleId; } From 9a1d9406776df8c759a77c4c9b817b8e53f8d4c7 Mon Sep 17 00:00:00 2001 From: Ubbe Date: Thu, 18 Dec 2025 18:08:19 +0100 Subject: [PATCH 02/25] fix(frontend): onboarding run card (#11636) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Changes 🏗️ ### Before Screenshot 2025-12-18 at 17 16 57 - extra label - overflow ### After Screenshot 2025-12-18 at 17 41 53 ## Checklist 📋 #### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] Run locally - [x] Test the above --- .../app/(no-navbar)/onboarding/5-run/page.tsx | 22 ++++++------------- .../CredentialRow/CredentialRow.tsx | 7 ++++-- 2 files changed, 12 insertions(+), 17 deletions(-) diff --git a/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/page.tsx b/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/page.tsx index 4b6abacbff..58960a0cf6 100644 --- a/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/page.tsx +++ b/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/page.tsx @@ -8,7 +8,6 @@ import { CardTitle, } from 
"@/components/__legacy__/ui/card"; import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard"; -import { InformationTooltip } from "@/components/molecules/InformationTooltip/InformationTooltip"; import { CircleNotchIcon } from "@phosphor-icons/react/dist/ssr"; import { Play } from "lucide-react"; import OnboardingButton from "../components/OnboardingButton"; @@ -79,20 +78,13 @@ export default function Page() { {Object.entries(agent?.input_schema.properties || {}).map( ([key, inputSubSchema]) => ( -
- - handleSetAgentInput(key, value)} - /> -
+ handleSetAgentInput(key, value)} + /> ), )}
- + {getCredentialDisplayName(credential, displayName)} {"*".repeat(MASKED_KEY_LENGTH)} From 0082a72657a5cca8af7ff6c9d08901d5c47ea6dd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 18 Dec 2025 11:16:22 -0600 Subject: [PATCH 03/25] chore(deps): Bump actions/labeler from 5 to 6 (#10868) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/labeler](https://github.com/actions/labeler) from 5 to 6.
[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/labeler&package-manager=github_actions&previous-version=5&new-version=6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) You can trigger a rebase of this PR by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
> **Note** > Automatic rebases have been disabled on this pull request as it has been open for over 30 days. Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/repo-pr-label.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/repo-pr-label.yml b/.github/workflows/repo-pr-label.yml index eef928ef16..97579c2784 100644 --- a/.github/workflows/repo-pr-label.yml +++ b/.github/workflows/repo-pr-label.yml @@ -61,6 +61,6 @@ jobs: pull-requests: write runs-on: ubuntu-latest steps: - - uses: actions/labeler@v5 + - uses: actions/labeler@v6 with: sync-labels: true From 22078671df67c0db82dd10c810b256df40d65ef8 Mon Sep 17 00:00:00 2001 From: Bently Date: Thu, 18 Dec 2025 18:29:20 +0100 Subject: [PATCH 04/25] feat(frontend): increase file upload size limit to 256MB (#11634) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Updated Next.js configuration to set body size limits for server actions and API routes. - Enhanced error handling in the API client to provide user-friendly messages for file size errors. - Added user-friendly error messages for 413 Payload Too Large responses in API error parsing. These changes ensure that file uploads are consistent with backend limits and improve user experience during uploads. ### Checklist 📋 #### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] Upload a file bigger than 10MB and it works - [X] Upload a file bigger than 256MB and you see a official error stating the max file size is 256MB --- autogpt_platform/frontend/next.config.mjs | 8 +++++ .../src/app/api/proxy/[...path]/route.ts | 4 +++ .../src/lib/autogpt-server-api/client.ts | 32 ++++++++++++++++++- .../src/lib/autogpt-server-api/helpers.ts | 15 +++++++++ 4 files changed, 58 insertions(+), 1 deletion(-) diff --git a/autogpt_platform/frontend/next.config.mjs b/autogpt_platform/frontend/next.config.mjs index d4595990a2..e4e4cdf544 100644 --- a/autogpt_platform/frontend/next.config.mjs +++ b/autogpt_platform/frontend/next.config.mjs @@ -3,6 +3,14 @@ import { withSentryConfig } from "@sentry/nextjs"; /** @type {import('next').NextConfig} */ const nextConfig = { productionBrowserSourceMaps: true, + experimental: { + serverActions: { + bodySizeLimit: "256mb", + }, + // Increase body size limit for API routes (file uploads) - 256MB to match backend limit + proxyClientMaxBodySize: "256mb", + middlewareClientMaxBodySize: "256mb", + }, images: { domains: [ // We dont need to maintain alphabetical order here diff --git a/autogpt_platform/frontend/src/app/api/proxy/[...path]/route.ts b/autogpt_platform/frontend/src/app/api/proxy/[...path]/route.ts index 09235f9c3b..293c406373 100644 --- a/autogpt_platform/frontend/src/app/api/proxy/[...path]/route.ts +++ b/autogpt_platform/frontend/src/app/api/proxy/[...path]/route.ts @@ -6,6 +6,10 @@ import { import { environment } from "@/services/environment"; import { NextRequest, NextResponse } from "next/server"; +// Increase body size limit to 256MB to match backend file upload limit +export const maxDuration = 300; // 5 minutes timeout for large uploads +export const dynamic = "force-dynamic"; + function buildBackendUrl(path: string[], queryString: string): string { const backendPath = path.join("/"); return `${environment.getAGPTServerBaseUrl()}/${backendPath}${queryString}`; diff --git 
a/autogpt_platform/frontend/src/lib/autogpt-server-api/client.ts b/autogpt_platform/frontend/src/lib/autogpt-server-api/client.ts index 3b0666bf62..682fc14108 100644 --- a/autogpt_platform/frontend/src/lib/autogpt-server-api/client.ts +++ b/autogpt_platform/frontend/src/lib/autogpt-server-api/client.ts @@ -910,7 +910,37 @@ export default class BackendAPI { reject(new Error("Invalid JSON response")); } } else { - reject(new Error(`HTTP ${xhr.status}: ${xhr.statusText}`)); + // Handle file size errors with user-friendly message + if (xhr.status === 413) { + reject(new Error("File is too large — max size is 256MB")); + return; + } + + // Try to parse error response for better messages + let errorMessage = `Upload failed (${xhr.status})`; + try { + const errorData = JSON.parse(xhr.responseText); + if (errorData.detail) { + if ( + typeof errorData.detail === "string" && + errorData.detail.includes("exceeds the maximum") + ) { + const match = errorData.detail.match( + /maximum allowed size of (\d+)MB/, + ); + const maxSize = match ? match[1] : "256"; + errorMessage = `File is too large — max size is ${maxSize}MB`; + } else if (typeof errorData.detail === "string") { + errorMessage = errorData.detail; + } + } else if (errorData.error) { + errorMessage = errorData.error; + } + } catch { + // Keep default message if parsing fails + } + + reject(new Error(errorMessage)); } }); diff --git a/autogpt_platform/frontend/src/lib/autogpt-server-api/helpers.ts b/autogpt_platform/frontend/src/lib/autogpt-server-api/helpers.ts index 7e20783042..4cb24df77d 100644 --- a/autogpt_platform/frontend/src/lib/autogpt-server-api/helpers.ts +++ b/autogpt_platform/frontend/src/lib/autogpt-server-api/helpers.ts @@ -184,6 +184,11 @@ export function serializeRequestBody( } export async function parseApiError(response: Response): Promise { + // Handle 413 Payload Too Large with user-friendly message + if (response.status === 413) { + return "File is too large — max size is 256MB"; + } + try { const errorData = await response.clone().json(); @@ -205,6 +210,16 @@ export async function parseApiError(response: Response): Promise { return response.statusText; // Fallback to status text if no message } + // Check for file size error from backend + if ( + typeof errorData.detail === "string" && + errorData.detail.includes("exceeds the maximum") + ) { + const match = errorData.detail.match(/maximum allowed size of (\d+)MB/); + const maxSize = match ? match[1] : "256"; + return `File is too large — max size is ${maxSize}MB`; + } + return errorData.detail || errorData.error || response.statusText; } catch { return response.statusText; From cab498fa8ce3469ddde8d4c2e4b1f8ab1dd55e02 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 18 Dec 2025 17:34:04 +0000 Subject: [PATCH 05/25] chore(deps): Bump actions/stale from 9 to 10 (#10871) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/stale](https://github.com/actions/stale) from 9 to 10.
Release notes

Sourced from actions/stale's releases.

v10.0.0

What's Changed

Breaking Changes

Enhancement

Dependency Upgrades

Documentation changes

New Contributors

Full Changelog: https://github.com/actions/stale/compare/v9...v10.0.0

v9.1.0

What's Changed

New Contributors

Full Changelog: https://github.com/actions/stale/compare/v9...v9.1.0

Changelog

Sourced from actions/stale's changelog.

Changelog

[9.1.0]

What's Changed

[9.0.0]

Breaking Changes

  1. Action is now stateful: If the action ends because of operations-per-run then the next run will start from the first unprocessed issue skipping the issues processed during the previous run(s). The state is reset when all the issues are processed. This should be considered for scheduling workflow runs.
  2. Version 9 of this action updated the runtime to Node.js 20. All scripts are now run with Node.js 20 instead of Node.js 16 and are affected by any breaking changes between Node.js 16 and 20.

What Else Changed

  1. Performance optimization that removes unnecessary API calls by @​dsame in #1033; fixes #792
  2. Logs displaying current GitHub API rate limit by @​dsame in #1032; addresses #1029

For more information, please read the action documentation and its section about statefulness

[4.1.1]

In scope of this release we updated actions/core to 1.10.0 for v4 and fixed issues operation count.

[8.0.0]

:warning: This version contains breaking changes :warning:

[7.0.0]

:warning: Breaking change :warning:

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/stale&package-manager=github_actions&previous-version=9&new-version=10)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) You can trigger a rebase of this PR by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
> **Note** > Automatic rebases have been disabled on this pull request as it has been open for over 30 days. --- > [!NOTE] > Update the stale-issues workflow to use `actions/stale@v10` instead of `v9`. > > Written by [Cursor Bugbot](https://cursor.com/dashboard?tab=bugbot) for commit 747d4ea73adf8f56c5b7c91653c7edcb2a32e1ba. This will update automatically on new commits. Configure [here](https://cursor.com/dashboard?tab=bugbot). Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Nicholas Tindle --- .github/workflows/repo-close-stale-issues.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/repo-close-stale-issues.yml b/.github/workflows/repo-close-stale-issues.yml index a9f183d775..d58459daa1 100644 --- a/.github/workflows/repo-close-stale-issues.yml +++ b/.github/workflows/repo-close-stale-issues.yml @@ -11,7 +11,7 @@ jobs: stale: runs-on: ubuntu-latest steps: - - uses: actions/stale@v9 + - uses: actions/stale@v10 with: # operations-per-run: 5000 stale-issue-message: > From 99e2261254df6aadc7ed97df235f7d696b8b44b5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 18 Dec 2025 18:55:57 +0100 Subject: [PATCH 06/25] chore(frontend/deps-dev): bump eslint-config-next from 15.5.2 to 15.5.6 in /autogpt_platform/frontend (#11355) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [eslint-config-next](https://github.com/vercel/next.js/tree/HEAD/packages/eslint-config-next) from 15.5.2 to 15.5.6.
Release notes

Sourced from eslint-config-next's releases.

v15.5.6

[!NOTE]
This release is backporting bug fixes. It does not include all pending features/changes on canary.

Core Changes

  • Turbopack: don't define process.cwd() in node_modules #83452

Credits

Huge thanks to @​mischnic for helping!

v15.5.5

[!NOTE]
This release is backporting bug fixes. It does not include all pending features/changes on canary.

Core Changes

  • Split code-frame into separate compiled package (#84238)
  • Add deprecation warning to Runtime config (#84650)
  • fix: unstable_cache should perform blocking revalidation during ISR revalidation (#84716)
  • feat: experimental.middlewareClientMaxBodySize body cloning limit (#84722)
  • fix: missing next/link types with typedRoutes (#84779)

Misc Changes

  • docs: early October improvements and fixes (#84334)

Credits

Huge thanks to @​devjiwonchoi, @​ztanner, and @​icyJoseph for helping!

v15.5.4

[!NOTE]
This release is backporting bug fixes. It does not include all pending features/changes on canary.

Core Changes

  • fix: ensure onRequestError is invoked when otel enabled (#83343)
  • fix: devtools initial position should be from next config (#83571)
  • [devtool] fix overlay styles are missing (#83721)
  • Turbopack: don't match dynamic pattern for node_modules packages (#83176)
  • Turbopack: don't treat metadata routes as RSC (#82911)
  • [turbopack] Improve handling of symlink resolution errors in track_glob and read_glob (#83357)
  • Turbopack: throw large static metadata error earlier (#82939)
  • fix: error overlay not closing when backdrop clicked (#83981)
  • Turbopack: flush Node.js worker IPC on error (#84077)

Misc Changes

  • [CNA] use linter preference (#83194)
  • CI: use KV for test timing data (#83745)

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=eslint-config-next&package-manager=npm_and_yarn&previous-version=15.5.2&new-version=15.5.6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) You can trigger a rebase of this PR by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
> **Note** > Automatic rebases have been disabled on this pull request as it has been open for over 30 days. Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Lluis Agusti Co-authored-by: Ubbe --- autogpt_platform/frontend/package.json | 2 +- autogpt_platform/frontend/pnpm-lock.yaml | 269 +++++++++++++++++------ 2 files changed, 201 insertions(+), 70 deletions(-) diff --git a/autogpt_platform/frontend/package.json b/autogpt_platform/frontend/package.json index 89c367c788..ff2175baa1 100644 --- a/autogpt_platform/frontend/package.json +++ b/autogpt_platform/frontend/package.json @@ -137,7 +137,7 @@ "concurrently": "9.2.1", "cross-env": "10.1.0", "eslint": "8.57.1", - "eslint-config-next": "15.5.2", + "eslint-config-next": "15.5.7", "eslint-plugin-storybook": "9.1.5", "import-in-the-middle": "1.14.2", "msw": "2.11.6", diff --git a/autogpt_platform/frontend/pnpm-lock.yaml b/autogpt_platform/frontend/pnpm-lock.yaml index de9e029fac..d1d832549a 100644 --- a/autogpt_platform/frontend/pnpm-lock.yaml +++ b/autogpt_platform/frontend/pnpm-lock.yaml @@ -331,8 +331,8 @@ importers: specifier: 8.57.1 version: 8.57.1 eslint-config-next: - specifier: 15.5.2 - version: 15.5.2(eslint@8.57.1)(typescript@5.9.3) + specifier: 15.5.7 + version: 15.5.7(eslint@8.57.1)(typescript@5.9.3) eslint-plugin-storybook: specifier: 9.1.5 version: 9.1.5(eslint@8.57.1)(storybook@9.1.5(@testing-library/dom@10.4.1)(msw@2.11.6(@types/node@24.10.0)(typescript@5.9.3))(prettier@3.6.2))(typescript@5.9.3) @@ -986,12 +986,15 @@ packages: '@date-fns/tz@1.4.1': resolution: {integrity: sha512-P5LUNhtbj6YfI3iJjw5EL9eUAG6OitD0W3fWQcpQjDRc/QIsL0tRNuO1PcDvPccWL1fSTXXdE1ds+l95DV/OFA==} - '@emnapi/core@1.5.0': - resolution: {integrity: sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg==} + '@emnapi/core@1.7.1': + resolution: {integrity: sha512-o1uhUASyo921r2XtHYOHy7gdkGLge8ghBEQHMWmyJFoXlpU58kIrhhN3w26lpQb6dspetweapMn2CSNwQ8I4wg==} '@emnapi/runtime@1.5.0': resolution: {integrity: sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ==} + '@emnapi/runtime@1.7.1': + resolution: {integrity: sha512-PVtJr5CmLwYAU9PZDMITZoR5iAOShYREoR45EyyLrbntV50mdePTgUn4AmOw90Ifcj+x2kRjdzr1HP3RrNiHGA==} + '@emnapi/wasi-threads@1.1.0': resolution: {integrity: sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==} @@ -1329,6 +1332,10 @@ packages: resolution: {integrity: sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==} engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} + '@eslint-community/regexpp@4.12.2': + resolution: {integrity: sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==} + engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} + '@eslint/eslintrc@2.1.4': resolution: {integrity: sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -1605,8 +1612,8 @@ packages: '@next/env@15.4.10': resolution: {integrity: sha512-knhmoJ0Vv7VRf6pZEPSnciUG1S4bIhWx+qTYBW/AjxEtlzsiNORPk8sFDCEvqLfmKuey56UB9FL1UdHEV3uBrg==} - '@next/eslint-plugin-next@15.5.2': - resolution: {integrity: sha512-lkLrRVxcftuOsJNhWatf1P2hNVfh98k/omQHrCEPPriUypR6RcS13IvLdIrEvkm9AH2Nu2YpR5vLqBuy6twH3Q==} + '@next/eslint-plugin-next@15.5.7': + resolution: {integrity: 
sha512-DtRU2N7BkGr8r+pExfuWHwMEPX5SD57FeA6pxdgCHODo+b/UgIgjE+rgWKtJAbEbGhVZ2jtHn4g3wNhWFoNBQQ==} '@next/swc-darwin-arm64@15.4.8': resolution: {integrity: sha512-Pf6zXp7yyQEn7sqMxur6+kYcywx5up1J849psyET7/8pG2gQTVMjU3NzgIt8SeEP5to3If/SaWmaA6H6ysBr1A==} @@ -2622,8 +2629,8 @@ packages: '@rtsao/scc@1.1.0': resolution: {integrity: sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==} - '@rushstack/eslint-patch@1.12.0': - resolution: {integrity: sha512-5EwMtOqvJMMa3HbmxLlF74e+3/HhwBTMcvt3nqVJgGCozO6hzIPOBlwm8mGVNR9SN2IJpxSnlxczyDjcn7qIyw==} + '@rushstack/eslint-patch@1.15.0': + resolution: {integrity: sha512-ojSshQPKwVvSMR8yT2L/QtUkV5SXi/IfDiJ4/8d6UbTPjiHVmxZzUAzGD8Tzks1b9+qQkZa0isUOvYObedITaw==} '@scarf/scarf@1.4.0': resolution: {integrity: sha512-xxeapPiUXdZAE3che6f3xogoJPeZgig6omHEy1rIY5WVsB3H2BHNnZH+gHG6x91SCWyQCzWGsuL2Hh3ClO5/qQ==} @@ -3097,8 +3104,8 @@ packages: peerDependencies: '@testing-library/dom': '>=7.21.4' - '@tybys/wasm-util@0.10.0': - resolution: {integrity: sha512-VyyPYFlOMNylG45GoAe0xDoLwWuowvf92F9kySqzYh8vmYm7D2u4iUJKa1tOUpS70Ku13ASrOkS4ScXFsTaCNQ==} + '@tybys/wasm-util@0.10.1': + resolution: {integrity: sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==} '@types/aria-query@5.0.4': resolution: {integrity: sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==} @@ -3288,16 +3295,16 @@ packages: '@types/ws@8.18.1': resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==} - '@typescript-eslint/eslint-plugin@8.43.0': - resolution: {integrity: sha512-8tg+gt7ENL7KewsKMKDHXR1vm8tt9eMxjJBYINf6swonlWgkYn5NwyIgXpbbDxTNU5DgpDFfj95prcTq2clIQQ==} + '@typescript-eslint/eslint-plugin@8.48.1': + resolution: {integrity: sha512-X63hI1bxl5ohelzr0LY5coufyl0LJNthld+abwxpCoo6Gq+hSqhKwci7MUWkXo67mzgUK6YFByhmaHmUcuBJmA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - '@typescript-eslint/parser': ^8.43.0 + '@typescript-eslint/parser': ^8.48.1 eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/parser@8.43.0': - resolution: {integrity: sha512-B7RIQiTsCBBmY+yW4+ILd6mF5h1FUwJsVvpqkrgpszYifetQ2Ke+Z4u6aZh0CblkUGIdR59iYVyXqqZGkZ3aBw==} + '@typescript-eslint/parser@8.48.1': + resolution: {integrity: sha512-PC0PDZfJg8sP7cmKe6L3QIL8GZwU5aRvUFedqSIpw3B+QjRSUZeeITC2M5XKeMXEzL6wccN196iy3JLwKNvDVA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 @@ -3315,6 +3322,12 @@ packages: peerDependencies: typescript: '>=4.8.4 <6.0.0' + '@typescript-eslint/project-service@8.48.1': + resolution: {integrity: sha512-HQWSicah4s9z2/HifRPQ6b6R7G+SBx64JlFQpgSSHWPKdvCZX57XCbszg/bapbRsOEv42q5tayTYcEFpACcX1w==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <6.0.0' + '@typescript-eslint/scope-manager@8.43.0': resolution: {integrity: sha512-daSWlQ87ZhsjrbMLvpuuMAt3y4ba57AuvadcR7f3nl8eS3BjRc8L9VLxFLk92RL5xdXOg6IQ+qKjjqNEimGuAg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -3323,6 +3336,10 @@ packages: resolution: {integrity: sha512-LF4b/NmGvdWEHD2H4MsHD8ny6JpiVNDzrSZr3CsckEgCbAGZbYM4Cqxvi9L+WqDMT+51Ozy7lt2M+d0JLEuBqA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@typescript-eslint/scope-manager@8.48.1': + resolution: {integrity: sha512-rj4vWQsytQbLxC5Bf4XwZ0/CKd362DkWMUkviT7DCS057SK64D5lH74sSGzhI6PDD2HCEq02xAP9cX68dYyg1w==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + 
'@typescript-eslint/tsconfig-utils@8.43.0': resolution: {integrity: sha512-ALC2prjZcj2YqqL5X/bwWQmHA2em6/94GcbB/KKu5SX3EBDOsqztmmX1kMkvAJHzxk7TazKzJfFiEIagNV3qEA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -3335,8 +3352,14 @@ packages: peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/type-utils@8.43.0': - resolution: {integrity: sha512-qaH1uLBpBuBBuRf8c1mLJ6swOfzCXryhKND04Igr4pckzSEW9JX5Aw9AgW00kwfjWJF0kk0ps9ExKTfvXfw4Qg==} + '@typescript-eslint/tsconfig-utils@8.48.1': + resolution: {integrity: sha512-k0Jhs4CpEffIBm6wPaCXBAD7jxBtrHjrSgtfCjUvPp9AZ78lXKdTR8fxyZO5y4vWNlOvYXRtngSZNSn+H53Jkw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/type-utils@8.48.1': + resolution: {integrity: sha512-1jEop81a3LrJQLTf/1VfPQdhIY4PlGDBc/i67EVWObrtvcziysbLN3oReexHOM6N3jyXgCrkBsZpqwH0hiDOQg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 @@ -3350,6 +3373,10 @@ packages: resolution: {integrity: sha512-lNCWCbq7rpg7qDsQrd3D6NyWYu+gkTENkG5IKYhUIcxSb59SQC/hEQ+MrG4sTgBVghTonNWq42bA/d4yYumldQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@typescript-eslint/types@8.48.1': + resolution: {integrity: sha512-+fZ3LZNeiELGmimrujsDCT4CRIbq5oXdHe7chLiW8qzqyPMnn1puNstCrMNVAqwcl2FdIxkuJ4tOs/RFDBVc/Q==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@typescript-eslint/typescript-estree@8.43.0': resolution: {integrity: sha512-7Vv6zlAhPb+cvEpP06WXXy/ZByph9iL6BQRBDj4kmBsW98AqEeQHlj/13X+sZOrKSo9/rNKH4Ul4f6EICREFdw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -3362,6 +3389,12 @@ packages: peerDependencies: typescript: '>=4.8.4 <6.0.0' + '@typescript-eslint/typescript-estree@8.48.1': + resolution: {integrity: sha512-/9wQ4PqaefTK6POVTjJaYS0bynCgzh6ClJHGSBj06XEHjkfylzB+A3qvyaXnErEZSaxhIo4YdyBgq6j4RysxDg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <6.0.0' + '@typescript-eslint/utils@8.43.0': resolution: {integrity: sha512-S1/tEmkUeeswxd0GGcnwuVQPFWo8NzZTOMxCvw8BX7OMxnNae+i8Tm7REQen/SwUIPoPqfKn7EaZ+YLpiB3k9g==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -3376,6 +3409,13 @@ packages: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <6.0.0' + '@typescript-eslint/utils@8.48.1': + resolution: {integrity: sha512-fAnhLrDjiVfey5wwFRwrweyRlCmdz5ZxXz2G/4cLn0YDLjTapmN4gcCsTBR1N2rWnZSDeWpYtgLDsJt+FpmcwA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <6.0.0' + '@typescript-eslint/visitor-keys@8.43.0': resolution: {integrity: sha512-T+S1KqRD4sg/bHfLwrpF/K3gQLBM1n7Rp7OjjikjTEssI2YJzQpi5WXoynOaQ93ERIuq3O8RBTOUYDKszUCEHw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -3384,6 +3424,10 @@ packages: resolution: {integrity: sha512-tUFMXI4gxzzMXt4xpGJEsBsTox0XbNQ1y94EwlD/CuZwFcQP79xfQqMhau9HsRc/J0cAPA/HZt1dZPtGn9V/7w==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@typescript-eslint/visitor-keys@8.48.1': + resolution: {integrity: sha512-BmxxndzEWhE4TIEEMBs8lP3MBWN3jFPs/p6gPm/wkv02o41hI6cq9AuSmGAaTTHPtA1FTi2jBre4A9rm5ZmX+Q==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@ungap/structured-clone@1.3.0': resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} @@ -4585,8 +4629,8 @@ packages: resolution: {integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==} engines: {node: '>=12'} - 
eslint-config-next@15.5.2: - resolution: {integrity: sha512-3hPZghsLupMxxZ2ggjIIrat/bPniM2yRpsVPVM40rp8ZMzKWOJp2CGWn7+EzoV2ddkUr5fxNfHpF+wU1hGt/3g==} + eslint-config-next@15.5.7: + resolution: {integrity: sha512-nU/TRGHHeG81NeLW5DeQT5t6BDUqbpsNQTvef1ld/tqHT+/zTx60/TIhKnmPISTTe++DVo+DLxDmk4rnwHaZVw==} peerDependencies: eslint: ^7.23.0 || ^8.0.0 || ^9.0.0 typescript: '>=3.3.1' @@ -4918,6 +4962,10 @@ packages: peerDependencies: next: '>=13.2.0' + generator-function@2.0.1: + resolution: {integrity: sha512-SFdFmIJi+ybC0vjlHN0ZGVGHc3lgE0DxPAT0djjVg+kjOnSqclqmj0KQ7ykTOLP6YxoqOvuAODGdcHJn+43q3g==} + engines: {node: '>= 0.4'} + gensync@1.0.0-beta.2: resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} engines: {node: '>=6.9.0'} @@ -4946,8 +4994,8 @@ packages: resolution: {integrity: sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==} engines: {node: '>= 0.4'} - get-tsconfig@4.10.1: - resolution: {integrity: sha512-auHyJ4AgMz7vgS8Hp3N6HXSmlMdUyhSUrfBF16w153rxtLIEOE+HGqaBppczZvnHLqQJfiHotCYpNhl0lUROFQ==} + get-tsconfig@4.13.0: + resolution: {integrity: sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ==} github-slugger@2.0.0: resolution: {integrity: sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw==} @@ -5282,6 +5330,10 @@ packages: resolution: {integrity: sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==} engines: {node: '>= 0.4'} + is-generator-function@1.1.2: + resolution: {integrity: sha512-upqt1SkGkODW9tsGNG5mtXTXtECizwtS2kA161M+gJPc1xdb/Ax629af6YrTwcOeQHbewrPNlE5Dx7kzvXTizA==} + engines: {node: '>= 0.4'} + is-glob@4.0.3: resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} engines: {node: '>=0.10.0'} @@ -5903,8 +5955,8 @@ packages: engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true - napi-postinstall@0.3.3: - resolution: {integrity: sha512-uTp172LLXSxuSYHv/kou+f6KW3SMppU9ivthaVTXian9sOt3XM/zHYHpRZiLgQoxeWfYUnslNWQHF1+G71xcow==} + napi-postinstall@0.3.4: + resolution: {integrity: sha512-PHI5f1O0EP5xJ9gQmFGMS6IZcrVvTjpXjz7Na41gTE7eE2hK11lg04CECCYEEjdc17EV4DO+fkGEtt7TpTaTiQ==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} hasBin: true @@ -6769,6 +6821,11 @@ packages: engines: {node: '>= 0.4'} hasBin: true + resolve@1.22.11: + resolution: {integrity: sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==} + engines: {node: '>= 0.4'} + hasBin: true + resolve@1.22.8: resolution: {integrity: sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==} hasBin: true @@ -7858,7 +7915,7 @@ snapshots: '@babel/helper-plugin-utils': 7.27.1 debug: 4.4.3 lodash.debounce: 4.0.8 - resolve: 1.22.10 + resolve: 1.22.11 transitivePeerDependencies: - supports-color @@ -8550,7 +8607,7 @@ snapshots: '@date-fns/tz@1.4.1': {} - '@emnapi/core@1.5.0': + '@emnapi/core@1.7.1': dependencies: '@emnapi/wasi-threads': 1.1.0 tslib: 2.8.1 @@ -8561,6 +8618,11 @@ snapshots: tslib: 2.8.1 optional: true + '@emnapi/runtime@1.7.1': + dependencies: + tslib: 2.8.1 + optional: true + '@emnapi/wasi-threads@1.1.0': dependencies: tslib: 2.8.1 @@ -8739,6 +8801,8 @@ snapshots: '@eslint-community/regexpp@4.12.1': {} + '@eslint-community/regexpp@4.12.2': {} + '@eslint/eslintrc@2.1.4': dependencies: ajv: 6.12.6 @@ -8996,16 +9060,16 @@ 
snapshots: '@napi-rs/wasm-runtime@0.2.12': dependencies: - '@emnapi/core': 1.5.0 - '@emnapi/runtime': 1.5.0 - '@tybys/wasm-util': 0.10.0 + '@emnapi/core': 1.7.1 + '@emnapi/runtime': 1.7.1 + '@tybys/wasm-util': 0.10.1 optional: true '@neoconfetti/react@1.0.0': {} '@next/env@15.4.10': {} - '@next/eslint-plugin-next@15.5.2': + '@next/eslint-plugin-next@15.5.7': dependencies: fast-glob: 3.3.1 @@ -10115,7 +10179,7 @@ snapshots: '@rtsao/scc@1.1.0': {} - '@rushstack/eslint-patch@1.12.0': {} + '@rushstack/eslint-patch@1.15.0': {} '@scarf/scarf@1.4.0': {} @@ -10867,7 +10931,7 @@ snapshots: dependencies: '@testing-library/dom': 10.4.1 - '@tybys/wasm-util@0.10.0': + '@tybys/wasm-util@0.10.1': dependencies: tslib: 2.8.1 optional: true @@ -11065,14 +11129,14 @@ snapshots: dependencies: '@types/node': 24.10.0 - '@typescript-eslint/eslint-plugin@8.43.0(@typescript-eslint/parser@8.43.0(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1)(typescript@5.9.3)': + '@typescript-eslint/eslint-plugin@8.48.1(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1)(typescript@5.9.3)': dependencies: - '@eslint-community/regexpp': 4.12.1 - '@typescript-eslint/parser': 8.43.0(eslint@8.57.1)(typescript@5.9.3) - '@typescript-eslint/scope-manager': 8.43.0 - '@typescript-eslint/type-utils': 8.43.0(eslint@8.57.1)(typescript@5.9.3) - '@typescript-eslint/utils': 8.43.0(eslint@8.57.1)(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.43.0 + '@eslint-community/regexpp': 4.12.2 + '@typescript-eslint/parser': 8.48.1(eslint@8.57.1)(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.48.1 + '@typescript-eslint/type-utils': 8.48.1(eslint@8.57.1)(typescript@5.9.3) + '@typescript-eslint/utils': 8.48.1(eslint@8.57.1)(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.48.1 eslint: 8.57.1 graphemer: 1.4.0 ignore: 7.0.5 @@ -11082,12 +11146,12 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@8.43.0(eslint@8.57.1)(typescript@5.9.3)': + '@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3)': dependencies: - '@typescript-eslint/scope-manager': 8.43.0 - '@typescript-eslint/types': 8.43.0 - '@typescript-eslint/typescript-estree': 8.43.0(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.43.0 + '@typescript-eslint/scope-manager': 8.48.1 + '@typescript-eslint/types': 8.48.1 + '@typescript-eslint/typescript-estree': 8.48.1(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.48.1 debug: 4.4.3 eslint: 8.57.1 typescript: 5.9.3 @@ -11097,7 +11161,7 @@ snapshots: '@typescript-eslint/project-service@8.43.0(typescript@5.9.3)': dependencies: '@typescript-eslint/tsconfig-utils': 8.43.0(typescript@5.9.3) - '@typescript-eslint/types': 8.43.0 + '@typescript-eslint/types': 8.48.1 debug: 4.4.3 typescript: 5.9.3 transitivePeerDependencies: @@ -11106,7 +11170,16 @@ snapshots: '@typescript-eslint/project-service@8.46.2(typescript@5.9.3)': dependencies: '@typescript-eslint/tsconfig-utils': 8.46.2(typescript@5.9.3) - '@typescript-eslint/types': 8.46.2 + '@typescript-eslint/types': 8.48.1 + debug: 4.4.3 + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/project-service@8.48.1(typescript@5.9.3)': + dependencies: + '@typescript-eslint/tsconfig-utils': 8.48.1(typescript@5.9.3) + '@typescript-eslint/types': 8.48.1 debug: 4.4.3 typescript: 5.9.3 transitivePeerDependencies: @@ -11122,6 +11195,11 @@ snapshots: '@typescript-eslint/types': 8.46.2 '@typescript-eslint/visitor-keys': 8.46.2 + '@typescript-eslint/scope-manager@8.48.1': 
+ dependencies: + '@typescript-eslint/types': 8.48.1 + '@typescript-eslint/visitor-keys': 8.48.1 + '@typescript-eslint/tsconfig-utils@8.43.0(typescript@5.9.3)': dependencies: typescript: 5.9.3 @@ -11130,11 +11208,15 @@ snapshots: dependencies: typescript: 5.9.3 - '@typescript-eslint/type-utils@8.43.0(eslint@8.57.1)(typescript@5.9.3)': + '@typescript-eslint/tsconfig-utils@8.48.1(typescript@5.9.3)': dependencies: - '@typescript-eslint/types': 8.43.0 - '@typescript-eslint/typescript-estree': 8.43.0(typescript@5.9.3) - '@typescript-eslint/utils': 8.43.0(eslint@8.57.1)(typescript@5.9.3) + typescript: 5.9.3 + + '@typescript-eslint/type-utils@8.48.1(eslint@8.57.1)(typescript@5.9.3)': + dependencies: + '@typescript-eslint/types': 8.48.1 + '@typescript-eslint/typescript-estree': 8.48.1(typescript@5.9.3) + '@typescript-eslint/utils': 8.48.1(eslint@8.57.1)(typescript@5.9.3) debug: 4.4.3 eslint: 8.57.1 ts-api-utils: 2.1.0(typescript@5.9.3) @@ -11146,6 +11228,8 @@ snapshots: '@typescript-eslint/types@8.46.2': {} + '@typescript-eslint/types@8.48.1': {} + '@typescript-eslint/typescript-estree@8.43.0(typescript@5.9.3)': dependencies: '@typescript-eslint/project-service': 8.43.0(typescript@5.9.3) @@ -11156,7 +11240,7 @@ snapshots: fast-glob: 3.3.3 is-glob: 4.0.3 minimatch: 9.0.5 - semver: 7.7.2 + semver: 7.7.3 ts-api-utils: 2.1.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: @@ -11178,6 +11262,21 @@ snapshots: transitivePeerDependencies: - supports-color + '@typescript-eslint/typescript-estree@8.48.1(typescript@5.9.3)': + dependencies: + '@typescript-eslint/project-service': 8.48.1(typescript@5.9.3) + '@typescript-eslint/tsconfig-utils': 8.48.1(typescript@5.9.3) + '@typescript-eslint/types': 8.48.1 + '@typescript-eslint/visitor-keys': 8.48.1 + debug: 4.4.3 + minimatch: 9.0.5 + semver: 7.7.3 + tinyglobby: 0.2.15 + ts-api-utils: 2.1.0(typescript@5.9.3) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + '@typescript-eslint/utils@8.43.0(eslint@8.57.1)(typescript@5.9.3)': dependencies: '@eslint-community/eslint-utils': 4.9.0(eslint@8.57.1) @@ -11200,6 +11299,17 @@ snapshots: transitivePeerDependencies: - supports-color + '@typescript-eslint/utils@8.48.1(eslint@8.57.1)(typescript@5.9.3)': + dependencies: + '@eslint-community/eslint-utils': 4.9.0(eslint@8.57.1) + '@typescript-eslint/scope-manager': 8.48.1 + '@typescript-eslint/types': 8.48.1 + '@typescript-eslint/typescript-estree': 8.48.1(typescript@5.9.3) + eslint: 8.57.1 + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + '@typescript-eslint/visitor-keys@8.43.0': dependencies: '@typescript-eslint/types': 8.43.0 @@ -11210,6 +11320,11 @@ snapshots: '@typescript-eslint/types': 8.46.2 eslint-visitor-keys: 4.2.1 + '@typescript-eslint/visitor-keys@8.48.1': + dependencies: + '@typescript-eslint/types': 8.48.1 + eslint-visitor-keys: 4.2.1 + '@ungap/structured-clone@1.3.0': {} '@unrs/resolver-binding-android-arm-eabi@1.11.1': @@ -12532,16 +12647,16 @@ snapshots: escape-string-regexp@5.0.0: {} - eslint-config-next@15.5.2(eslint@8.57.1)(typescript@5.9.3): + eslint-config-next@15.5.7(eslint@8.57.1)(typescript@5.9.3): dependencies: - '@next/eslint-plugin-next': 15.5.2 - '@rushstack/eslint-patch': 1.12.0 - '@typescript-eslint/eslint-plugin': 8.43.0(@typescript-eslint/parser@8.43.0(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1)(typescript@5.9.3) - '@typescript-eslint/parser': 8.43.0(eslint@8.57.1)(typescript@5.9.3) + '@next/eslint-plugin-next': 15.5.7 + '@rushstack/eslint-patch': 1.15.0 + 
'@typescript-eslint/eslint-plugin': 8.48.1(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1)(typescript@5.9.3) + '@typescript-eslint/parser': 8.48.1(eslint@8.57.1)(typescript@5.9.3) eslint: 8.57.1 eslint-import-resolver-node: 0.3.9 eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@8.57.1) - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.43.0(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1) + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1) eslint-plugin-jsx-a11y: 6.10.2(eslint@8.57.1) eslint-plugin-react: 7.37.5(eslint@8.57.1) eslint-plugin-react-hooks: 5.2.0(eslint@8.57.1) @@ -12556,7 +12671,7 @@ snapshots: dependencies: debug: 3.2.7 is-core-module: 2.16.1 - resolve: 1.22.10 + resolve: 1.22.11 transitivePeerDependencies: - supports-color @@ -12565,28 +12680,28 @@ snapshots: '@nolyfill/is-core-module': 1.0.39 debug: 4.4.3 eslint: 8.57.1 - get-tsconfig: 4.10.1 + get-tsconfig: 4.13.0 is-bun-module: 2.0.0 stable-hash: 0.0.5 tinyglobby: 0.2.15 unrs-resolver: 1.11.1 optionalDependencies: - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.43.0(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1) + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1) transitivePeerDependencies: - supports-color - eslint-module-utils@2.12.1(@typescript-eslint/parser@8.43.0(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1): + eslint-module-utils@2.12.1(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1): dependencies: debug: 3.2.7 optionalDependencies: - '@typescript-eslint/parser': 8.43.0(eslint@8.57.1)(typescript@5.9.3) + '@typescript-eslint/parser': 8.48.1(eslint@8.57.1)(typescript@5.9.3) eslint: 8.57.1 eslint-import-resolver-node: 0.3.9 eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@8.57.1) transitivePeerDependencies: - supports-color - eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.43.0(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1): + eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1): dependencies: '@rtsao/scc': 1.1.0 array-includes: 3.1.9 @@ -12597,7 +12712,7 @@ snapshots: doctrine: 2.1.0 eslint: 8.57.1 eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.43.0(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1) + eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1) hasown: 2.0.2 is-core-module: 2.16.1 is-glob: 4.0.3 @@ -12609,7 +12724,7 @@ snapshots: string.prototype.trimend: 1.0.9 tsconfig-paths: 3.15.0 optionalDependencies: - '@typescript-eslint/parser': 8.43.0(eslint@8.57.1)(typescript@5.9.3) + '@typescript-eslint/parser': 8.48.1(eslint@8.57.1)(typescript@5.9.3) transitivePeerDependencies: - eslint-import-resolver-typescript - 
eslint-import-resolver-webpack @@ -12958,6 +13073,8 @@ snapshots: dependencies: next: 15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + generator-function@2.0.1: {} + gensync@1.0.0-beta.2: {} get-caller-file@2.0.5: {} @@ -12990,7 +13107,7 @@ snapshots: es-errors: 1.3.0 get-intrinsic: 1.3.0 - get-tsconfig@4.10.1: + get-tsconfig@4.13.0: dependencies: resolve-pkg-maps: 1.0.0 @@ -13357,7 +13474,7 @@ snapshots: is-bun-module@2.0.0: dependencies: - semver: 7.7.2 + semver: 7.7.3 is-callable@1.2.7: {} @@ -13395,6 +13512,14 @@ snapshots: has-tostringtag: 1.0.2 safe-regex-test: 1.1.0 + is-generator-function@1.1.2: + dependencies: + call-bound: 1.0.4 + generator-function: 2.0.1 + get-proto: 1.0.1 + has-tostringtag: 1.0.2 + safe-regex-test: 1.1.0 + is-glob@4.0.3: dependencies: is-extglob: 2.1.1 @@ -14215,7 +14340,7 @@ snapshots: nanoid@3.3.11: {} - napi-postinstall@0.3.3: {} + napi-postinstall@0.3.4: {} natural-compare@1.4.0: {} @@ -15185,6 +15310,12 @@ snapshots: path-parse: 1.0.7 supports-preserve-symlinks-flag: 1.0.0 + resolve@1.22.11: + dependencies: + is-core-module: 2.16.1 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + resolve@1.22.8: dependencies: is-core-module: 2.16.1 @@ -15996,7 +16127,7 @@ snapshots: unrs-resolver@1.11.1: dependencies: - napi-postinstall: 0.3.3 + napi-postinstall: 0.3.4 optionalDependencies: '@unrs/resolver-binding-android-arm-eabi': 1.11.1 '@unrs/resolver-binding-android-arm64': 1.11.1 @@ -16224,7 +16355,7 @@ snapshots: is-async-function: 2.1.1 is-date-object: 1.1.0 is-finalizationregistry: 1.1.1 - is-generator-function: 1.1.0 + is-generator-function: 1.1.2 is-regex: 1.2.1 is-weakref: 1.1.1 isarray: 2.0.5 From 4c474417bc942b41f80531bd6e6aef57f254f7ce Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 18 Dec 2025 18:58:27 +0100 Subject: [PATCH 07/25] chore(frontend/deps-dev): bump import-in-the-middle from 1.14.2 to 2.0.0 in /autogpt_platform/frontend (#11357) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [import-in-the-middle](https://github.com/nodejs/import-in-the-middle) from 1.14.2 to 2.0.0.
Release notes

Sourced from import-in-the-middle's releases.

import-in-the-middle: v2.0.0

2.0.0 (2025-10-14)

⚠ BREAKING CHANGES

This was only a new major out of an abundance of caution. The hook code has been converted to ESM to work around some loader issues. There should actually be no breaking changes when using import-in-the-middle/hook.mjs or the exported Hook API.
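For reference, the exported Hook API mentioned here is typically used along the lines below (a sketch based on the package's documented usage, not code from this PR; it assumes the loader is registered, e.g. via `--loader=import-in-the-middle/hook.mjs`):

```ts
import { Hook } from "import-in-the-middle";

// Intercept ESM imports of "fs" and patch the exported bindings in place.
// `exported` is the module namespace object; wrap or replace members here.
Hook(["fs"], (exported, name, baseDir) => {
  console.log(`intercepted import of ${name} from ${baseDir ?? "builtin"}`);
});
```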

Features

  • convert all modules running in loader thread to ESM (#210) (da7c7a6)

import-in-the-middle: v1.15.0

1.15.0 (2025-10-09)

Features

  • Compatibility with specifier imports (#211) (83d662a)

import-in-the-middle: v1.14.4

1.14.4 (2025-09-25)

Bug Fixes

import-in-the-middle: v1.14.3

1.14.3 (2025-09-24)

Bug Fixes

Changelog

Sourced from import-in-the-middle's changelog.

2.0.0 (2025-10-14)

⚠ BREAKING CHANGES

Converting all modules running in the loader thread to ESM should not be a breaking change for most users since it primarily affects internal implementation details. However, if you were referencing internal CJS files like hook.js this will no longer work.

Features

  • convert all modules running in loader thread to ESM (#210) (da7c7a6)

1.15.0 (2025-10-09)

Features

  • Compatibility with specifier imports (#211) (83d662a)

1.14.4 (2025-09-25)

Bug Fixes

1.14.3 (2025-09-24)

Bug Fixes

Maintainer changes

This version was pushed to npm by [GitHub Actions](https://www.npmjs.com/~GitHub Actions), a new releaser for import-in-the-middle since your current version.


[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=import-in-the-middle&package-manager=npm_and_yarn&previous-version=1.14.2&new-version=2.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) You can trigger a rebase of this PR by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
> **Note** > Automatic rebases have been disabled on this pull request as it has been open for over 30 days. --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Lluis Agusti Co-authored-by: Ubbe --- autogpt_platform/frontend/package.json | 1 - autogpt_platform/frontend/pnpm-lock.yaml | 13 ------------- 2 files changed, 14 deletions(-) diff --git a/autogpt_platform/frontend/package.json b/autogpt_platform/frontend/package.json index ff2175baa1..4cbd867cd8 100644 --- a/autogpt_platform/frontend/package.json +++ b/autogpt_platform/frontend/package.json @@ -139,7 +139,6 @@ "eslint": "8.57.1", "eslint-config-next": "15.5.7", "eslint-plugin-storybook": "9.1.5", - "import-in-the-middle": "1.14.2", "msw": "2.11.6", "msw-storybook-addon": "2.0.6", "orval": "7.13.0", diff --git a/autogpt_platform/frontend/pnpm-lock.yaml b/autogpt_platform/frontend/pnpm-lock.yaml index d1d832549a..54843fc589 100644 --- a/autogpt_platform/frontend/pnpm-lock.yaml +++ b/autogpt_platform/frontend/pnpm-lock.yaml @@ -336,9 +336,6 @@ importers: eslint-plugin-storybook: specifier: 9.1.5 version: 9.1.5(eslint@8.57.1)(storybook@9.1.5(@testing-library/dom@10.4.1)(msw@2.11.6(@types/node@24.10.0)(typescript@5.9.3))(prettier@3.6.2))(typescript@5.9.3) - import-in-the-middle: - specifier: 1.14.2 - version: 1.14.2 msw: specifier: 2.11.6 version: 2.11.6(@types/node@24.10.0)(typescript@5.9.3) @@ -5216,9 +5213,6 @@ packages: resolution: {integrity: sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==} engines: {node: '>=6'} - import-in-the-middle@1.14.2: - resolution: {integrity: sha512-5tCuY9BV8ujfOpwtAGgsTx9CGUapcFMEEyByLv1B+v2+6DhAcw+Zr0nhQT7uwaZ7DiourxFEscghOR8e1aPLQw==} - import-in-the-middle@2.0.0: resolution: {integrity: sha512-yNZhyQYqXpkT0AKq3F3KLasUSK4fHvebNH5hOsKQw2dhGSALvQ4U0BqUc5suziKvydO5u5hgN2hy1RJaho8U5A==} @@ -13391,13 +13385,6 @@ snapshots: parent-module: 1.0.1 resolve-from: 4.0.0 - import-in-the-middle@1.14.2: - dependencies: - acorn: 8.15.0 - acorn-import-attributes: 1.9.5(acorn@8.15.0) - cjs-module-lexer: 1.4.3 - module-details-from-path: 1.0.4 - import-in-the-middle@2.0.0: dependencies: acorn: 8.15.0 From cd3e35df9ef725b2d717c96bfd94a1591329eb84 Mon Sep 17 00:00:00 2001 From: Ubbe Date: Thu, 18 Dec 2025 19:33:30 +0100 Subject: [PATCH 08/25] fix(frontend): small library/mobile improvements (#11626) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Changes 🏗️ Adds the following improvements: ### Prevent credential row overflowing on mobile 📱 **Before** Screenshot 2025-12-15 at 16 42 05 **After** Screenshot 2025-12-15 at 16 44 22 _Just hide the ****** on mobile..._ ### Make touch targets bigger on 📱 on the mobile menu **Before** Screenshot 2025-12-15 at 16 58 28 Touch targets were quite small on mobile, especially for people with big fingers... **After** Screenshot 2025-12-15 at 16 54 02 ### New `` component Screenshot 2025-12-15 at 16 48 20 A component that will render text like ``, but automatically displays `...` and the full text content on a tooltip if it detects there is no space for the full text length. Pretty useful for the type of dashboard we are building, where sometimes titles or user-generated content can be quite long, making the UI look whack. ### Google Drive Picker Only allow the removal of files if it is not in read-only mode. 
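For reference, the new `OverflowText` atom described above is used like any other text atom; a minimal sketch (the wrapper width and class names are arbitrary, chosen only to force truncation, while the import path matches the file added in this PR):

```tsx
import { OverflowText } from "@/components/atoms/OverflowText/OverflowText";

// Long titles truncate with an ellipsis and expose the full text in a
// tooltip on hover; short titles render normally with no tooltip.
export function AgentCardTitle({ title }: { title: string }) {
  return (
    <div className="max-w-[240px]">
      <OverflowText value={title} className="font-medium" />
    </div>
  );
}
```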
## Checklist 📋 ### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] Checkout branch locally - [x] Test the above --- .../CredentialRow/CredentialRow.tsx | 2 +- .../modals/RunAgentInputs/RunAgentInputs.tsx | 2 +- .../OverflowText/OverflowText.stories.tsx | 157 ++++++++++++++++++ .../atoms/OverflowText/OverflowText.tsx | 100 +++++++++++ .../GoogleDrivePickerInput.tsx | 4 +- .../components/MobileNavbarMenuItem.tsx | 2 +- 6 files changed, 262 insertions(+), 5 deletions(-) create mode 100644 autogpt_platform/frontend/src/components/atoms/OverflowText/OverflowText.stories.tsx create mode 100644 autogpt_platform/frontend/src/components/atoms/OverflowText/OverflowText.tsx diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/components/CredentialRow/CredentialRow.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/components/CredentialRow/CredentialRow.tsx index 34c066e90d..7d6598d7be 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/components/CredentialRow/CredentialRow.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/components/CredentialRow/CredentialRow.tsx @@ -70,7 +70,7 @@ export function CredentialRow({ {"*".repeat(MASKED_KEY_LENGTH)} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/RunAgentInputs/RunAgentInputs.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/RunAgentInputs/RunAgentInputs.tsx index ea372193c5..d3e6fd9669 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/RunAgentInputs/RunAgentInputs.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/RunAgentInputs/RunAgentInputs.tsx @@ -103,7 +103,7 @@ export function RunAgentInputs({ value={value} onChange={onChange} className="w-full" - showRemoveButton={false} + showRemoveButton={!readOnly} /> ); break; diff --git a/autogpt_platform/frontend/src/components/atoms/OverflowText/OverflowText.stories.tsx b/autogpt_platform/frontend/src/components/atoms/OverflowText/OverflowText.stories.tsx new file mode 100644 index 0000000000..049948cd1b --- /dev/null +++ b/autogpt_platform/frontend/src/components/atoms/OverflowText/OverflowText.stories.tsx @@ -0,0 +1,157 @@ +import type { Meta, StoryObj } from "@storybook/nextjs"; +import { OverflowText } from "./OverflowText"; + +const meta: Meta = { + title: "Atoms/OverflowText", + component: OverflowText, + tags: ["autodocs"], + parameters: { + layout: "centered", + docs: { + description: { + component: + "Text component that automatically truncates overflowing content with ellipsis and shows a tooltip on hover when truncated. 
Supports both string and ReactNode values.", + }, + }, + }, + argTypes: { + value: { + control: "text", + description: "The text content to display (string or ReactNode)", + }, + className: { + control: "text", + description: "Additional CSS classes to customize styling", + }, + }, + args: { + value: "This is a sample text that may overflow", + className: "", + }, +}; + +export default meta; +type Story = StoryObj; + +export const Default: Story = { + render: function DefaultOverflowText(args) { + return ( +
+ +
+ ); + }, +}; + +export const ShortText: Story = { + args: { + value: "Short text", + }, + render: function ShortTextStory(args) { + return ( +
+ +
+ ); + }, +}; + +export const LongText: Story = { + args: { + value: + "This is a very long text that will definitely overflow and show a tooltip when you hover over it", + }, + render: function LongTextStory(args) { + return ( +
+ +
+ ); + }, +}; + +export const CustomStyling: Story = { + args: { + value: "Text with custom styling", + className: "text-lg font-semibold text-indigo-600", + }, + render: function CustomStylingStory(args) { + return ( +
+ +
+ ); + }, +}; + +export const WithReactNode: Story = { + args: { + value: ( + + Text with bold and italic content + + ), + }, + render: function WithReactNodeStory(args) { + return ( +
+ +
+ ); + }, +}; + +export const DifferentWidths: Story = { + render: function DifferentWidthsStory() { + const longText = + "This text will truncate differently depending on the container width"; + return ( +
+
+ Width: 200px +
+ +
+
+
+ Width: 300px +
+ +
+
+
+ Width: 400px +
+ +
+
+
+ ); + }, +}; + +export const FilePathExample: Story = { + args: { + value: "/very/long/path/to/a/file/that/might/overflow/in/the/ui.tsx", + }, + render: function FilePathExampleStory(args) { + return ( +
+ +
+ ); + }, +}; + +export const URLExample: Story = { + args: { + value: "https://example.com/very/long/url/path/that/might/overflow", + }, + render: function URLExampleStory(args) { + return ( +
+ +
+ ); + }, +}; diff --git a/autogpt_platform/frontend/src/components/atoms/OverflowText/OverflowText.tsx b/autogpt_platform/frontend/src/components/atoms/OverflowText/OverflowText.tsx new file mode 100644 index 0000000000..efc345f79c --- /dev/null +++ b/autogpt_platform/frontend/src/components/atoms/OverflowText/OverflowText.tsx @@ -0,0 +1,100 @@ +import { Text, type TextProps } from "@/components/atoms/Text/Text"; +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from "@/components/atoms/Tooltip/BaseTooltip"; +import { cn } from "@/lib/utils"; +import type { ReactNode } from "react"; +import { useEffect, useRef, useState } from "react"; + +interface Props extends Omit { + value: string | ReactNode; +} + +export function OverflowText(props: Props) { + const elementRef = useRef(null); + const [isTruncated, setIsTruncated] = useState(false); + + function updateTruncation() { + const element = elementRef.current; + + if (!element) { + return; + } + + const hasOverflow = element.scrollWidth > element.clientWidth; + + setIsTruncated(hasOverflow); + } + + function setupResizeListener() { + function handleResize() { + updateTruncation(); + } + + window.addEventListener("resize", handleResize); + + return function cleanupResizeListener() { + window.removeEventListener("resize", handleResize); + }; + } + + function setupObserver() { + const element = elementRef.current; + + if (!element || typeof ResizeObserver === "undefined") { + return undefined; + } + + function handleResizeObserver() { + updateTruncation(); + } + + const observer = new ResizeObserver(handleResizeObserver); + + observer.observe(element); + + return function disconnectObserver() { + observer.disconnect(); + }; + } + + useEffect(() => { + if (typeof props.value === "string") updateTruncation(); + }, [props.value]); + + useEffect(setupResizeListener, []); + useEffect(setupObserver, []); + + const { value, className, variant = "body", ...restProps } = props; + + const content = ( + + + {value} + + + ); + + if (isTruncated) { + return ( + + + {content} + + {typeof value === "string" ?

{value}

: value} +
+
+
+ ); + } + + return content; +} diff --git a/autogpt_platform/frontend/src/components/contextual/GoogleDrivePicker/GoogleDrivePickerInput.tsx b/autogpt_platform/frontend/src/components/contextual/GoogleDrivePicker/GoogleDrivePickerInput.tsx index 1db9809de2..2a1ada5012 100644 --- a/autogpt_platform/frontend/src/components/contextual/GoogleDrivePicker/GoogleDrivePickerInput.tsx +++ b/autogpt_platform/frontend/src/components/contextual/GoogleDrivePicker/GoogleDrivePickerInput.tsx @@ -5,7 +5,7 @@ import { Cross2Icon } from "@radix-ui/react-icons"; import React, { useCallback } from "react"; import { GoogleDrivePicker } from "./GoogleDrivePicker"; -export interface GoogleDrivePickerInputProps { +export interface Props { config: GoogleDrivePickerConfig; value: any; onChange: (value: any) => void; @@ -21,7 +21,7 @@ export function GoogleDrivePickerInput({ error, className, showRemoveButton = true, -}: GoogleDrivePickerInputProps) { +}: Props) { const [pickerError, setPickerError] = React.useState(null); const isMultiSelect = config.multiselect || false; const hasAutoCredentials = !!config.auto_credentials; diff --git a/autogpt_platform/frontend/src/components/layout/Navbar/components/MobileNavbar/components/MobileNavbarMenuItem.tsx b/autogpt_platform/frontend/src/components/layout/Navbar/components/MobileNavbar/components/MobileNavbarMenuItem.tsx index fb061f3e0a..fa190b63b7 100644 --- a/autogpt_platform/frontend/src/components/layout/Navbar/components/MobileNavbar/components/MobileNavbarMenuItem.tsx +++ b/autogpt_platform/frontend/src/components/layout/Navbar/components/MobileNavbar/components/MobileNavbarMenuItem.tsx @@ -19,7 +19,7 @@ export function MobileNavbarMenuItem({ onClick, }: Props) { const content = ( -
+
{getAccountMenuOptionIcon(icon)}
Date: Thu, 18 Dec 2025 19:51:24 +0100 Subject: [PATCH 09/25] fix(frontend): modal hidden overflow (#11642) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Changes 🏗️ ### Before Screenshot 2025-12-18 at 19 07 37 ### After Screenshot 2025-12-18 at 19 02 12 ## Checklist 📋 ### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] Run locally and check crop --- .../src/components/molecules/Dialog/components/styles.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/autogpt_platform/frontend/src/components/molecules/Dialog/components/styles.ts b/autogpt_platform/frontend/src/components/molecules/Dialog/components/styles.ts index 3b7d12e8e9..873c33959b 100644 --- a/autogpt_platform/frontend/src/components/molecules/Dialog/components/styles.ts +++ b/autogpt_platform/frontend/src/components/molecules/Dialog/components/styles.ts @@ -3,8 +3,7 @@ const commonStyles = { title: "font-poppins text-md md:text-lg leading-none", overlay: "fixed inset-0 z-50 bg-stone-500/20 dark:bg-black/50 backdrop-blur-md animate-fade-in", - content: - "overflow-y-hidden bg-white p-6 fixed rounded-2xlarge flex flex-col z-50 w-full", + content: "bg-white p-6 fixed rounded-2xlarge flex flex-col z-50 w-full", }; // Modal specific styles From eed07b173a819b4c59ba508b9bee9afdfd68bff7 Mon Sep 17 00:00:00 2001 From: Bently Date: Thu, 18 Dec 2025 19:07:40 +0100 Subject: [PATCH 10/25] fix(frontend/builder): automatically frame agent when opening in builder (#11640) ## Summary - Fixed auto-frame timing in new builder - now calls `fitView` after nodes are rendered instead of on mount - Replaced manual viewport calculation in legacy builder with React Flow's `fitView` for consistency - Both builders now properly center and frame all blocks when opening an agent ## Test plan - [x] Open an existing agent with multiple blocks in the new builder - verify all blocks are visible and centered - [x] Open an existing agent in the legacy builder - verify all blocks are visible and centered - [x] Verify the manual "Frame" button still works correctly --- .../components/FlowEditor/Flow/useFlow.ts | 32 +++++++++++++++-- .../components/legacy-builder/Flow/Flow.tsx | 35 ++++++++----------- 2 files changed, 44 insertions(+), 23 deletions(-) diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/useFlow.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/useFlow.ts index bf0ebf0a97..be76c4ec2b 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/useFlow.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/useFlow.ts @@ -20,6 +20,7 @@ import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecut export const useFlow = () => { const [isLocked, setIsLocked] = useState(false); + const [hasAutoFramed, setHasAutoFramed] = useState(false); const addNodes = useNodeStore(useShallow((state) => state.addNodes)); const addLinks = useEdgeStore(useShallow((state) => state.addLinks)); const updateNodeStatus = useNodeStore( @@ -187,9 +188,36 @@ export const useFlow = () => { }; }, []); + const linkCount = graph?.links?.length ?? 
0; + useEffect(() => { - fitView({ padding: 0.2, duration: 800, maxZoom: 2 }); - }, [fitView]); + if (isGraphLoading || isBlocksLoading) { + setHasAutoFramed(false); + return; + } + + if (hasAutoFramed) { + return; + } + + const rafId = requestAnimationFrame(() => { + fitView({ padding: 0.2, duration: 800, maxZoom: 1 }); + setHasAutoFramed(true); + }); + + return () => cancelAnimationFrame(rafId); + }, [ + fitView, + hasAutoFramed, + customNodes.length, + isBlocksLoading, + isGraphLoading, + linkCount, + ]); + + useEffect(() => { + setHasAutoFramed(false); + }, [flowID, flowVersion]); // Drag and drop block from block menu const onDragOver = useCallback((event: React.DragEvent) => { diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/Flow/Flow.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/Flow/Flow.tsx index f80a480542..7e9b54d626 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/Flow/Flow.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/Flow/Flow.tsx @@ -103,6 +103,7 @@ const FlowEditor: React.FC<{ updateNode, getViewport, setViewport, + fitView, screenToFlowPosition, } = useReactFlow(); const [nodeId, setNodeId] = useState(1); @@ -115,6 +116,7 @@ const FlowEditor: React.FC<{ const [pinBlocksPopover, setPinBlocksPopover] = useState(false); // State to control if save popover should be pinned open const [pinSavePopover, setPinSavePopover] = useState(false); + const [hasAutoFramed, setHasAutoFramed] = useState(false); const { agentName, @@ -482,35 +484,26 @@ const FlowEditor: React.FC<{ return uuidv4(); }, []); - // Set the initial view port to center the canvas. useEffect(() => { - const { x, y } = getViewport(); - if (nodes.length <= 0 || x !== 0 || y !== 0) { + if (nodes.length === 0) { return; } - const topLeft = { x: Infinity, y: Infinity }; - const bottomRight = { x: -Infinity, y: -Infinity }; + if (hasAutoFramed) { + return; + } - nodes.forEach((node) => { - const { x, y } = node.position; - topLeft.x = Math.min(topLeft.x, x); - topLeft.y = Math.min(topLeft.y, y); - // Rough estimate of the width and height of the node: 500x400. 
- bottomRight.x = Math.max(bottomRight.x, x + 500); - bottomRight.y = Math.max(bottomRight.y, y + 400); + const rafId = requestAnimationFrame(() => { + fitView({ padding: 0.2, duration: 800, maxZoom: 1 }); + setHasAutoFramed(true); }); - const centerX = (topLeft.x + bottomRight.x) / 2; - const centerY = (topLeft.y + bottomRight.y) / 2; - const zoom = 0.8; + return () => cancelAnimationFrame(rafId); + }, [fitView, hasAutoFramed, nodes.length]); - setViewport({ - x: window.innerWidth / 2 - centerX * zoom, - y: window.innerHeight / 2 - centerY * zoom, - zoom: zoom, - }); - }, [nodes, getViewport, setViewport]); + useEffect(() => { + setHasAutoFramed(false); + }, [flowID, flowVersion]); const navigateToNode = useCallback( (nodeId: string) => { From b76b5a37c523dbd26d4d40ef6d0fdb6dcdd5afa9 Mon Sep 17 00:00:00 2001 From: Zamil Majdy Date: Fri, 19 Dec 2025 13:19:58 +0100 Subject: [PATCH 11/25] fix(backend): Convert generic exceptions to appropriate typed exceptions (#11641) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary - Fix TimeoutError in AIShortformVideoCreatorBlock → BlockExecutionError - Fix generic exceptions in SearchTheWebBlock → BlockExecutionError with proper HTTP error handling - Fix FirecrawlError 504 timeouts → BlockExecutionError with service-specific messages - Fix ReplicateBlock validation errors → BlockInputError for 422 status, BlockExecutionError for others - Add comprehensive HTTP error handling with HTTPClientError/HTTPServerError classes - Implement filename sanitization for "File name too long" errors - Add proper User-Agent handling for Wikipedia API compliance - Fix type conversion for string subclasses like ShortTextType - Add support for moderation errors with proper context propagation ## Test plan - [x] All modified blocks now properly categorize errors instead of raising BlockUnknownError - [x] Type conversion tests pass for ShortTextType and other string subclasses - [x] Formatting and linting pass - [x] Exception constructors include required block_name and block_id parameters 🤖 Generated with [Claude Code](https://claude.ai/code) --------- Co-authored-by: Claude --- .../blocks/ai_shortform_video_block.py | 19 +++++- .../backend/backend/blocks/branching.py | 5 +- .../backend/blocks/firecrawl/extract.py | 20 ++++-- .../backend/backend/blocks/flux_kontext.py | 25 +++++-- .../backend/backend/blocks/ideogram.py | 13 ++-- .../backend/backend/blocks/jina/search.py | 13 +++- .../blocks/replicate/replicate_block.py | 25 ++++++- .../backend/backend/blocks/search.py | 14 ++-- autogpt_platform/backend/backend/util/file.py | 64 +++++++++++++++--- .../backend/backend/util/request.py | 37 ++++++++++- autogpt_platform/backend/backend/util/type.py | 66 +++++++++++-------- .../backend/backend/util/type_test.py | 14 ++++ 12 files changed, 248 insertions(+), 67 deletions(-) diff --git a/autogpt_platform/backend/backend/blocks/ai_shortform_video_block.py b/autogpt_platform/backend/backend/blocks/ai_shortform_video_block.py index 07ac026ec4..7242ff8304 100644 --- a/autogpt_platform/backend/backend/blocks/ai_shortform_video_block.py +++ b/autogpt_platform/backend/backend/blocks/ai_shortform_video_block.py @@ -20,6 +20,7 @@ from backend.data.model import ( SchemaField, ) from backend.integrations.providers import ProviderName +from backend.util.exceptions import BlockExecutionError from backend.util.request import Requests TEST_CREDENTIALS = APIKeyCredentials( @@ -246,7 +247,11 @@ class AIShortformVideoCreatorBlock(Block): await 
asyncio.sleep(10) logger.error("Video creation timed out") - raise TimeoutError("Video creation timed out") + raise BlockExecutionError( + message="Video creation timed out", + block_name=self.name, + block_id=self.id, + ) def __init__(self): super().__init__( @@ -422,7 +427,11 @@ class AIAdMakerVideoCreatorBlock(Block): await asyncio.sleep(10) logger.error("Video creation timed out") - raise TimeoutError("Video creation timed out") + raise BlockExecutionError( + message="Video creation timed out", + block_name=self.name, + block_id=self.id, + ) def __init__(self): super().__init__( @@ -599,7 +608,11 @@ class AIScreenshotToVideoAdBlock(Block): await asyncio.sleep(10) logger.error("Video creation timed out") - raise TimeoutError("Video creation timed out") + raise BlockExecutionError( + message="Video creation timed out", + block_name=self.name, + block_id=self.id, + ) def __init__(self): super().__init__( diff --git a/autogpt_platform/backend/backend/blocks/branching.py b/autogpt_platform/backend/backend/blocks/branching.py index 326fe2b8b1..e9177a8b65 100644 --- a/autogpt_platform/backend/backend/blocks/branching.py +++ b/autogpt_platform/backend/backend/blocks/branching.py @@ -106,7 +106,10 @@ class ConditionBlock(Block): ComparisonOperator.LESS_THAN_OR_EQUAL: lambda a, b: a <= b, } - result = comparison_funcs[operator](value1, value2) + try: + result = comparison_funcs[operator](value1, value2) + except Exception as e: + raise ValueError(f"Comparison failed: {e}") from e yield "result", result diff --git a/autogpt_platform/backend/backend/blocks/firecrawl/extract.py b/autogpt_platform/backend/backend/blocks/firecrawl/extract.py index 4f54b102a0..e5fd5ec9f3 100755 --- a/autogpt_platform/backend/backend/blocks/firecrawl/extract.py +++ b/autogpt_platform/backend/backend/blocks/firecrawl/extract.py @@ -15,6 +15,7 @@ from backend.sdk import ( SchemaField, cost, ) +from backend.util.exceptions import BlockExecutionError from ._config import firecrawl @@ -59,11 +60,18 @@ class FirecrawlExtractBlock(Block): ) -> BlockOutput: app = FirecrawlApp(api_key=credentials.api_key.get_secret_value()) - extract_result = app.extract( - urls=input_data.urls, - prompt=input_data.prompt, - schema=input_data.output_schema, - enable_web_search=input_data.enable_web_search, - ) + try: + extract_result = app.extract( + urls=input_data.urls, + prompt=input_data.prompt, + schema=input_data.output_schema, + enable_web_search=input_data.enable_web_search, + ) + except Exception as e: + raise BlockExecutionError( + message=f"Extract failed: {e}", + block_name=self.name, + block_id=self.id, + ) from e yield "data", extract_result.data diff --git a/autogpt_platform/backend/backend/blocks/flux_kontext.py b/autogpt_platform/backend/backend/blocks/flux_kontext.py index 908d0962ed..dd8375c4ce 100644 --- a/autogpt_platform/backend/backend/blocks/flux_kontext.py +++ b/autogpt_platform/backend/backend/blocks/flux_kontext.py @@ -19,6 +19,7 @@ from backend.data.model import ( SchemaField, ) from backend.integrations.providers import ProviderName +from backend.util.exceptions import ModerationError from backend.util.file import MediaFileType, store_media_file TEST_CREDENTIALS = APIKeyCredentials( @@ -153,6 +154,8 @@ class AIImageEditorBlock(Block): ), aspect_ratio=input_data.aspect_ratio.value, seed=input_data.seed, + user_id=user_id, + graph_exec_id=graph_exec_id, ) yield "output_image", result @@ -164,6 +167,8 @@ class AIImageEditorBlock(Block): input_image_b64: Optional[str], aspect_ratio: str, seed: Optional[int], + user_id: 
str, + graph_exec_id: str, ) -> MediaFileType: client = ReplicateClient(api_token=api_key.get_secret_value()) input_params = { @@ -173,11 +178,21 @@ class AIImageEditorBlock(Block): **({"seed": seed} if seed is not None else {}), } - output: FileOutput | list[FileOutput] = await client.async_run( # type: ignore - model_name, - input=input_params, - wait=False, - ) + try: + output: FileOutput | list[FileOutput] = await client.async_run( # type: ignore + model_name, + input=input_params, + wait=False, + ) + except Exception as e: + if "flagged as sensitive" in str(e).lower(): + raise ModerationError( + message="Content was flagged as sensitive by the model provider", + user_id=user_id, + graph_exec_id=graph_exec_id, + moderation_type="model_provider", + ) + raise ValueError(f"Model execution failed: {e}") from e if isinstance(output, list) and output: output = output[0] diff --git a/autogpt_platform/backend/backend/blocks/ideogram.py b/autogpt_platform/backend/backend/blocks/ideogram.py index 3eab66aa5f..09a384c74a 100644 --- a/autogpt_platform/backend/backend/blocks/ideogram.py +++ b/autogpt_platform/backend/backend/blocks/ideogram.py @@ -2,7 +2,6 @@ from enum import Enum from typing import Any, Dict, Literal, Optional from pydantic import SecretStr -from requests.exceptions import RequestException from backend.data.block import ( Block, @@ -332,8 +331,8 @@ class IdeogramModelBlock(Block): try: response = await Requests().post(url, headers=headers, json=data) return response.json()["data"][0]["url"] - except RequestException as e: - raise Exception(f"Failed to fetch image with V3 endpoint: {str(e)}") + except Exception as e: + raise ValueError(f"Failed to fetch image with V3 endpoint: {e}") from e async def _run_model_legacy( self, @@ -385,8 +384,8 @@ class IdeogramModelBlock(Block): try: response = await Requests().post(url, headers=headers, json=data) return response.json()["data"][0]["url"] - except RequestException as e: - raise Exception(f"Failed to fetch image with legacy endpoint: {str(e)}") + except Exception as e: + raise ValueError(f"Failed to fetch image with legacy endpoint: {e}") from e async def upscale_image(self, api_key: SecretStr, image_url: str): url = "https://api.ideogram.ai/upscale" @@ -413,5 +412,5 @@ class IdeogramModelBlock(Block): return (response.json())["data"][0]["url"] - except RequestException as e: - raise Exception(f"Failed to upscale image: {str(e)}") + except Exception as e: + raise ValueError(f"Failed to upscale image: {e}") from e diff --git a/autogpt_platform/backend/backend/blocks/jina/search.py b/autogpt_platform/backend/backend/blocks/jina/search.py index e1af44b41b..05cddcc1df 100644 --- a/autogpt_platform/backend/backend/blocks/jina/search.py +++ b/autogpt_platform/backend/backend/blocks/jina/search.py @@ -16,6 +16,7 @@ from backend.data.block import ( BlockSchemaOutput, ) from backend.data.model import SchemaField +from backend.util.exceptions import BlockExecutionError class SearchTheWebBlock(Block, GetRequest): @@ -56,7 +57,17 @@ class SearchTheWebBlock(Block, GetRequest): # Prepend the Jina Search URL to the encoded query jina_search_url = f"https://s.jina.ai/{encoded_query}" - results = await self.get_request(jina_search_url, headers=headers, json=False) + + try: + results = await self.get_request( + jina_search_url, headers=headers, json=False + ) + except Exception as e: + raise BlockExecutionError( + message=f"Search failed: {e}", + block_name=self.name, + block_id=self.id, + ) from e # Output the search results yield "results", results 
diff --git a/autogpt_platform/backend/backend/blocks/replicate/replicate_block.py b/autogpt_platform/backend/backend/blocks/replicate/replicate_block.py index 8cf104edc2..7ee054d02e 100644 --- a/autogpt_platform/backend/backend/blocks/replicate/replicate_block.py +++ b/autogpt_platform/backend/backend/blocks/replicate/replicate_block.py @@ -18,6 +18,7 @@ from backend.data.block import ( BlockSchemaOutput, ) from backend.data.model import APIKeyCredentials, CredentialsField, SchemaField +from backend.util.exceptions import BlockExecutionError, BlockInputError logger = logging.getLogger(__name__) @@ -111,9 +112,27 @@ class ReplicateModelBlock(Block): yield "status", "succeeded" yield "model_name", input_data.model_name except Exception as e: - error_msg = f"Unexpected error running Replicate model: {str(e)}" - logger.error(error_msg) - raise RuntimeError(error_msg) + error_msg = str(e) + logger.error(f"Error running Replicate model: {error_msg}") + + # Input validation errors (422, 400) → BlockInputError + if ( + "422" in error_msg + or "Input validation failed" in error_msg + or "400" in error_msg + ): + raise BlockInputError( + message=f"Invalid model inputs: {error_msg}", + block_name=self.name, + block_id=self.id, + ) from e + # Everything else → BlockExecutionError + else: + raise BlockExecutionError( + message=f"Replicate model error: {error_msg}", + block_name=self.name, + block_id=self.id, + ) from e async def run_model(self, model_ref: str, model_inputs: dict, api_key: SecretStr): """ diff --git a/autogpt_platform/backend/backend/blocks/search.py b/autogpt_platform/backend/backend/blocks/search.py index 094db23544..2d10dffab6 100644 --- a/autogpt_platform/backend/backend/blocks/search.py +++ b/autogpt_platform/backend/backend/blocks/search.py @@ -45,10 +45,16 @@ class GetWikipediaSummaryBlock(Block, GetRequest): async def run(self, input_data: Input, **kwargs) -> BlockOutput: topic = input_data.topic url = f"https://en.wikipedia.org/api/rest_v1/page/summary/{topic}" - response = await self.get_request(url, json=True) - if "extract" not in response: - raise RuntimeError(f"Unable to parse Wikipedia response: {response}") - yield "summary", response["extract"] + + # Note: User-Agent is now automatically set by the request library + # to comply with Wikimedia's robot policy (https://w.wiki/4wJS) + try: + response = await self.get_request(url, json=True) + if "extract" not in response: + raise ValueError(f"Unable to parse Wikipedia response: {response}") + yield "summary", response["extract"] + except Exception as e: + raise ValueError(f"Failed to fetch Wikipedia summary: {e}") from e TEST_CREDENTIALS = APIKeyCredentials( diff --git a/autogpt_platform/backend/backend/util/file.py b/autogpt_platform/backend/backend/util/file.py index edf302fb34..dc8f86ea41 100644 --- a/autogpt_platform/backend/backend/util/file.py +++ b/autogpt_platform/backend/backend/util/file.py @@ -14,12 +14,47 @@ from backend.util.virus_scanner import scan_content_safe TEMP_DIR = Path(tempfile.gettempdir()).resolve() +# Maximum filename length (conservative limit for most filesystems) +MAX_FILENAME_LENGTH = 200 + + +def sanitize_filename(filename: str) -> str: + """ + Sanitize and truncate filename to prevent filesystem errors. + """ + # Remove or replace invalid characters + sanitized = re.sub(r'[<>:"/\\|?*\n\r\t]', "_", filename) + + # Truncate if too long + if len(sanitized) > MAX_FILENAME_LENGTH: + # Keep the extension if possible + if "." 
in sanitized: + name, ext = sanitized.rsplit(".", 1) + max_name_length = MAX_FILENAME_LENGTH - len(ext) - 1 + sanitized = name[:max_name_length] + "." + ext + else: + sanitized = sanitized[:MAX_FILENAME_LENGTH] + + # Ensure it's not empty or just dots + if not sanitized or sanitized.strip(".") == "": + sanitized = f"file_{uuid.uuid4().hex[:8]}" + + return sanitized + def get_exec_file_path(graph_exec_id: str, path: str) -> str: """ Utility to build an absolute path in the {temp}/exec_file/{exec_id}/... folder. """ - return str(TEMP_DIR / "exec_file" / graph_exec_id / path) + try: + full_path = TEMP_DIR / "exec_file" / graph_exec_id / path + return str(full_path) + except OSError as e: + if "File name too long" in str(e): + raise ValueError( + f"File path too long: {len(path)} characters. Maximum path length exceeded." + ) from e + raise ValueError(f"Invalid file path: {e}") from e def clean_exec_files(graph_exec_id: str, file: str = "") -> None: @@ -117,8 +152,11 @@ async def store_media_file( # Generate filename from cloud path _, path_part = cloud_storage.parse_cloud_path(file) - filename = Path(path_part).name or f"{uuid.uuid4()}.bin" - target_path = _ensure_inside_base(base_path / filename, base_path) + filename = sanitize_filename(Path(path_part).name or f"{uuid.uuid4()}.bin") + try: + target_path = _ensure_inside_base(base_path / filename, base_path) + except OSError as e: + raise ValueError(f"Invalid file path '{filename}': {e}") from e # Check file size limit if len(cloud_content) > MAX_FILE_SIZE: @@ -144,7 +182,10 @@ async def store_media_file( # Generate filename and decode extension = _extension_from_mime(mime_type) filename = f"{uuid.uuid4()}{extension}" - target_path = _ensure_inside_base(base_path / filename, base_path) + try: + target_path = _ensure_inside_base(base_path / filename, base_path) + except OSError as e: + raise ValueError(f"Invalid file path '{filename}': {e}") from e content = base64.b64decode(b64_content) # Check file size limit @@ -160,8 +201,11 @@ async def store_media_file( elif file.startswith(("http://", "https://")): # URL parsed_url = urlparse(file) - filename = Path(parsed_url.path).name or f"{uuid.uuid4()}" - target_path = _ensure_inside_base(base_path / filename, base_path) + filename = sanitize_filename(Path(parsed_url.path).name or f"{uuid.uuid4()}") + try: + target_path = _ensure_inside_base(base_path / filename, base_path) + except OSError as e: + raise ValueError(f"Invalid file path '{filename}': {e}") from e # Download and save resp = await Requests().get(file) @@ -177,8 +221,12 @@ async def store_media_file( target_path.write_bytes(resp.content) else: - # Local path - target_path = _ensure_inside_base(base_path / file, base_path) + # Local path - sanitize the filename part to prevent long filename errors + sanitized_file = sanitize_filename(file) + try: + target_path = _ensure_inside_base(base_path / sanitized_file, base_path) + except OSError as e: + raise ValueError(f"Invalid file path '{sanitized_file}': {e}") from e if not target_path.is_file(): raise ValueError(f"Local file does not exist: {target_path}") diff --git a/autogpt_platform/backend/backend/util/request.py b/autogpt_platform/backend/backend/util/request.py index 4887cfa02d..9744372b15 100644 --- a/autogpt_platform/backend/backend/util/request.py +++ b/autogpt_platform/backend/backend/util/request.py @@ -21,6 +21,26 @@ from tenacity import ( from backend.util.json import loads + +class HTTPClientError(Exception): + """4xx client errors (400-499)""" + + def __init__(self, message: 
str, status_code: int): + super().__init__(message) + self.status_code = status_code + + +class HTTPServerError(Exception): + """5xx server errors (500-599)""" + + def __init__(self, message: str, status_code: int): + super().__init__(message) + self.status_code = status_code + + +# Default User-Agent for all requests +DEFAULT_USER_AGENT = "AutoGPT-Platform/1.0 (https://github.com/Significant-Gravitas/AutoGPT; info@agpt.co) aiohttp" + # Retry status codes for which we will automatically retry the request THROTTLE_RETRY_STATUS_CODES: set[int] = {429, 500, 502, 503, 504, 408} @@ -450,6 +470,10 @@ class Requests: if self.extra_headers is not None: req_headers.update(self.extra_headers) + # Set default User-Agent if not provided + if "User-Agent" not in req_headers and "user-agent" not in req_headers: + req_headers["User-Agent"] = DEFAULT_USER_AGENT + # Override Host header if using IP connection if connector: req_headers["Host"] = hostname @@ -476,9 +500,16 @@ class Requests: response.raise_for_status() except ClientResponseError as e: body = await response.read() - raise Exception( - f"HTTP {response.status} Error: {response.reason}, Body: {body.decode(errors='replace')}" - ) from e + error_message = f"HTTP {response.status} Error: {response.reason}, Body: {body.decode(errors='replace')}" + + # Raise specific exceptions based on status code range + if 400 <= response.status <= 499: + raise HTTPClientError(error_message, response.status) from e + elif 500 <= response.status <= 599: + raise HTTPServerError(error_message, response.status) from e + else: + # Generic fallback for other HTTP errors + raise Exception(error_message) from e # If allowed and a redirect is received, follow the redirect manually if allow_redirects and response.status in (301, 302, 303, 307, 308): diff --git a/autogpt_platform/backend/backend/util/type.py b/autogpt_platform/backend/backend/util/type.py index e1cda80203..2402011669 100644 --- a/autogpt_platform/backend/backend/util/type.py +++ b/autogpt_platform/backend/backend/util/type.py @@ -5,6 +5,13 @@ from typing import Any, Type, TypeVar, Union, cast, get_args, get_origin, overlo from prisma import Json as PrismaJson +def _is_type_or_subclass(origin: Any, target_type: type) -> bool: + """Check if origin is exactly the target type or a subclass of it.""" + return origin is target_type or ( + isinstance(origin, type) and issubclass(origin, target_type) + ) + + class ConversionError(ValueError): pass @@ -138,7 +145,11 @@ def _try_convert(value: Any, target_type: Any, raise_on_mismatch: bool) -> Any: if origin is None: origin = target_type - if origin not in [list, dict, tuple, str, set, int, float, bool]: + # Early return for unsupported types (skip subclasses of supported types) + supported_types = [list, dict, tuple, str, set, int, float, bool] + if origin not in supported_types and not ( + isinstance(origin, type) and any(issubclass(origin, t) for t in supported_types) + ): return value # Handle the case when value is already of the target type @@ -168,44 +179,47 @@ def _try_convert(value: Any, target_type: Any, raise_on_mismatch: bool) -> Any: raise TypeError(f"Value {value} is not of expected type {target_type}") else: # Need to convert value to the origin type - if origin is list: - value = __convert_list(value) + if _is_type_or_subclass(origin, list): + converted_list = __convert_list(value) if args: - return [convert(v, args[0]) for v in value] - else: - return value - elif origin is dict: - value = __convert_dict(value) + converted_list = [convert(v, args[0]) 
for v in converted_list] + return origin(converted_list) if origin is not list else converted_list + elif _is_type_or_subclass(origin, dict): + converted_dict = __convert_dict(value) if args: key_type, val_type = args - return { - convert(k, key_type): convert(v, val_type) for k, v in value.items() + converted_dict = { + convert(k, key_type): convert(v, val_type) + for k, v in converted_dict.items() } - else: - return value - elif origin is tuple: - value = __convert_tuple(value) + return origin(converted_dict) if origin is not dict else converted_dict + elif _is_type_or_subclass(origin, tuple): + converted_tuple = __convert_tuple(value) if args: if len(args) == 1: - return tuple(convert(v, args[0]) for v in value) + converted_tuple = tuple( + convert(v, args[0]) for v in converted_tuple + ) else: - return tuple(convert(v, t) for v, t in zip(value, args)) - else: - return value - elif origin is str: - return __convert_str(value) - elif origin is set: + converted_tuple = tuple( + convert(v, t) for v, t in zip(converted_tuple, args) + ) + return origin(converted_tuple) if origin is not tuple else converted_tuple + elif _is_type_or_subclass(origin, str): + converted_str = __convert_str(value) + return origin(converted_str) if origin is not str else converted_str + elif _is_type_or_subclass(origin, set): value = __convert_set(value) if args: return {convert(v, args[0]) for v in value} else: return value - elif origin is int: - return __convert_num(value, int) - elif origin is float: - return __convert_num(value, float) - elif origin is bool: + elif _is_type_or_subclass(origin, bool): return __convert_bool(value) + elif _is_type_or_subclass(origin, int): + return __convert_num(value, int) + elif _is_type_or_subclass(origin, float): + return __convert_num(value, float) else: return value diff --git a/autogpt_platform/backend/backend/util/type_test.py b/autogpt_platform/backend/backend/util/type_test.py index becadf48b2..920776edbf 100644 --- a/autogpt_platform/backend/backend/util/type_test.py +++ b/autogpt_platform/backend/backend/util/type_test.py @@ -32,3 +32,17 @@ def test_type_conversion(): assert convert("5", List[int]) == [5] assert convert("[5,4,2]", List[int]) == [5, 4, 2] assert convert([5, 4, 2], List[str]) == ["5", "4", "2"] + + # Test the specific case that was failing: empty list to Optional[str] + assert convert([], Optional[str]) == "[]" + assert convert([], str) == "[]" + + # Test the actual failing case: empty list to ShortTextType + from backend.util.type import ShortTextType + + assert convert([], Optional[ShortTextType]) == "[]" + assert convert([], ShortTextType) == "[]" + + # Test other empty list conversions + assert convert([], int) == 0 # len([]) = 0 + assert convert([], Optional[int]) == 0 From 3dbc03e488b90ffaf2b478b1fca842032fc9e8bb Mon Sep 17 00:00:00 2001 From: Reinier van der Leer Date: Fri, 19 Dec 2025 21:05:16 +0100 Subject: [PATCH 12/25] feat(platform): OAuth API & Single Sign-On (#11617) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit We want to provide Single Sign-On for multiple AutoGPT apps that use the Platform as their backend. 
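As background, a client application integrating with this Single Sign-On support would perform a standard OAuth 2.0 authorization-code exchange roughly like the sketch below. The token endpoint URL, parameter names, and response shape shown here are assumptions for illustration only; the actual endpoints are documented in the OAuth guide added by this PR.

```ts
interface TokenResponse {
  access_token: string;
  refresh_token?: string;
  expires_in?: number;
}

// Sketch only: "platform.example.com/oauth/token" is a placeholder, not the
// real endpoint path introduced by this PR.
export async function exchangeAuthorizationCode(
  code: string,
  redirectUri: string,
): Promise<TokenResponse> {
  const response = await fetch("https://platform.example.com/oauth/token", {
    method: "POST",
    headers: { "Content-Type": "application/x-www-form-urlencoded" },
    body: new URLSearchParams({
      grant_type: "authorization_code",
      code,
      redirect_uri: redirectUri,
      client_id: process.env.OAUTH_CLIENT_ID ?? "",
      client_secret: process.env.OAUTH_CLIENT_SECRET ?? "",
    }),
  });

  if (!response.ok) {
    throw new Error(`Token exchange failed with status ${response.status}`);
  }
  return (await response.json()) as TokenResponse;
}
```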
### Changes 🏗️ Backend: - DB + logic + API for OAuth flow (w/ tests) - DB schema additions for OAuth apps, codes, and tokens - Token creation/validation/management logic - OAuth flow endpoints (app info, authorize, token exchange, introspect, revoke) - E2E OAuth API integration tests - Other OAuth-related endpoints (upload app logo, list owned apps, external `/me` endpoint) - App logo asset management - Adjust external API middleware to support auth with access token - Expired token clean-up job - Add `OAUTH_TOKEN_CLEANUP_INTERVAL_HOURS` setting (optional) - `poetry run oauth-tool`: dev tool to test the OAuth flows and register new OAuth apps - `poetry run export-api-schema`: dev tool to quickly export the OpenAPI schema (much quicker than spinning up the backend) Frontend: - Frontend UI for app authorization (`/auth/authorize`) - Re-redirect after login/signup - Frontend flow to batch-auth integrations on request of the client app (`/auth/integrations/setup-wizard`) - Debug `CredentialInputs` component - Add `/profile/oauth-apps` management page - Add `isOurProblem` flag to `ErrorCard` to hide action buttons when the error isn't our fault - Add `showTitle` flag to `CredentialsInput` to hide built-in title for layout reasons DX: - Add [API guide](https://github.com/Significant-Gravitas/AutoGPT/blob/pwuts/sso/docs/content/platform/integrating/api-guide.md) and [OAuth guide](https://github.com/Significant-Gravitas/AutoGPT/blob/pwuts/sso/docs/content/platform/integrating/oauth-guide.md) ### Checklist 📋 #### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] Manually verify test coverage of OAuth API tests - Test `/auth/authorize` using `poetry run oauth-tool test-server` - [x] Works - [x] Looks okay - Test `/auth/integrations/setup-wizard` using `poetry run oauth-tool test-server` - [x] Works - [x] Looks okay - Test `/profile/oauth-apps` page - [x] All owned OAuth apps show up - [x] Enabling/disabling apps works - [ ] ~~Uploading logos works~~ can only test this once deployed to dev #### For configuration changes: - [x] `.env.default` is updated or already compatible with my changes - [x] `docker-compose.yml` is updated or already compatible with my changes - [x] I have included a list of my configuration changes in the PR description (under **Changes**) --- .../autogpt_libs/api_key/keysmith.py | 3 + .../backend/backend/cli/__init__.py | 1 + .../backend/cli/generate_openapi_json.py | 57 + .../backend/backend/cli/oauth_tool.py | 1177 +++++++++++ .../backend/data/{ => auth}/api_key.py | 19 +- .../backend/backend/data/auth/base.py | 15 + .../backend/backend/data/auth/oauth.py | 872 ++++++++ .../backend/backend/executor/scheduler.py | 23 + .../backend/server/external/middleware.py | 97 +- .../server/external/routes/integrations.py | 41 +- .../backend/server/external/routes/tools.py | 22 +- .../backend/server/external/routes/v1.py | 77 +- .../backend/backend/server/model.py | 2 +- .../backend/backend/server/rest_api.py | 6 + .../backend/backend/server/routers/oauth.py | 833 ++++++++ .../backend/server/routers/oauth_test.py | 1784 +++++++++++++++++ .../backend/backend/server/routers/v1.py | 2 +- .../backend/backend/util/settings.py | 7 + .../migration.sql | 129 ++ .../migration.sql | 5 + autogpt_platform/backend/pyproject.toml | 2 + autogpt_platform/backend/schema.prisma | 117 ++ .../backend/test/e2e_test_data.py | 6 +- .../app/(platform)/auth/authorize/page.tsx | 296 +++ 
.../src/app/(platform)/auth/callback/route.ts | 3 + .../auth/integrations/setup-wizard/page.tsx | 331 +++ .../CredentialsInputs/CredentialsInputs.tsx | 45 +- ...ntialsInputs.ts => useCredentialsInput.ts} | 119 +- .../src/app/(platform)/login/page.tsx | 10 +- .../src/app/(platform)/login/useLoginPage.ts | 22 +- .../APIKeySection/APIKeySection.tsx | 0 .../APIKeySection/useAPISection.ts} | 0 .../APIKeysModals/APIKeysModals.tsx | 0 .../APIKeysModals/useAPIkeysModals.ts} | 0 .../(user)/{api_keys => api-keys}/page.tsx | 2 +- .../app/(platform)/profile/(user)/layout.tsx | 50 +- .../components/OAuthAppsSection.tsx | 147 ++ .../oauth-apps/components/useOAuthApps.ts | 110 + .../profile/(user)/oauth-apps/page.tsx | 21 + .../src/app/(platform)/signup/page.tsx | 10 +- .../app/(platform)/signup/useSignupPage.ts | 23 +- .../src/app/api/mutators/custom-mutator.ts | 13 + .../frontend/src/app/api/openapi.json | 784 +++++++- .../molecules/ErrorCard/ErrorCard.tsx | 20 +- .../ErrorCard/components/ErrorMessage.tsx | 10 +- .../src/lib/autogpt-server-api/types.ts | 2 +- .../frontend/src/lib/supabase/helpers.ts | 11 +- .../src/lib/supabase/hooks/helpers.ts | 10 +- .../src/lib/supabase/hooks/useSupabase.ts | 17 +- .../lib/supabase/hooks/useSupabaseStore.ts | 18 +- .../frontend/src/lib/supabase/middleware.ts | 2 + autogpt_platform/frontend/src/middleware.ts | 12 +- .../frontend/src/tests/api-keys.spec.ts | 8 +- .../frontend/src/tests/profile-form.spec.ts | 2 +- .../frontend/src/tests/signin.spec.ts | 4 +- .../content/platform/integrating/api-guide.md | 85 + .../platform/integrating/oauth-guide.md | 440 ++++ docs/mkdocs.yml | 11 +- 58 files changed, 7672 insertions(+), 263 deletions(-) create mode 100644 autogpt_platform/backend/backend/cli/__init__.py create mode 100644 autogpt_platform/backend/backend/cli/generate_openapi_json.py create mode 100755 autogpt_platform/backend/backend/cli/oauth_tool.py rename autogpt_platform/backend/backend/data/{ => auth}/api_key.py (95%) create mode 100644 autogpt_platform/backend/backend/data/auth/base.py create mode 100644 autogpt_platform/backend/backend/data/auth/oauth.py create mode 100644 autogpt_platform/backend/backend/server/routers/oauth.py create mode 100644 autogpt_platform/backend/backend/server/routers/oauth_test.py create mode 100644 autogpt_platform/backend/migrations/20251212165920_add_oauth_provider_support/migration.sql create mode 100644 autogpt_platform/backend/migrations/20251218231330_add_oauth_app_logo/migration.sql create mode 100644 autogpt_platform/frontend/src/app/(platform)/auth/authorize/page.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/auth/integrations/setup-wizard/page.tsx rename autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/{useCredentialsInputs.ts => useCredentialsInput.ts} (76%) rename autogpt_platform/frontend/src/app/(platform)/profile/(user)/{api_keys => api-keys}/components/APIKeySection/APIKeySection.tsx (100%) rename autogpt_platform/frontend/src/app/(platform)/profile/(user)/{api_keys/components/APIKeySection/useAPISection.tsx => api-keys/components/APIKeySection/useAPISection.ts} (100%) rename autogpt_platform/frontend/src/app/(platform)/profile/(user)/{api_keys => api-keys}/components/APIKeysModals/APIKeysModals.tsx (100%) rename autogpt_platform/frontend/src/app/(platform)/profile/(user)/{api_keys/components/APIKeysModals/useAPIkeysModals.tsx => api-keys/components/APIKeysModals/useAPIkeysModals.ts} (100%) rename 
autogpt_platform/frontend/src/app/(platform)/profile/(user)/{api_keys => api-keys}/page.tsx (94%) create mode 100644 autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/OAuthAppsSection.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/useOAuthApps.ts create mode 100644 autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/page.tsx create mode 100644 docs/content/platform/integrating/api-guide.md create mode 100644 docs/content/platform/integrating/oauth-guide.md diff --git a/autogpt_platform/autogpt_libs/autogpt_libs/api_key/keysmith.py b/autogpt_platform/autogpt_libs/autogpt_libs/api_key/keysmith.py index 394044a69d..aee7040288 100644 --- a/autogpt_platform/autogpt_libs/autogpt_libs/api_key/keysmith.py +++ b/autogpt_platform/autogpt_libs/autogpt_libs/api_key/keysmith.py @@ -57,6 +57,9 @@ class APIKeySmith: def hash_key(self, raw_key: str) -> tuple[str, str]: """Migrate a legacy hash to secure hash format.""" + if not raw_key.startswith(self.PREFIX): + raise ValueError("Key without 'agpt_' prefix would fail validation") + salt = self._generate_salt() hash = self._hash_key_with_salt(raw_key, salt) return hash, salt.hex() diff --git a/autogpt_platform/backend/backend/cli/__init__.py b/autogpt_platform/backend/backend/cli/__init__.py new file mode 100644 index 0000000000..d96b0c7d49 --- /dev/null +++ b/autogpt_platform/backend/backend/cli/__init__.py @@ -0,0 +1 @@ +"""CLI utilities for backend development & administration""" diff --git a/autogpt_platform/backend/backend/cli/generate_openapi_json.py b/autogpt_platform/backend/backend/cli/generate_openapi_json.py new file mode 100644 index 0000000000..313e603c44 --- /dev/null +++ b/autogpt_platform/backend/backend/cli/generate_openapi_json.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python3 +""" +Script to generate OpenAPI JSON specification for the FastAPI app. + +This script imports the FastAPI app from backend.server.rest_api and outputs +the OpenAPI specification as JSON to stdout or a specified file. 
+ +Usage: + `poetry run python generate_openapi_json.py` + `poetry run python generate_openapi_json.py --output openapi.json` + `poetry run python generate_openapi_json.py --indent 4 --output openapi.json` +""" + +import json +import os +from pathlib import Path + +import click + + +@click.command() +@click.option( + "--output", + type=click.Path(dir_okay=False, path_type=Path), + help="Output file path (default: stdout)", +) +@click.option( + "--pretty", + type=click.BOOL, + default=False, + help="Pretty-print JSON output (indented 2 spaces)", +) +def main(output: Path, pretty: bool): + """Generate and output the OpenAPI JSON specification.""" + openapi_schema = get_openapi_schema() + + json_output = json.dumps(openapi_schema, indent=2 if pretty else None) + + if output: + output.write_text(json_output) + click.echo(f"✅ OpenAPI specification written to {output}\n\nPreview:") + click.echo(f"\n{json_output[:500]} ...") + else: + print(json_output) + + +def get_openapi_schema(): + """Get the OpenAPI schema from the FastAPI app""" + from backend.server.rest_api import app + + return app.openapi() + + +if __name__ == "__main__": + os.environ["LOG_LEVEL"] = "ERROR" # disable stdout log output + + main() diff --git a/autogpt_platform/backend/backend/cli/oauth_tool.py b/autogpt_platform/backend/backend/cli/oauth_tool.py new file mode 100755 index 0000000000..57982d359b --- /dev/null +++ b/autogpt_platform/backend/backend/cli/oauth_tool.py @@ -0,0 +1,1177 @@ +#!/usr/bin/env python3 +""" +OAuth Application Credential Generator and Test Server + +Generates client IDs, client secrets, and SQL INSERT statements for OAuth applications. +Also provides a test server to test the OAuth flows end-to-end. + +Usage: + # Generate credentials interactively (recommended) + poetry run oauth-tool generate-app + + # Generate credentials with all options provided + poetry run oauth-tool generate-app \\ + --name "My App" \\ + --description "My application description" \\ + --redirect-uris "https://app.example.com/callback,http://localhost:3000/callback" \\ + --scopes "EXECUTE_GRAPH,READ_GRAPH" + + # Mix of options and interactive prompts + poetry run oauth-tool generate-app --name "My App" + + # Hash an existing plaintext secret (for secret rotation) + poetry run oauth-tool hash-secret "my-plaintext-secret" + + # Validate a plaintext secret against a hash and salt + poetry run oauth-tool validate-secret "my-plaintext-secret" "hash" "salt" + + # Run a test server to test OAuth flows + poetry run oauth-tool test-server --owner-id YOUR_USER_ID +""" + +import asyncio +import base64 +import hashlib +import secrets +import sys +import uuid +from datetime import datetime +from typing import Optional +from urllib.parse import urlparse + +import click +from autogpt_libs.api_key.keysmith import APIKeySmith +from prisma.enums import APIKeyPermission + +keysmith = APIKeySmith() + + +def generate_client_id() -> str: + """Generate a unique client ID""" + return f"agpt_client_{secrets.token_urlsafe(16)}" + + +def generate_client_secret() -> tuple[str, str, str]: + """ + Generate a client secret with its hash and salt. + Returns (plaintext_secret, hashed_secret, salt) + """ + # Generate a secure random secret (32 bytes = 256 bits of entropy) + plaintext = f"agpt_secret_{secrets.token_urlsafe(32)}" + + # Hash using Scrypt (same as API keys) + hashed, salt = keysmith.hash_key(plaintext) + + return plaintext, hashed, salt + + +def hash_secret(plaintext: str) -> tuple[str, str]: + """Hash a plaintext secret using Scrypt. 
Returns (hash, salt)""" + return keysmith.hash_key(plaintext) + + +def validate_secret(plaintext: str, hash_value: str, salt: str) -> bool: + """Validate a plaintext secret against a stored hash and salt""" + return keysmith.verify_key(plaintext, hash_value, salt) + + +def generate_app_credentials( + name: str, + redirect_uris: list[str], + scopes: list[str], + description: str | None = None, + grant_types: list[str] | None = None, +) -> dict: + """ + Generate complete credentials for an OAuth application. + + Returns dict with: + - id: UUID for the application + - name: Application name + - description: Application description + - client_id: Client identifier (plaintext) + - client_secret_plaintext: Client secret (SENSITIVE - show only once) + - client_secret_hash: Hashed client secret (for database) + - redirect_uris: List of allowed redirect URIs + - grant_types: List of allowed grant types + - scopes: List of allowed scopes + """ + if grant_types is None: + grant_types = ["authorization_code", "refresh_token"] + + # Validate scopes + try: + validated_scopes = [APIKeyPermission(s.strip()) for s in scopes if s.strip()] + except ValueError as e: + raise ValueError(f"Invalid scope: {e}") + + if not validated_scopes: + raise ValueError("At least one scope is required") + + # Generate credentials + app_id = str(uuid.uuid4()) + client_id = generate_client_id() + client_secret_plaintext, client_secret_hash, client_secret_salt = ( + generate_client_secret() + ) + + return { + "id": app_id, + "name": name, + "description": description, + "client_id": client_id, + "client_secret_plaintext": client_secret_plaintext, + "client_secret_hash": client_secret_hash, + "client_secret_salt": client_secret_salt, + "redirect_uris": redirect_uris, + "grant_types": grant_types, + "scopes": [s.value for s in validated_scopes], + } + + +def format_sql_insert(creds: dict) -> str: + """ + Format credentials as a SQL INSERT statement. + + The statement includes placeholders that must be replaced: + - YOUR_USER_ID_HERE: Replace with the owner's user ID + """ + now_iso = datetime.utcnow().isoformat() + + # Format arrays for PostgreSQL + redirect_uris_pg = ( + "{" + ",".join(f'"{uri}"' for uri in creds["redirect_uris"]) + "}" + ) + grant_types_pg = "{" + ",".join(f'"{gt}"' for gt in creds["grant_types"]) + "}" + scopes_pg = "{" + ",".join(creds["scopes"]) + "}" + + sql = f""" +-- ============================================================ +-- OAuth Application: {creds['name']} +-- Generated: {now_iso} UTC +-- ============================================================ + +INSERT INTO "OAuthApplication" ( + id, + "createdAt", + "updatedAt", + name, + description, + "clientId", + "clientSecret", + "clientSecretSalt", + "redirectUris", + "grantTypes", + scopes, + "ownerId", + "isActive" +) +VALUES ( + '{creds['id']}', + NOW(), + NOW(), + '{creds['name']}', + {f"'{creds['description']}'" if creds['description'] else 'NULL'}, + '{creds['client_id']}', + '{creds['client_secret_hash']}', + '{creds['client_secret_salt']}', + ARRAY{redirect_uris_pg}::TEXT[], + ARRAY{grant_types_pg}::TEXT[], + ARRAY{scopes_pg}::"APIKeyPermission"[], + 'YOUR_USER_ID_HERE', -- ⚠️ REPLACE with actual owner user ID + true +); + +-- ============================================================ +-- ⚠️ IMPORTANT: Save these credentials securely! +-- ============================================================ +-- +-- Client ID: {creds['client_id']} +-- Client Secret: {creds['client_secret_plaintext']} +-- +-- ⚠️ The client secret is shown ONLY ONCE! 
+-- ⚠️ Store it securely and share only with the application developer. +-- ⚠️ Never commit it to version control. +-- +-- The client secret has been hashed in the database using Scrypt. +-- The plaintext secret above is needed by the application to authenticate. +-- ============================================================ + +-- To verify the application was created: +-- SELECT "clientId", name, scopes, "redirectUris", "isActive" +-- FROM "OAuthApplication" +-- WHERE "clientId" = '{creds['client_id']}'; +""" + return sql + + +@click.group() +def cli(): + """OAuth Application Credential Generator + + Generates client IDs, client secrets, and SQL INSERT statements for OAuth applications. + Does NOT directly insert into the database - outputs SQL for manual execution. + """ + pass + + +AVAILABLE_SCOPES = [ + "EXECUTE_GRAPH", + "READ_GRAPH", + "EXECUTE_BLOCK", + "READ_BLOCK", + "READ_STORE", + "USE_TOOLS", + "MANAGE_INTEGRATIONS", + "READ_INTEGRATIONS", + "DELETE_INTEGRATIONS", +] + +DEFAULT_GRANT_TYPES = ["authorization_code", "refresh_token"] + + +def prompt_for_name() -> str: + """Prompt for application name""" + return click.prompt("Application name", type=str) + + +def prompt_for_description() -> str | None: + """Prompt for application description""" + description = click.prompt( + "Application description (optional, press Enter to skip)", + type=str, + default="", + show_default=False, + ) + return description if description else None + + +def prompt_for_redirect_uris() -> list[str]: + """Prompt for redirect URIs interactively""" + click.echo("\nRedirect URIs (enter one per line, empty line to finish):") + click.echo(" Example: https://app.example.com/callback") + uris = [] + while True: + uri = click.prompt(" URI", type=str, default="", show_default=False) + if not uri: + if not uris: + click.echo(" At least one redirect URI is required.") + continue + break + uris.append(uri.strip()) + return uris + + +def prompt_for_scopes() -> list[str]: + """Prompt for scopes interactively with a menu""" + click.echo("\nAvailable scopes:") + for i, scope in enumerate(AVAILABLE_SCOPES, 1): + click.echo(f" {i}. 
{scope}") + + click.echo( + "\nSelect scopes by number (comma-separated) or enter scope names directly:" + ) + click.echo(" Example: 1,2 or EXECUTE_GRAPH,READ_GRAPH") + + while True: + selection = click.prompt("Scopes", type=str) + scopes = [] + + for item in selection.split(","): + item = item.strip() + if not item: + continue + + # Check if it's a number + if item.isdigit(): + idx = int(item) - 1 + if 0 <= idx < len(AVAILABLE_SCOPES): + scopes.append(AVAILABLE_SCOPES[idx]) + else: + click.echo(f" Invalid number: {item}") + scopes = [] + break + # Check if it's a valid scope name + elif item.upper() in AVAILABLE_SCOPES: + scopes.append(item.upper()) + else: + click.echo(f" Invalid scope: {item}") + scopes = [] + break + + if scopes: + return scopes + click.echo(" Please enter valid scope numbers or names.") + + +def prompt_for_grant_types() -> list[str] | None: + """Prompt for grant types interactively""" + click.echo(f"\nGrant types (default: {', '.join(DEFAULT_GRANT_TYPES)})") + grant_types_input = click.prompt( + "Grant types (comma-separated, press Enter for default)", + type=str, + default="", + show_default=False, + ) + + if not grant_types_input: + return None # Use default + + return [gt.strip() for gt in grant_types_input.split(",") if gt.strip()] + + +@cli.command(name="generate-app") +@click.option( + "--name", + default=None, + help="Application name (e.g., 'My Cool App')", +) +@click.option( + "--description", + default=None, + help="Application description", +) +@click.option( + "--redirect-uris", + default=None, + help="Comma-separated list of redirect URIs (e.g., 'https://app.example.com/callback,http://localhost:3000/callback')", +) +@click.option( + "--scopes", + default=None, + help="Comma-separated list of scopes (e.g., 'EXECUTE_GRAPH,READ_GRAPH')", +) +@click.option( + "--grant-types", + default=None, + help="Comma-separated list of grant types (default: 'authorization_code,refresh_token')", +) +def generate_app( + name: str | None, + description: str | None, + redirect_uris: str | None, + scopes: str | None, + grant_types: str | None, +): + """Generate credentials for a new OAuth application + + All options are optional. If not provided, you will be prompted interactively. 
+ """ + # Interactive prompts for missing required values + if name is None: + name = prompt_for_name() + + if description is None: + description = prompt_for_description() + + if redirect_uris is None: + redirect_uris_list = prompt_for_redirect_uris() + else: + redirect_uris_list = [uri.strip() for uri in redirect_uris.split(",")] + + if scopes is None: + scopes_list = prompt_for_scopes() + else: + scopes_list = [scope.strip() for scope in scopes.split(",")] + + if grant_types is None: + grant_types_list = prompt_for_grant_types() + else: + grant_types_list = [gt.strip() for gt in grant_types.split(",")] + + try: + creds = generate_app_credentials( + name=name, + description=description, + redirect_uris=redirect_uris_list, + scopes=scopes_list, + grant_types=grant_types_list, + ) + + sql = format_sql_insert(creds) + click.echo(sql) + + except ValueError as e: + click.echo(f"Error: {e}", err=True) + sys.exit(1) + + +@cli.command(name="hash-secret") +@click.argument("secret") +def hash_secret_command(secret): + """Hash a plaintext secret using Scrypt""" + hashed, salt = hash_secret(secret) + click.echo(f"Hash: {hashed}") + click.echo(f"Salt: {salt}") + + +@cli.command(name="validate-secret") +@click.argument("secret") +@click.argument("hash") +@click.argument("salt") +def validate_secret_command(secret, hash, salt): + """Validate a plaintext secret against a hash and salt""" + is_valid = validate_secret(secret, hash, salt) + if is_valid: + click.echo("✓ Secret is valid!") + sys.exit(0) + else: + click.echo("✗ Secret is invalid!", err=True) + sys.exit(1) + + +# ============================================================================ +# Test Server Command +# ============================================================================ + +TEST_APP_NAME = "OAuth Test App (CLI)" +TEST_APP_DESCRIPTION = "Temporary test application created by oauth_admin CLI" +TEST_SERVER_PORT = 9876 + + +def generate_pkce() -> tuple[str, str]: + """Generate PKCE code_verifier and code_challenge (S256)""" + code_verifier = secrets.token_urlsafe(32) + code_challenge = ( + base64.urlsafe_b64encode(hashlib.sha256(code_verifier.encode()).digest()) + .decode() + .rstrip("=") + ) + return code_verifier, code_challenge + + +def create_test_html( + platform_url: str, + client_id: str, + client_secret: str, + redirect_uri: str, + backend_url: str, +) -> str: + """Generate HTML page for test OAuth client""" + return f""" + + + + + OAuth Test Client + + + +
+    <!-- [Test-client page body: "🔐 OAuth Test Client" header; subtitle
+         "Test the Sign in with AutoGPT and Integration Setup flows"; the
+         {client_id} value; buttons that start the sign-in and integration-setup
+         flows; a "📋 Request Log" panel (initially "Waiting for action...");
+         and a "⚙️ Configuration" panel showing {platform_url}, {backend_url},
+         and {redirect_uri}, plus the inline JavaScript that drives the flows.] -->
+
+ + + + +""" + + +async def create_test_app_in_db( + owner_id: str, + redirect_uri: str, +) -> dict: + """Create a temporary test OAuth application in the database""" + from prisma.models import OAuthApplication + + from backend.data import db + + # Connect to database + await db.connect() + + # Generate credentials + creds = generate_app_credentials( + name=TEST_APP_NAME, + description=TEST_APP_DESCRIPTION, + redirect_uris=[redirect_uri], + scopes=AVAILABLE_SCOPES, # All scopes for testing + ) + + # Insert into database + app = await OAuthApplication.prisma().create( + data={ + "id": creds["id"], + "name": creds["name"], + "description": creds["description"], + "clientId": creds["client_id"], + "clientSecret": creds["client_secret_hash"], + "clientSecretSalt": creds["client_secret_salt"], + "redirectUris": creds["redirect_uris"], + "grantTypes": creds["grant_types"], + "scopes": creds["scopes"], + "ownerId": owner_id, + "isActive": True, + } + ) + + click.echo(f"✓ Created test OAuth application: {app.clientId}") + + return { + "id": app.id, + "client_id": app.clientId, + "client_secret": creds["client_secret_plaintext"], + } + + +async def cleanup_test_app(app_id: str) -> None: + """Remove test application and all associated tokens from database""" + from prisma.models import ( + OAuthAccessToken, + OAuthApplication, + OAuthAuthorizationCode, + OAuthRefreshToken, + ) + + from backend.data import db + + if not db.is_connected(): + await db.connect() + + click.echo("\n🧹 Cleaning up test data...") + + # Delete authorization codes + deleted_codes = await OAuthAuthorizationCode.prisma().delete_many( + where={"applicationId": app_id} + ) + if deleted_codes: + click.echo(f" Deleted {deleted_codes} authorization code(s)") + + # Delete access tokens + deleted_access = await OAuthAccessToken.prisma().delete_many( + where={"applicationId": app_id} + ) + if deleted_access: + click.echo(f" Deleted {deleted_access} access token(s)") + + # Delete refresh tokens + deleted_refresh = await OAuthRefreshToken.prisma().delete_many( + where={"applicationId": app_id} + ) + if deleted_refresh: + click.echo(f" Deleted {deleted_refresh} refresh token(s)") + + # Delete the application itself + await OAuthApplication.prisma().delete(where={"id": app_id}) + click.echo(" Deleted test OAuth application") + + await db.disconnect() + click.echo("✓ Cleanup complete!") + + +def run_test_server( + port: int, + platform_url: str, + backend_url: str, + client_id: str, + client_secret: str, +) -> None: + """Run a simple HTTP server for testing OAuth flows""" + import json as json_module + import threading + from http.server import BaseHTTPRequestHandler, HTTPServer + from urllib.request import Request, urlopen + + redirect_uri = f"http://localhost:{port}/callback" + + html_content = create_test_html( + platform_url=platform_url, + client_id=client_id, + client_secret=client_secret, + redirect_uri=redirect_uri, + backend_url=backend_url, + ) + + class TestHandler(BaseHTTPRequestHandler): + def do_GET(self): + from urllib.parse import parse_qs + + # Parse the path + parsed = urlparse(self.path) + + # Serve the test page for root and callback + if parsed.path in ["/", "/callback"]: + self.send_response(200) + self.send_header("Content-Type", "text/html; charset=utf-8") + self.end_headers() + self.wfile.write(html_content.encode()) + + # Proxy API calls to backend (avoids CORS issues) + # Supports both /proxy/api/* and /proxy/external-api/* + elif parsed.path.startswith("/proxy/"): + try: + # Extract the API path and token from 
query params + api_path = parsed.path[len("/proxy") :] + query_params = parse_qs(parsed.query) + token = query_params.get("token", [None])[0] + + headers = {} + if token: + headers["Authorization"] = f"Bearer {token}" + + req = Request( + f"{backend_url}{api_path}", + headers=headers, + method="GET", + ) + + with urlopen(req) as response: + response_body = response.read() + self.send_response(response.status) + self.send_header("Content-Type", "application/json") + self.end_headers() + self.wfile.write(response_body) + + except Exception as e: + error_msg = str(e) + status_code = 500 + if hasattr(e, "code"): + status_code = e.code # type: ignore + if hasattr(e, "read"): + try: + error_body = e.read().decode() # type: ignore + error_data = json_module.loads(error_body) + error_msg = error_data.get("detail", error_msg) + except Exception: + pass + + self.send_response(status_code) + self.send_header("Content-Type", "application/json") + self.end_headers() + self.wfile.write(json_module.dumps({"detail": error_msg}).encode()) + + else: + self.send_response(404) + self.end_headers() + + def do_POST(self): + # Parse the path + parsed = urlparse(self.path) + + # Proxy token exchange to backend (avoids CORS issues) + if parsed.path == "/proxy/token": + try: + # Read request body + content_length = int(self.headers.get("Content-Length", 0)) + body = self.rfile.read(content_length) + + # Forward to backend + req = Request( + f"{backend_url}/api/oauth/token", + data=body, + headers={"Content-Type": "application/json"}, + method="POST", + ) + + with urlopen(req) as response: + response_body = response.read() + self.send_response(response.status) + self.send_header("Content-Type", "application/json") + self.end_headers() + self.wfile.write(response_body) + + except Exception as e: + error_msg = str(e) + # Try to extract error detail from urllib error + if hasattr(e, "read"): + try: + error_body = e.read().decode() # type: ignore + error_data = json_module.loads(error_body) + error_msg = error_data.get("detail", error_msg) + except Exception: + pass + + self.send_response(500) + self.send_header("Content-Type", "application/json") + self.end_headers() + self.wfile.write(json_module.dumps({"detail": error_msg}).encode()) + else: + self.send_response(404) + self.end_headers() + + def log_message(self, format, *args): + # Suppress default logging + pass + + server = HTTPServer(("localhost", port), TestHandler) + click.echo(f"\n🚀 Test server running at http://localhost:{port}") + click.echo(" Open this URL in your browser to test the OAuth flows\n") + + # Run server in a daemon thread + server_thread = threading.Thread(target=server.serve_forever, daemon=True) + server_thread.start() + + # Use a simple polling loop that can be interrupted + try: + while server_thread.is_alive(): + server_thread.join(timeout=1.0) + except KeyboardInterrupt: + pass + + click.echo("\n\n⏹️ Server stopped") + server.shutdown() + + +async def setup_and_cleanup_test_app( + owner_id: str, + redirect_uri: str, + port: int, + platform_url: str, + backend_url: str, +) -> None: + """ + Async context manager that handles test app lifecycle. + Creates the app, yields control to run the server, then cleans up. 
+ """ + app_info: Optional[dict] = None + + try: + # Create test app in database + click.echo("\n📝 Creating temporary OAuth application...") + app_info = await create_test_app_in_db(owner_id, redirect_uri) + + click.echo(f"\n Client ID: {app_info['client_id']}") + click.echo(f" Client Secret: {app_info['client_secret'][:30]}...") + + # Run the test server (blocking, synchronous) + click.echo("\n" + "-" * 60) + click.echo(" Press Ctrl+C to stop the server and clean up") + click.echo("-" * 60) + + run_test_server( + port=port, + platform_url=platform_url, + backend_url=backend_url, + client_id=app_info["client_id"], + client_secret=app_info["client_secret"], + ) + + finally: + # Always clean up - we're still in the same event loop + if app_info: + try: + await cleanup_test_app(app_info["id"]) + except Exception as e: + click.echo(f"\n⚠️ Cleanup error: {e}", err=True) + click.echo( + f" You may need to manually delete app with ID: {app_info['id']}" + ) + + +@cli.command(name="test-server") +@click.option( + "--owner-id", + required=True, + help="User ID to own the temporary test OAuth application", +) +@click.option( + "--port", + default=TEST_SERVER_PORT, + help=f"Port to run the test server on (default: {TEST_SERVER_PORT})", +) +@click.option( + "--platform-url", + default="http://localhost:3000", + help="AutoGPT Platform frontend URL (default: http://localhost:3000)", +) +@click.option( + "--backend-url", + default="http://localhost:8006", + help="AutoGPT Platform backend URL (default: http://localhost:8006)", +) +def test_server_command( + owner_id: str, + port: int, + platform_url: str, + backend_url: str, +): + """Run a test server to test OAuth flows interactively + + This command: + 1. Creates a temporary OAuth application in the database + 2. Starts a minimal web server that acts as a third-party client + 3. Lets you test "Sign in with AutoGPT" and Integration Setup flows + 4. 
Cleans up all test data (app, tokens, codes) when you stop the server + + Example: + poetry run oauth-tool test-server --owner-id YOUR_USER_ID + + The test server will be available at http://localhost:9876 + """ + redirect_uri = f"http://localhost:{port}/callback" + + click.echo("=" * 60) + click.echo(" OAuth Test Server") + click.echo("=" * 60) + click.echo(f"\n Owner ID: {owner_id}") + click.echo(f" Platform URL: {platform_url}") + click.echo(f" Backend URL: {backend_url}") + click.echo(f" Test Server: http://localhost:{port}") + click.echo(f" Redirect URI: {redirect_uri}") + click.echo("\n" + "=" * 60) + + try: + # Run everything in a single event loop to keep Prisma client happy + asyncio.run( + setup_and_cleanup_test_app( + owner_id=owner_id, + redirect_uri=redirect_uri, + port=port, + platform_url=platform_url, + backend_url=backend_url, + ) + ) + except KeyboardInterrupt: + # Already handled inside, just exit cleanly + pass + except Exception as e: + click.echo(f"\n❌ Error: {e}", err=True) + sys.exit(1) + + +if __name__ == "__main__": + cli() diff --git a/autogpt_platform/backend/backend/data/api_key.py b/autogpt_platform/backend/backend/data/auth/api_key.py similarity index 95% rename from autogpt_platform/backend/backend/data/api_key.py rename to autogpt_platform/backend/backend/data/auth/api_key.py index 45194897de..2ecd5be9a5 100644 --- a/autogpt_platform/backend/backend/data/api_key.py +++ b/autogpt_platform/backend/backend/data/auth/api_key.py @@ -1,22 +1,24 @@ import logging import uuid from datetime import datetime, timezone -from typing import Optional +from typing import Literal, Optional from autogpt_libs.api_key.keysmith import APIKeySmith from prisma.enums import APIKeyPermission, APIKeyStatus from prisma.models import APIKey as PrismaAPIKey from prisma.types import APIKeyWhereUniqueInput -from pydantic import BaseModel, Field +from pydantic import Field from backend.data.includes import MAX_USER_API_KEYS_FETCH from backend.util.exceptions import NotAuthorizedError, NotFoundError +from .base import APIAuthorizationInfo + logger = logging.getLogger(__name__) keysmith = APIKeySmith() -class APIKeyInfo(BaseModel): +class APIKeyInfo(APIAuthorizationInfo): id: str name: str head: str = Field( @@ -26,12 +28,9 @@ class APIKeyInfo(BaseModel): description=f"The last {APIKeySmith.TAIL_LENGTH} characters of the key" ) status: APIKeyStatus - permissions: list[APIKeyPermission] - created_at: datetime - last_used_at: Optional[datetime] = None - revoked_at: Optional[datetime] = None description: Optional[str] = None - user_id: str + + type: Literal["api_key"] = "api_key" # type: ignore @staticmethod def from_db(api_key: PrismaAPIKey): @@ -41,7 +40,7 @@ class APIKeyInfo(BaseModel): head=api_key.head, tail=api_key.tail, status=APIKeyStatus(api_key.status), - permissions=[APIKeyPermission(p) for p in api_key.permissions], + scopes=[APIKeyPermission(p) for p in api_key.permissions], created_at=api_key.createdAt, last_used_at=api_key.lastUsedAt, revoked_at=api_key.revokedAt, @@ -211,7 +210,7 @@ async def suspend_api_key(key_id: str, user_id: str) -> APIKeyInfo: def has_permission(api_key: APIKeyInfo, required_permission: APIKeyPermission) -> bool: - return required_permission in api_key.permissions + return required_permission in api_key.scopes async def get_api_key_by_id(key_id: str, user_id: str) -> Optional[APIKeyInfo]: diff --git a/autogpt_platform/backend/backend/data/auth/base.py b/autogpt_platform/backend/backend/data/auth/base.py new file mode 100644 index 0000000000..e307b5f49f 
--- /dev/null +++ b/autogpt_platform/backend/backend/data/auth/base.py @@ -0,0 +1,15 @@ +from datetime import datetime +from typing import Literal, Optional + +from prisma.enums import APIKeyPermission +from pydantic import BaseModel + + +class APIAuthorizationInfo(BaseModel): + user_id: str + scopes: list[APIKeyPermission] + type: Literal["oauth", "api_key"] + created_at: datetime + expires_at: Optional[datetime] = None + last_used_at: Optional[datetime] = None + revoked_at: Optional[datetime] = None diff --git a/autogpt_platform/backend/backend/data/auth/oauth.py b/autogpt_platform/backend/backend/data/auth/oauth.py new file mode 100644 index 0000000000..e49586194c --- /dev/null +++ b/autogpt_platform/backend/backend/data/auth/oauth.py @@ -0,0 +1,872 @@ +""" +OAuth 2.0 Provider Data Layer + +Handles management of OAuth applications, authorization codes, +access tokens, and refresh tokens. + +Hashing strategy: +- Access tokens & Refresh tokens: SHA256 (deterministic, allows direct lookup by hash) +- Client secrets: Scrypt with salt (lookup by client_id, then verify with salt) +""" + +import hashlib +import logging +import secrets +import uuid +from datetime import datetime, timedelta, timezone +from typing import Literal, Optional + +from autogpt_libs.api_key.keysmith import APIKeySmith +from prisma.enums import APIKeyPermission as APIPermission +from prisma.models import OAuthAccessToken as PrismaOAuthAccessToken +from prisma.models import OAuthApplication as PrismaOAuthApplication +from prisma.models import OAuthAuthorizationCode as PrismaOAuthAuthorizationCode +from prisma.models import OAuthRefreshToken as PrismaOAuthRefreshToken +from prisma.types import OAuthApplicationUpdateInput +from pydantic import BaseModel, Field, SecretStr + +from .base import APIAuthorizationInfo + +logger = logging.getLogger(__name__) +keysmith = APIKeySmith() # Only used for client secret hashing (Scrypt) + + +def _generate_token() -> str: + """Generate a cryptographically secure random token.""" + return secrets.token_urlsafe(32) + + +def _hash_token(token: str) -> str: + """Hash a token using SHA256 (deterministic, for direct lookup).""" + return hashlib.sha256(token.encode()).hexdigest() + + +# Token TTLs +AUTHORIZATION_CODE_TTL = timedelta(minutes=10) +ACCESS_TOKEN_TTL = timedelta(hours=1) +REFRESH_TOKEN_TTL = timedelta(days=30) + +ACCESS_TOKEN_PREFIX = "agpt_xt_" +REFRESH_TOKEN_PREFIX = "agpt_rt_" + + +# ============================================================================ +# Exception Classes +# ============================================================================ + + +class OAuthError(Exception): + """Base OAuth error""" + + pass + + +class InvalidClientError(OAuthError): + """Invalid client_id or client_secret""" + + pass + + +class InvalidGrantError(OAuthError): + """Invalid or expired authorization code/refresh token""" + + def __init__(self, reason: str): + self.reason = reason + super().__init__(f"Invalid grant: {reason}") + + +class InvalidTokenError(OAuthError): + """Invalid, expired, or revoked token""" + + def __init__(self, reason: str): + self.reason = reason + super().__init__(f"Invalid token: {reason}") + + +# ============================================================================ +# Data Models +# ============================================================================ + + +class OAuthApplicationInfo(BaseModel): + """OAuth application information (without client secret hash)""" + + id: str + name: str + description: Optional[str] = None + logo_url: Optional[str] 
= None + client_id: str + redirect_uris: list[str] + grant_types: list[str] + scopes: list[APIPermission] + owner_id: str + is_active: bool + created_at: datetime + updated_at: datetime + + @staticmethod + def from_db(app: PrismaOAuthApplication): + return OAuthApplicationInfo( + id=app.id, + name=app.name, + description=app.description, + logo_url=app.logoUrl, + client_id=app.clientId, + redirect_uris=app.redirectUris, + grant_types=app.grantTypes, + scopes=[APIPermission(s) for s in app.scopes], + owner_id=app.ownerId, + is_active=app.isActive, + created_at=app.createdAt, + updated_at=app.updatedAt, + ) + + +class OAuthApplicationInfoWithSecret(OAuthApplicationInfo): + """OAuth application with client secret hash (for validation)""" + + client_secret_hash: str + client_secret_salt: str + + @staticmethod + def from_db(app: PrismaOAuthApplication): + return OAuthApplicationInfoWithSecret( + **OAuthApplicationInfo.from_db(app).model_dump(), + client_secret_hash=app.clientSecret, + client_secret_salt=app.clientSecretSalt, + ) + + def verify_secret(self, plaintext_secret: str) -> bool: + """Verify a plaintext client secret against the stored hash""" + # Use keysmith.verify_key() with stored salt + return keysmith.verify_key( + plaintext_secret, self.client_secret_hash, self.client_secret_salt + ) + + +class OAuthAuthorizationCodeInfo(BaseModel): + """Authorization code information""" + + id: str + code: str + created_at: datetime + expires_at: datetime + application_id: str + user_id: str + scopes: list[APIPermission] + redirect_uri: str + code_challenge: Optional[str] = None + code_challenge_method: Optional[str] = None + used_at: Optional[datetime] = None + + @property + def is_used(self) -> bool: + return self.used_at is not None + + @staticmethod + def from_db(code: PrismaOAuthAuthorizationCode): + return OAuthAuthorizationCodeInfo( + id=code.id, + code=code.code, + created_at=code.createdAt, + expires_at=code.expiresAt, + application_id=code.applicationId, + user_id=code.userId, + scopes=[APIPermission(s) for s in code.scopes], + redirect_uri=code.redirectUri, + code_challenge=code.codeChallenge, + code_challenge_method=code.codeChallengeMethod, + used_at=code.usedAt, + ) + + +class OAuthAccessTokenInfo(APIAuthorizationInfo): + """Access token information""" + + id: str + expires_at: datetime # type: ignore + application_id: str + + type: Literal["oauth"] = "oauth" # type: ignore + + @staticmethod + def from_db(token: PrismaOAuthAccessToken): + return OAuthAccessTokenInfo( + id=token.id, + user_id=token.userId, + scopes=[APIPermission(s) for s in token.scopes], + created_at=token.createdAt, + expires_at=token.expiresAt, + last_used_at=None, + revoked_at=token.revokedAt, + application_id=token.applicationId, + ) + + +class OAuthAccessToken(OAuthAccessTokenInfo): + """Access token with plaintext token included (sensitive)""" + + token: SecretStr = Field(description="Plaintext token (sensitive)") + + @staticmethod + def from_db(token: PrismaOAuthAccessToken, plaintext_token: str): # type: ignore + return OAuthAccessToken( + **OAuthAccessTokenInfo.from_db(token).model_dump(), + token=SecretStr(plaintext_token), + ) + + +class OAuthRefreshTokenInfo(BaseModel): + """Refresh token information""" + + id: str + user_id: str + scopes: list[APIPermission] + created_at: datetime + expires_at: datetime + application_id: str + revoked_at: Optional[datetime] = None + + @property + def is_revoked(self) -> bool: + return self.revoked_at is not None + + @staticmethod + def from_db(token: 
PrismaOAuthRefreshToken): + return OAuthRefreshTokenInfo( + id=token.id, + user_id=token.userId, + scopes=[APIPermission(s) for s in token.scopes], + created_at=token.createdAt, + expires_at=token.expiresAt, + application_id=token.applicationId, + revoked_at=token.revokedAt, + ) + + +class OAuthRefreshToken(OAuthRefreshTokenInfo): + """Refresh token with plaintext token included (sensitive)""" + + token: SecretStr = Field(description="Plaintext token (sensitive)") + + @staticmethod + def from_db(token: PrismaOAuthRefreshToken, plaintext_token: str): # type: ignore + return OAuthRefreshToken( + **OAuthRefreshTokenInfo.from_db(token).model_dump(), + token=SecretStr(plaintext_token), + ) + + +class TokenIntrospectionResult(BaseModel): + """Result of token introspection (RFC 7662)""" + + active: bool + scopes: Optional[list[str]] = None + client_id: Optional[str] = None + user_id: Optional[str] = None + exp: Optional[int] = None # Unix timestamp + token_type: Optional[Literal["access_token", "refresh_token"]] = None + + +# ============================================================================ +# OAuth Application Management +# ============================================================================ + + +async def get_oauth_application(client_id: str) -> Optional[OAuthApplicationInfo]: + """Get OAuth application by client ID (without secret)""" + app = await PrismaOAuthApplication.prisma().find_unique( + where={"clientId": client_id} + ) + if not app: + return None + return OAuthApplicationInfo.from_db(app) + + +async def get_oauth_application_with_secret( + client_id: str, +) -> Optional[OAuthApplicationInfoWithSecret]: + """Get OAuth application by client ID (with secret hash for validation)""" + app = await PrismaOAuthApplication.prisma().find_unique( + where={"clientId": client_id} + ) + if not app: + return None + return OAuthApplicationInfoWithSecret.from_db(app) + + +async def validate_client_credentials( + client_id: str, client_secret: str +) -> OAuthApplicationInfo: + """ + Validate client credentials and return application info. 
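+
+    Illustrative usage (a sketch; form stands in for the token endpoint's
+    parsed client credentials and is not defined in this module):
+        app = await validate_client_credentials(form.client_id, form.client_secret)
+        if not validate_redirect_uri(app, form.redirect_uri):
+            raise InvalidGrantError("redirect_uri is not registered for this client")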
+ + Raises: + InvalidClientError: If client_id or client_secret is invalid, or app is inactive + """ + app = await get_oauth_application_with_secret(client_id) + if not app: + raise InvalidClientError("Invalid client_id") + + if not app.is_active: + raise InvalidClientError("Application is not active") + + # Verify client secret + if not app.verify_secret(client_secret): + raise InvalidClientError("Invalid client_secret") + + # Return without secret hash + return OAuthApplicationInfo(**app.model_dump(exclude={"client_secret_hash"})) + + +def validate_redirect_uri(app: OAuthApplicationInfo, redirect_uri: str) -> bool: + """Validate that redirect URI is registered for the application""" + return redirect_uri in app.redirect_uris + + +def validate_scopes( + app: OAuthApplicationInfo, requested_scopes: list[APIPermission] +) -> bool: + """Validate that all requested scopes are allowed for the application""" + return all(scope in app.scopes for scope in requested_scopes) + + +# ============================================================================ +# Authorization Code Flow +# ============================================================================ + + +def _generate_authorization_code() -> str: + """Generate a cryptographically secure authorization code""" + # 32 bytes = 256 bits of entropy + return secrets.token_urlsafe(32) + + +async def create_authorization_code( + application_id: str, + user_id: str, + scopes: list[APIPermission], + redirect_uri: str, + code_challenge: Optional[str] = None, + code_challenge_method: Optional[Literal["S256", "plain"]] = None, +) -> OAuthAuthorizationCodeInfo: + """ + Create a new authorization code. + Expires in 10 minutes and can only be used once. + """ + code = _generate_authorization_code() + now = datetime.now(timezone.utc) + expires_at = now + AUTHORIZATION_CODE_TTL + + saved_code = await PrismaOAuthAuthorizationCode.prisma().create( + data={ + "id": str(uuid.uuid4()), + "code": code, + "expiresAt": expires_at, + "applicationId": application_id, + "userId": user_id, + "scopes": [s for s in scopes], + "redirectUri": redirect_uri, + "codeChallenge": code_challenge, + "codeChallengeMethod": code_challenge_method, + } + ) + + return OAuthAuthorizationCodeInfo.from_db(saved_code) + + +async def consume_authorization_code( + code: str, + application_id: str, + redirect_uri: str, + code_verifier: Optional[str] = None, +) -> tuple[str, list[APIPermission]]: + """ + Consume an authorization code and return (user_id, scopes). 
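+
+    Illustrative call from the token endpoint (form is a placeholder for the
+    parsed request body):
+        user_id, scopes = await consume_authorization_code(
+            code=form.code,
+            application_id=app.id,
+            redirect_uri=form.redirect_uri,
+            code_verifier=form.code_verifier,  # only needed when PKCE was used
+        )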
+ + This marks the code as used and validates: + - Code exists and matches application + - Code is not expired + - Code has not been used + - Redirect URI matches + - PKCE code verifier matches (if code challenge was provided) + + Raises: + InvalidGrantError: If code is invalid, expired, used, or PKCE fails + """ + auth_code = await PrismaOAuthAuthorizationCode.prisma().find_unique( + where={"code": code} + ) + + if not auth_code: + raise InvalidGrantError("authorization code not found") + + # Validate application + if auth_code.applicationId != application_id: + raise InvalidGrantError( + "authorization code does not belong to this application" + ) + + # Check if already used + if auth_code.usedAt is not None: + raise InvalidGrantError( + f"authorization code already used at {auth_code.usedAt}" + ) + + # Check expiration + now = datetime.now(timezone.utc) + if auth_code.expiresAt < now: + raise InvalidGrantError("authorization code expired") + + # Validate redirect URI + if auth_code.redirectUri != redirect_uri: + raise InvalidGrantError("redirect_uri mismatch") + + # Validate PKCE if code challenge was provided + if auth_code.codeChallenge: + if not code_verifier: + raise InvalidGrantError("code_verifier required but not provided") + + if not _verify_pkce( + code_verifier, auth_code.codeChallenge, auth_code.codeChallengeMethod + ): + raise InvalidGrantError("PKCE verification failed") + + # Mark code as used + await PrismaOAuthAuthorizationCode.prisma().update( + where={"code": code}, + data={"usedAt": now}, + ) + + return auth_code.userId, [APIPermission(s) for s in auth_code.scopes] + + +def _verify_pkce( + code_verifier: str, code_challenge: str, code_challenge_method: Optional[str] +) -> bool: + """ + Verify PKCE code verifier against code challenge. + + Supports: + - S256: SHA256(code_verifier) == code_challenge + - plain: code_verifier == code_challenge + """ + if code_challenge_method == "S256": + # Hash the verifier with SHA256 and base64url encode + hashed = hashlib.sha256(code_verifier.encode("ascii")).digest() + computed_challenge = ( + secrets.token_urlsafe(len(hashed)).encode("ascii").decode("ascii") + ) + # For proper base64url encoding + import base64 + + computed_challenge = ( + base64.urlsafe_b64encode(hashed).decode("ascii").rstrip("=") + ) + return secrets.compare_digest(computed_challenge, code_challenge) + elif code_challenge_method == "plain" or code_challenge_method is None: + # Plain comparison + return secrets.compare_digest(code_verifier, code_challenge) + else: + logger.warning(f"Unsupported code challenge method: {code_challenge_method}") + return False + + +# ============================================================================ +# Access Token Management +# ============================================================================ + + +async def create_access_token( + application_id: str, user_id: str, scopes: list[APIPermission] +) -> OAuthAccessToken: + """ + Create a new access token. + Returns OAuthAccessToken (with plaintext token). 
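+
+    Illustrative usage (sketch only):
+        access = await create_access_token(app.id, user_id, scopes)
+        plaintext = access.token.get_secret_value()  # return to the client; only its SHA256 hash is stored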
+ """ + plaintext_token = ACCESS_TOKEN_PREFIX + _generate_token() + token_hash = _hash_token(plaintext_token) + now = datetime.now(timezone.utc) + expires_at = now + ACCESS_TOKEN_TTL + + saved_token = await PrismaOAuthAccessToken.prisma().create( + data={ + "id": str(uuid.uuid4()), + "token": token_hash, # SHA256 hash for direct lookup + "expiresAt": expires_at, + "applicationId": application_id, + "userId": user_id, + "scopes": [s for s in scopes], + } + ) + + return OAuthAccessToken.from_db(saved_token, plaintext_token=plaintext_token) + + +async def validate_access_token( + token: str, +) -> tuple[OAuthAccessTokenInfo, OAuthApplicationInfo]: + """ + Validate an access token and return token info. + + Raises: + InvalidTokenError: If token is invalid, expired, or revoked + InvalidClientError: If the client application is not marked as active + """ + token_hash = _hash_token(token) + + # Direct lookup by hash + access_token = await PrismaOAuthAccessToken.prisma().find_unique( + where={"token": token_hash}, include={"Application": True} + ) + + if not access_token: + raise InvalidTokenError("access token not found") + + if not access_token.Application: # should be impossible + raise InvalidClientError("Client application not found") + + if not access_token.Application.isActive: + raise InvalidClientError("Client application is disabled") + + if access_token.revokedAt is not None: + raise InvalidTokenError("access token has been revoked") + + # Check expiration + now = datetime.now(timezone.utc) + if access_token.expiresAt < now: + raise InvalidTokenError("access token expired") + + return ( + OAuthAccessTokenInfo.from_db(access_token), + OAuthApplicationInfo.from_db(access_token.Application), + ) + + +async def revoke_access_token( + token: str, application_id: str +) -> OAuthAccessTokenInfo | None: + """ + Revoke an access token. + + Args: + token: The plaintext access token to revoke + application_id: The application ID making the revocation request. + Only tokens belonging to this application will be revoked. + + Returns: + OAuthAccessTokenInfo if token was found and revoked, None otherwise. + + Note: + Always performs exactly 2 DB queries regardless of outcome to prevent + timing side-channel attacks that could reveal token existence. + """ + try: + token_hash = _hash_token(token) + + # Use update_many to filter by both token and applicationId + updated_count = await PrismaOAuthAccessToken.prisma().update_many( + where={ + "token": token_hash, + "applicationId": application_id, + "revokedAt": None, + }, + data={"revokedAt": datetime.now(timezone.utc)}, + ) + + # Always perform second query to ensure constant time + result = await PrismaOAuthAccessToken.prisma().find_unique( + where={"token": token_hash} + ) + + # Only return result if we actually revoked something + if updated_count == 0: + return None + + return OAuthAccessTokenInfo.from_db(result) if result else None + except Exception as e: + logger.exception(f"Error revoking access token: {e}") + return None + + +# ============================================================================ +# Refresh Token Management +# ============================================================================ + + +async def create_refresh_token( + application_id: str, user_id: str, scopes: list[APIPermission] +) -> OAuthRefreshToken: + """ + Create a new refresh token. + Returns OAuthRefreshToken (with plaintext token). 
+ """ + plaintext_token = REFRESH_TOKEN_PREFIX + _generate_token() + token_hash = _hash_token(plaintext_token) + now = datetime.now(timezone.utc) + expires_at = now + REFRESH_TOKEN_TTL + + saved_token = await PrismaOAuthRefreshToken.prisma().create( + data={ + "id": str(uuid.uuid4()), + "token": token_hash, # SHA256 hash for direct lookup + "expiresAt": expires_at, + "applicationId": application_id, + "userId": user_id, + "scopes": [s for s in scopes], + } + ) + + return OAuthRefreshToken.from_db(saved_token, plaintext_token=plaintext_token) + + +async def refresh_tokens( + refresh_token: str, application_id: str +) -> tuple[OAuthAccessToken, OAuthRefreshToken]: + """ + Use a refresh token to create new access and refresh tokens. + Returns (new_access_token, new_refresh_token) both with plaintext tokens included. + + Raises: + InvalidGrantError: If refresh token is invalid, expired, or revoked + """ + token_hash = _hash_token(refresh_token) + + # Direct lookup by hash + rt = await PrismaOAuthRefreshToken.prisma().find_unique(where={"token": token_hash}) + + if not rt: + raise InvalidGrantError("refresh token not found") + + # NOTE: no need to check Application.isActive, this is checked by the token endpoint + + if rt.revokedAt is not None: + raise InvalidGrantError("refresh token has been revoked") + + # Validate application + if rt.applicationId != application_id: + raise InvalidGrantError("refresh token does not belong to this application") + + # Check expiration + now = datetime.now(timezone.utc) + if rt.expiresAt < now: + raise InvalidGrantError("refresh token expired") + + # Revoke old refresh token + await PrismaOAuthRefreshToken.prisma().update( + where={"token": token_hash}, + data={"revokedAt": now}, + ) + + # Create new access and refresh tokens with same scopes + scopes = [APIPermission(s) for s in rt.scopes] + new_access_token = await create_access_token( + rt.applicationId, + rt.userId, + scopes, + ) + new_refresh_token = await create_refresh_token( + rt.applicationId, + rt.userId, + scopes, + ) + + return new_access_token, new_refresh_token + + +async def revoke_refresh_token( + token: str, application_id: str +) -> OAuthRefreshTokenInfo | None: + """ + Revoke a refresh token. + + Args: + token: The plaintext refresh token to revoke + application_id: The application ID making the revocation request. + Only tokens belonging to this application will be revoked. + + Returns: + OAuthRefreshTokenInfo if token was found and revoked, None otherwise. + + Note: + Always performs exactly 2 DB queries regardless of outcome to prevent + timing side-channel attacks that could reveal token existence. 
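+
+    Illustrative usage (from a revocation endpoint; names are placeholders):
+        revoked = await revoke_refresh_token(form.token, application_id=app.id)
+        # revoked is None when the token is unknown or belongs to another application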
+ """ + try: + token_hash = _hash_token(token) + + # Use update_many to filter by both token and applicationId + updated_count = await PrismaOAuthRefreshToken.prisma().update_many( + where={ + "token": token_hash, + "applicationId": application_id, + "revokedAt": None, + }, + data={"revokedAt": datetime.now(timezone.utc)}, + ) + + # Always perform second query to ensure constant time + result = await PrismaOAuthRefreshToken.prisma().find_unique( + where={"token": token_hash} + ) + + # Only return result if we actually revoked something + if updated_count == 0: + return None + + return OAuthRefreshTokenInfo.from_db(result) if result else None + except Exception as e: + logger.exception(f"Error revoking refresh token: {e}") + return None + + +# ============================================================================ +# Token Introspection +# ============================================================================ + + +async def introspect_token( + token: str, + token_type_hint: Optional[Literal["access_token", "refresh_token"]] = None, +) -> TokenIntrospectionResult: + """ + Introspect a token and return its metadata (RFC 7662). + + Returns TokenIntrospectionResult with active=True and metadata if valid, + or active=False if the token is invalid/expired/revoked. + """ + # Try as access token first (or if hint says "access_token") + if token_type_hint != "refresh_token": + try: + token_info, app = await validate_access_token(token) + return TokenIntrospectionResult( + active=True, + scopes=list(s.value for s in token_info.scopes), + client_id=app.client_id if app else None, + user_id=token_info.user_id, + exp=int(token_info.expires_at.timestamp()), + token_type="access_token", + ) + except InvalidTokenError: + pass # Try as refresh token + + # Try as refresh token + token_hash = _hash_token(token) + refresh_token = await PrismaOAuthRefreshToken.prisma().find_unique( + where={"token": token_hash} + ) + + if refresh_token and refresh_token.revokedAt is None: + # Check if valid (not expired) + now = datetime.now(timezone.utc) + if refresh_token.expiresAt > now: + app = await get_oauth_application_by_id(refresh_token.applicationId) + return TokenIntrospectionResult( + active=True, + scopes=list(s for s in refresh_token.scopes), + client_id=app.client_id if app else None, + user_id=refresh_token.userId, + exp=int(refresh_token.expiresAt.timestamp()), + token_type="refresh_token", + ) + + # Token not found or inactive + return TokenIntrospectionResult(active=False) + + +async def get_oauth_application_by_id(app_id: str) -> Optional[OAuthApplicationInfo]: + """Get OAuth application by ID""" + app = await PrismaOAuthApplication.prisma().find_unique(where={"id": app_id}) + if not app: + return None + return OAuthApplicationInfo.from_db(app) + + +async def list_user_oauth_applications(user_id: str) -> list[OAuthApplicationInfo]: + """Get all OAuth applications owned by a user""" + apps = await PrismaOAuthApplication.prisma().find_many( + where={"ownerId": user_id}, + order={"createdAt": "desc"}, + ) + return [OAuthApplicationInfo.from_db(app) for app in apps] + + +async def update_oauth_application( + app_id: str, + *, + owner_id: str, + is_active: Optional[bool] = None, + logo_url: Optional[str] = None, +) -> Optional[OAuthApplicationInfo]: + """ + Update OAuth application active status. + Only the owner can update their app's status. + + Returns the updated app info, or None if app not found or not owned by user. 
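+
+    Illustrative usage (sketch only):
+        await update_oauth_application(app_id, owner_id=user_id, is_active=False)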
+ """ + # First verify ownership + app = await PrismaOAuthApplication.prisma().find_first( + where={"id": app_id, "ownerId": owner_id} + ) + if not app: + return None + + patch: OAuthApplicationUpdateInput = {} + if is_active is not None: + patch["isActive"] = is_active + if logo_url: + patch["logoUrl"] = logo_url + if not patch: + return OAuthApplicationInfo.from_db(app) # return unchanged + + updated_app = await PrismaOAuthApplication.prisma().update( + where={"id": app_id}, + data=patch, + ) + return OAuthApplicationInfo.from_db(updated_app) if updated_app else None + + +# ============================================================================ +# Token Cleanup +# ============================================================================ + + +async def cleanup_expired_oauth_tokens() -> dict[str, int]: + """ + Delete expired OAuth tokens from the database. + + This removes: + - Expired authorization codes (10 min TTL) + - Expired access tokens (1 hour TTL) + - Expired refresh tokens (30 day TTL) + + Returns a dict with counts of deleted tokens by type. + """ + now = datetime.now(timezone.utc) + + # Delete expired authorization codes + codes_result = await PrismaOAuthAuthorizationCode.prisma().delete_many( + where={"expiresAt": {"lt": now}} + ) + + # Delete expired access tokens + access_result = await PrismaOAuthAccessToken.prisma().delete_many( + where={"expiresAt": {"lt": now}} + ) + + # Delete expired refresh tokens + refresh_result = await PrismaOAuthRefreshToken.prisma().delete_many( + where={"expiresAt": {"lt": now}} + ) + + deleted = { + "authorization_codes": codes_result, + "access_tokens": access_result, + "refresh_tokens": refresh_result, + } + + total = sum(deleted.values()) + if total > 0: + logger.info(f"Cleaned up {total} expired OAuth tokens: {deleted}") + + return deleted diff --git a/autogpt_platform/backend/backend/executor/scheduler.py b/autogpt_platform/backend/backend/executor/scheduler.py index 6a0bb593c6..06c50bf82e 100644 --- a/autogpt_platform/backend/backend/executor/scheduler.py +++ b/autogpt_platform/backend/backend/executor/scheduler.py @@ -23,6 +23,7 @@ from dotenv import load_dotenv from pydantic import BaseModel, Field, ValidationError from sqlalchemy import MetaData, create_engine +from backend.data.auth.oauth import cleanup_expired_oauth_tokens from backend.data.block import BlockInput from backend.data.execution import GraphExecutionWithNodes from backend.data.model import CredentialsMetaInput @@ -242,6 +243,12 @@ def cleanup_expired_files(): run_async(cleanup_expired_files_async()) +def cleanup_oauth_tokens(): + """Clean up expired OAuth tokens from the database.""" + # Wait for completion + run_async(cleanup_expired_oauth_tokens()) + + def execution_accuracy_alerts(): """Check execution accuracy and send alerts if drops are detected.""" return report_execution_accuracy_alerts() @@ -446,6 +453,17 @@ class Scheduler(AppService): jobstore=Jobstores.EXECUTION.value, ) + # OAuth Token Cleanup - configurable interval + self.scheduler.add_job( + cleanup_oauth_tokens, + id="cleanup_oauth_tokens", + trigger="interval", + replace_existing=True, + seconds=config.oauth_token_cleanup_interval_hours + * 3600, # Convert hours to seconds + jobstore=Jobstores.EXECUTION.value, + ) + # Execution Accuracy Monitoring - configurable interval self.scheduler.add_job( execution_accuracy_alerts, @@ -604,6 +622,11 @@ class Scheduler(AppService): """Manually trigger cleanup of expired cloud storage files.""" return cleanup_expired_files() + @expose + def 
execute_cleanup_oauth_tokens(self): + """Manually trigger cleanup of expired OAuth tokens.""" + return cleanup_oauth_tokens() + @expose def execute_report_execution_accuracy_alerts(self): """Manually trigger execution accuracy alert checking.""" diff --git a/autogpt_platform/backend/backend/server/external/middleware.py b/autogpt_platform/backend/backend/server/external/middleware.py index af84c92687..0c278e1715 100644 --- a/autogpt_platform/backend/backend/server/external/middleware.py +++ b/autogpt_platform/backend/backend/server/external/middleware.py @@ -1,36 +1,107 @@ -from fastapi import HTTPException, Security -from fastapi.security import APIKeyHeader +from fastapi import HTTPException, Security, status +from fastapi.security import APIKeyHeader, HTTPAuthorizationCredentials, HTTPBearer from prisma.enums import APIKeyPermission -from backend.data.api_key import APIKeyInfo, has_permission, validate_api_key +from backend.data.auth.api_key import APIKeyInfo, validate_api_key +from backend.data.auth.base import APIAuthorizationInfo +from backend.data.auth.oauth import ( + InvalidClientError, + InvalidTokenError, + OAuthAccessTokenInfo, + validate_access_token, +) api_key_header = APIKeyHeader(name="X-API-Key", auto_error=False) +bearer_auth = HTTPBearer(auto_error=False) async def require_api_key(api_key: str | None = Security(api_key_header)) -> APIKeyInfo: - """Base middleware for API key authentication""" + """Middleware for API key authentication only""" if api_key is None: - raise HTTPException(status_code=401, detail="Missing API key") + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail="Missing API key" + ) api_key_obj = await validate_api_key(api_key) if not api_key_obj: - raise HTTPException(status_code=401, detail="Invalid API key") + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid API key" + ) return api_key_obj +async def require_access_token( + bearer: HTTPAuthorizationCredentials | None = Security(bearer_auth), +) -> OAuthAccessTokenInfo: + """Middleware for OAuth access token authentication only""" + if bearer is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Missing Authorization header", + ) + + try: + token_info, _ = await validate_access_token(bearer.credentials) + except (InvalidClientError, InvalidTokenError) as e: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=str(e)) + + return token_info + + +async def require_auth( + api_key: str | None = Security(api_key_header), + bearer: HTTPAuthorizationCredentials | None = Security(bearer_auth), +) -> APIAuthorizationInfo: + """ + Unified authentication middleware supporting both API keys and OAuth tokens. + + Supports two authentication methods, which are checked in order: + 1. X-API-Key header (existing API key authentication) + 2. Authorization: Bearer header (OAuth access token) + + Returns: + APIAuthorizationInfo: base class of both APIKeyInfo and OAuthAccessTokenInfo. 
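+
+    Usage sketch (the route itself is illustrative, not part of this module):
+        @router.get("/me")
+        async def whoami(auth: APIAuthorizationInfo = Security(require_auth)):
+            return {"user_id": auth.user_id, "auth_type": auth.type}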
+ """ + # Try API key first + if api_key is not None: + api_key_info = await validate_api_key(api_key) + if api_key_info: + return api_key_info + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid API key" + ) + + # Try OAuth bearer token + if bearer is not None: + try: + token_info, _ = await validate_access_token(bearer.credentials) + return token_info + except (InvalidClientError, InvalidTokenError) as e: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=str(e)) + + # No credentials provided + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Missing authentication. Provide API key or access token.", + ) + + def require_permission(permission: APIKeyPermission): - """Dependency function for checking specific permissions""" + """ + Dependency function for checking specific permissions + (works with API keys and OAuth tokens) + """ async def check_permission( - api_key: APIKeyInfo = Security(require_api_key), - ) -> APIKeyInfo: - if not has_permission(api_key, permission): + auth: APIAuthorizationInfo = Security(require_auth), + ) -> APIAuthorizationInfo: + if permission not in auth.scopes: raise HTTPException( - status_code=403, - detail=f"API key lacks the required permission '{permission}'", + status_code=status.HTTP_403_FORBIDDEN, + detail=f"Missing required permission: {permission.value}", ) - return api_key + return auth return check_permission diff --git a/autogpt_platform/backend/backend/server/external/routes/integrations.py b/autogpt_platform/backend/backend/server/external/routes/integrations.py index d64ca5615f..f9a8875ada 100644 --- a/autogpt_platform/backend/backend/server/external/routes/integrations.py +++ b/autogpt_platform/backend/backend/server/external/routes/integrations.py @@ -16,7 +16,7 @@ from fastapi import APIRouter, Body, HTTPException, Path, Security, status from prisma.enums import APIKeyPermission from pydantic import BaseModel, Field, SecretStr -from backend.data.api_key import APIKeyInfo +from backend.data.auth.base import APIAuthorizationInfo from backend.data.model import ( APIKeyCredentials, Credentials, @@ -255,7 +255,7 @@ def _get_oauth_handler_for_external( @integrations_router.get("/providers", response_model=list[ProviderInfo]) async def list_providers( - api_key: APIKeyInfo = Security( + auth: APIAuthorizationInfo = Security( require_permission(APIKeyPermission.READ_INTEGRATIONS) ), ) -> list[ProviderInfo]: @@ -319,7 +319,7 @@ async def list_providers( async def initiate_oauth( provider: Annotated[str, Path(title="The OAuth provider")], request: OAuthInitiateRequest, - api_key: APIKeyInfo = Security( + auth: APIAuthorizationInfo = Security( require_permission(APIKeyPermission.MANAGE_INTEGRATIONS) ), ) -> OAuthInitiateResponse: @@ -337,7 +337,10 @@ async def initiate_oauth( if not validate_callback_url(request.callback_url): raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f"Callback URL origin is not allowed. Allowed origins: {settings.config.external_oauth_callback_origins}", + detail=( + f"Callback URL origin is not allowed. 
" + f"Allowed origins: {settings.config.external_oauth_callback_origins}", + ), ) # Validate provider @@ -359,13 +362,15 @@ async def initiate_oauth( ) # Store state token with external flow metadata + # Note: initiated_by_api_key_id is only available for API key auth, not OAuth + api_key_id = getattr(auth, "id", None) if auth.type == "api_key" else None state_token, code_challenge = await creds_manager.store.store_state_token( - user_id=api_key.user_id, + user_id=auth.user_id, provider=provider if isinstance(provider_name, str) else provider_name.value, scopes=request.scopes, callback_url=request.callback_url, state_metadata=request.state_metadata, - initiated_by_api_key_id=api_key.id, + initiated_by_api_key_id=api_key_id, ) # Build login URL @@ -393,7 +398,7 @@ async def initiate_oauth( async def complete_oauth( provider: Annotated[str, Path(title="The OAuth provider")], request: OAuthCompleteRequest, - api_key: APIKeyInfo = Security( + auth: APIAuthorizationInfo = Security( require_permission(APIKeyPermission.MANAGE_INTEGRATIONS) ), ) -> OAuthCompleteResponse: @@ -406,7 +411,7 @@ async def complete_oauth( """ # Verify state token valid_state = await creds_manager.store.verify_state_token( - api_key.user_id, request.state_token, provider + auth.user_id, request.state_token, provider ) if not valid_state: @@ -453,7 +458,7 @@ async def complete_oauth( ) # Store credentials - await creds_manager.create(api_key.user_id, credentials) + await creds_manager.create(auth.user_id, credentials) logger.info(f"Successfully completed external OAuth for provider {provider}") @@ -470,7 +475,7 @@ async def complete_oauth( @integrations_router.get("/credentials", response_model=list[CredentialSummary]) async def list_credentials( - api_key: APIKeyInfo = Security( + auth: APIAuthorizationInfo = Security( require_permission(APIKeyPermission.READ_INTEGRATIONS) ), ) -> list[CredentialSummary]: @@ -479,7 +484,7 @@ async def list_credentials( Returns metadata about each credential without exposing sensitive tokens. """ - credentials = await creds_manager.store.get_all_creds(api_key.user_id) + credentials = await creds_manager.store.get_all_creds(auth.user_id) return [ CredentialSummary( id=cred.id, @@ -499,7 +504,7 @@ async def list_credentials( ) async def list_credentials_by_provider( provider: Annotated[str, Path(title="The provider to list credentials for")], - api_key: APIKeyInfo = Security( + auth: APIAuthorizationInfo = Security( require_permission(APIKeyPermission.READ_INTEGRATIONS) ), ) -> list[CredentialSummary]: @@ -507,7 +512,7 @@ async def list_credentials_by_provider( List credentials for a specific provider. 
""" credentials = await creds_manager.store.get_creds_by_provider( - api_key.user_id, provider + auth.user_id, provider ) return [ CredentialSummary( @@ -536,7 +541,7 @@ async def create_credential( CreateUserPasswordCredentialRequest, CreateHostScopedCredentialRequest, ] = Body(..., discriminator="type"), - api_key: APIKeyInfo = Security( + auth: APIAuthorizationInfo = Security( require_permission(APIKeyPermission.MANAGE_INTEGRATIONS) ), ) -> CreateCredentialResponse: @@ -591,7 +596,7 @@ async def create_credential( # Store credentials try: - await creds_manager.create(api_key.user_id, credentials) + await creds_manager.create(auth.user_id, credentials) except Exception as e: logger.error(f"Failed to store credentials: {e}") raise HTTPException( @@ -623,7 +628,7 @@ class DeleteCredentialResponse(BaseModel): async def delete_credential( provider: Annotated[str, Path(title="The provider")], cred_id: Annotated[str, Path(title="The credential ID to delete")], - api_key: APIKeyInfo = Security( + auth: APIAuthorizationInfo = Security( require_permission(APIKeyPermission.DELETE_INTEGRATIONS) ), ) -> DeleteCredentialResponse: @@ -634,7 +639,7 @@ async def delete_credential( use the main API's delete endpoint which handles webhook cleanup and token revocation. """ - creds = await creds_manager.store.get_creds_by_id(api_key.user_id, cred_id) + creds = await creds_manager.store.get_creds_by_id(auth.user_id, cred_id) if not creds: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Credentials not found" @@ -645,6 +650,6 @@ async def delete_credential( detail="Credentials do not match the specified provider", ) - await creds_manager.delete(api_key.user_id, cred_id) + await creds_manager.delete(auth.user_id, cred_id) return DeleteCredentialResponse(deleted=True, credentials_id=cred_id) diff --git a/autogpt_platform/backend/backend/server/external/routes/tools.py b/autogpt_platform/backend/backend/server/external/routes/tools.py index 3a821c5be8..8e3f4cbfdb 100644 --- a/autogpt_platform/backend/backend/server/external/routes/tools.py +++ b/autogpt_platform/backend/backend/server/external/routes/tools.py @@ -14,7 +14,7 @@ from fastapi import APIRouter, Security from prisma.enums import APIKeyPermission from pydantic import BaseModel, Field -from backend.data.api_key import APIKeyInfo +from backend.data.auth.base import APIAuthorizationInfo from backend.server.external.middleware import require_permission from backend.server.v2.chat.model import ChatSession from backend.server.v2.chat.tools import find_agent_tool, run_agent_tool @@ -24,9 +24,9 @@ logger = logging.getLogger(__name__) tools_router = APIRouter(prefix="/tools", tags=["tools"]) -# Note: We use Security() as a function parameter dependency (api_key: APIKeyInfo = Security(...)) +# Note: We use Security() as a function parameter dependency (auth: APIAuthorizationInfo = Security(...)) # rather than in the decorator's dependencies= list. This avoids duplicate permission checks -# while still enforcing auth AND giving us access to the api_key for extracting user_id. +# while still enforcing auth AND giving us access to auth for extracting user_id. 
# Request models @@ -80,7 +80,9 @@ def _create_ephemeral_session(user_id: str | None) -> ChatSession: ) async def find_agent( request: FindAgentRequest, - api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.USE_TOOLS)), + auth: APIAuthorizationInfo = Security( + require_permission(APIKeyPermission.USE_TOOLS) + ), ) -> dict[str, Any]: """ Search for agents in the marketplace based on capabilities and user needs. @@ -91,9 +93,9 @@ async def find_agent( Returns: List of matching agents or no results response """ - session = _create_ephemeral_session(api_key.user_id) + session = _create_ephemeral_session(auth.user_id) result = await find_agent_tool._execute( - user_id=api_key.user_id, + user_id=auth.user_id, session=session, query=request.query, ) @@ -105,7 +107,9 @@ async def find_agent( ) async def run_agent( request: RunAgentRequest, - api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.USE_TOOLS)), + auth: APIAuthorizationInfo = Security( + require_permission(APIKeyPermission.USE_TOOLS) + ), ) -> dict[str, Any]: """ Run or schedule an agent from the marketplace. @@ -129,9 +133,9 @@ async def run_agent( - execution_started: If agent was run or scheduled successfully - error: If something went wrong """ - session = _create_ephemeral_session(api_key.user_id) + session = _create_ephemeral_session(auth.user_id) result = await run_agent_tool._execute( - user_id=api_key.user_id, + user_id=auth.user_id, session=session, username_agent_slug=request.username_agent_slug, inputs=request.inputs, diff --git a/autogpt_platform/backend/backend/server/external/routes/v1.py b/autogpt_platform/backend/backend/server/external/routes/v1.py index 1b2840acf9..f83673465a 100644 --- a/autogpt_platform/backend/backend/server/external/routes/v1.py +++ b/autogpt_platform/backend/backend/server/external/routes/v1.py @@ -5,6 +5,7 @@ from typing import Annotated, Any, Literal, Optional, Sequence from fastapi import APIRouter, Body, HTTPException, Security from prisma.enums import AgentExecutionStatus, APIKeyPermission +from pydantic import BaseModel, Field from typing_extensions import TypedDict import backend.data.block @@ -12,7 +13,8 @@ import backend.server.v2.store.cache as store_cache import backend.server.v2.store.model as store_model from backend.data import execution as execution_db from backend.data import graph as graph_db -from backend.data.api_key import APIKeyInfo +from backend.data import user as user_db +from backend.data.auth.base import APIAuthorizationInfo from backend.data.block import BlockInput, CompletedBlockOutput from backend.executor.utils import add_graph_execution from backend.server.external.middleware import require_permission @@ -24,27 +26,33 @@ logger = logging.getLogger(__name__) v1_router = APIRouter() -class NodeOutput(TypedDict): - key: str - value: Any +class UserInfoResponse(BaseModel): + id: str + name: Optional[str] + email: str + timezone: str = Field( + description="The user's last known timezone (e.g. 
'Europe/Amsterdam'), " + "or 'not-set' if not set" + ) -class ExecutionNode(TypedDict): - node_id: str - input: Any - output: dict[str, Any] +@v1_router.get( + path="/me", + tags=["user", "meta"], +) +async def get_user_info( + auth: APIAuthorizationInfo = Security( + require_permission(APIKeyPermission.IDENTITY) + ), +) -> UserInfoResponse: + user = await user_db.get_user_by_id(auth.user_id) - -class ExecutionNodeOutput(TypedDict): - node_id: str - outputs: list[NodeOutput] - - -class GraphExecutionResult(TypedDict): - execution_id: str - status: str - nodes: list[ExecutionNode] - output: Optional[list[dict[str, str]]] + return UserInfoResponse( + id=user.id, + name=user.name, + email=user.email, + timezone=user.timezone, + ) @v1_router.get( @@ -65,7 +73,9 @@ async def get_graph_blocks() -> Sequence[dict[Any, Any]]: async def execute_graph_block( block_id: str, data: BlockInput, - api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.EXECUTE_BLOCK)), + auth: APIAuthorizationInfo = Security( + require_permission(APIKeyPermission.EXECUTE_BLOCK) + ), ) -> CompletedBlockOutput: obj = backend.data.block.get_block(block_id) if not obj: @@ -85,12 +95,14 @@ async def execute_graph( graph_id: str, graph_version: int, node_input: Annotated[dict[str, Any], Body(..., embed=True, default_factory=dict)], - api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.EXECUTE_GRAPH)), + auth: APIAuthorizationInfo = Security( + require_permission(APIKeyPermission.EXECUTE_GRAPH) + ), ) -> dict[str, Any]: try: graph_exec = await add_graph_execution( graph_id=graph_id, - user_id=api_key.user_id, + user_id=auth.user_id, inputs=node_input, graph_version=graph_version, ) @@ -100,6 +112,19 @@ async def execute_graph( raise HTTPException(status_code=400, detail=msg) +class ExecutionNode(TypedDict): + node_id: str + input: Any + output: dict[str, Any] + + +class GraphExecutionResult(TypedDict): + execution_id: str + status: str + nodes: list[ExecutionNode] + output: Optional[list[dict[str, str]]] + + @v1_router.get( path="/graphs/{graph_id}/executions/{graph_exec_id}/results", tags=["graphs"], @@ -107,10 +132,12 @@ async def execute_graph( async def get_graph_execution_results( graph_id: str, graph_exec_id: str, - api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.READ_GRAPH)), + auth: APIAuthorizationInfo = Security( + require_permission(APIKeyPermission.READ_GRAPH) + ), ) -> GraphExecutionResult: graph_exec = await execution_db.get_graph_execution( - user_id=api_key.user_id, + user_id=auth.user_id, execution_id=graph_exec_id, include_node_executions=True, ) @@ -122,7 +149,7 @@ async def get_graph_execution_results( if not await graph_db.get_graph( graph_id=graph_exec.graph_id, version=graph_exec.graph_version, - user_id=api_key.user_id, + user_id=auth.user_id, ): raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.") diff --git a/autogpt_platform/backend/backend/server/model.py b/autogpt_platform/backend/backend/server/model.py index 1d7b79cd7c..5e13e20450 100644 --- a/autogpt_platform/backend/backend/server/model.py +++ b/autogpt_platform/backend/backend/server/model.py @@ -4,7 +4,7 @@ from typing import Any, Literal, Optional import pydantic from prisma.enums import OnboardingStep -from backend.data.api_key import APIKeyInfo, APIKeyPermission +from backend.data.auth.api_key import APIKeyInfo, APIKeyPermission from backend.data.graph import Graph from backend.util.timezone_name import TimeZoneName diff --git 
a/autogpt_platform/backend/backend/server/rest_api.py b/autogpt_platform/backend/backend/server/rest_api.py index 556903571c..5db2b18c27 100644 --- a/autogpt_platform/backend/backend/server/rest_api.py +++ b/autogpt_platform/backend/backend/server/rest_api.py @@ -21,6 +21,7 @@ import backend.data.db import backend.data.graph import backend.data.user import backend.integrations.webhooks.utils +import backend.server.routers.oauth import backend.server.routers.postmark.postmark import backend.server.routers.v1 import backend.server.v2.admin.credit_admin_routes @@ -297,6 +298,11 @@ app.include_router( tags=["v2", "chat"], prefix="/api/chat", ) +app.include_router( + backend.server.routers.oauth.router, + tags=["oauth"], + prefix="/api/oauth", +) app.mount("/external-api", external_app) diff --git a/autogpt_platform/backend/backend/server/routers/oauth.py b/autogpt_platform/backend/backend/server/routers/oauth.py new file mode 100644 index 0000000000..55f591427a --- /dev/null +++ b/autogpt_platform/backend/backend/server/routers/oauth.py @@ -0,0 +1,833 @@ +""" +OAuth 2.0 Provider Endpoints + +Implements OAuth 2.0 Authorization Code flow with PKCE support. + +Flow: +1. User clicks "Login with AutoGPT" in 3rd party app +2. App redirects user to /oauth/authorize with client_id, redirect_uri, scope, state +3. User sees consent screen (if not already logged in, redirects to login first) +4. User approves → backend creates authorization code +5. User redirected back to app with code +6. App exchanges code for access/refresh tokens at /oauth/token +7. App uses access token to call external API endpoints +""" + +import io +import logging +import os +import uuid +from datetime import datetime +from typing import Literal, Optional +from urllib.parse import urlencode + +from autogpt_libs.auth import get_user_id +from fastapi import APIRouter, Body, HTTPException, Security, UploadFile, status +from gcloud.aio import storage as async_storage +from PIL import Image +from prisma.enums import APIKeyPermission +from pydantic import BaseModel, Field + +from backend.data.auth.oauth import ( + InvalidClientError, + InvalidGrantError, + OAuthApplicationInfo, + TokenIntrospectionResult, + consume_authorization_code, + create_access_token, + create_authorization_code, + create_refresh_token, + get_oauth_application, + get_oauth_application_by_id, + introspect_token, + list_user_oauth_applications, + refresh_tokens, + revoke_access_token, + revoke_refresh_token, + update_oauth_application, + validate_client_credentials, + validate_redirect_uri, + validate_scopes, +) +from backend.util.settings import Settings +from backend.util.virus_scanner import scan_content_safe + +settings = Settings() +logger = logging.getLogger(__name__) + +router = APIRouter() + + +# ============================================================================ +# Request/Response Models +# ============================================================================ + + +class TokenResponse(BaseModel): + """OAuth 2.0 token response""" + + token_type: Literal["Bearer"] = "Bearer" + access_token: str + access_token_expires_at: datetime + refresh_token: str + refresh_token_expires_at: datetime + scopes: list[str] + + +class ErrorResponse(BaseModel): + """OAuth 2.0 error response""" + + error: str + error_description: Optional[str] = None + + +class OAuthApplicationPublicInfo(BaseModel): + """Public information about an OAuth application (for consent screen)""" + + name: str + description: Optional[str] = None + logo_url: Optional[str] = None + 
scopes: list[str] + + +# ============================================================================ +# Application Info Endpoint +# ============================================================================ + + +@router.get( + "/app/{client_id}", + responses={ + 404: {"description": "Application not found or disabled"}, + }, +) +async def get_oauth_app_info( + client_id: str, user_id: str = Security(get_user_id) +) -> OAuthApplicationPublicInfo: + """ + Get public information about an OAuth application. + + This endpoint is used by the consent screen to display application details + to the user before they authorize access. + + Returns: + - name: Application name + - description: Application description (if provided) + - scopes: List of scopes the application is allowed to request + """ + app = await get_oauth_application(client_id) + if not app or not app.is_active: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Application not found", + ) + + return OAuthApplicationPublicInfo( + name=app.name, + description=app.description, + logo_url=app.logo_url, + scopes=[s.value for s in app.scopes], + ) + + +# ============================================================================ +# Authorization Endpoint +# ============================================================================ + + +class AuthorizeRequest(BaseModel): + """OAuth 2.0 authorization request""" + + client_id: str = Field(description="Client identifier") + redirect_uri: str = Field(description="Redirect URI") + scopes: list[str] = Field(description="List of scopes") + state: str = Field(description="Anti-CSRF token from client") + response_type: str = Field( + default="code", description="Must be 'code' for authorization code flow" + ) + code_challenge: str = Field(description="PKCE code challenge (required)") + code_challenge_method: Literal["S256", "plain"] = Field( + default="S256", description="PKCE code challenge method (S256 recommended)" + ) + + +class AuthorizeResponse(BaseModel): + """OAuth 2.0 authorization response with redirect URL""" + + redirect_url: str = Field(description="URL to redirect the user to") + + +@router.post("/authorize") +async def authorize( + request: AuthorizeRequest = Body(), + user_id: str = Security(get_user_id), +) -> AuthorizeResponse: + """ + OAuth 2.0 Authorization Endpoint + + User must be logged in (authenticated with Supabase JWT). + This endpoint creates an authorization code and returns a redirect URL. + + PKCE (Proof Key for Code Exchange) is REQUIRED for all authorization requests. + + The frontend consent screen should call this endpoint after the user approves, + then redirect the user to the returned `redirect_url`. + + Request Body: + - client_id: The OAuth application's client ID + - redirect_uri: Where to redirect after authorization (must match registered URI) + - scopes: List of permissions (e.g., "EXECUTE_GRAPH READ_GRAPH") + - state: Anti-CSRF token provided by client (will be returned in redirect) + - response_type: Must be "code" (for authorization code flow) + - code_challenge: PKCE code challenge (required) + - code_challenge_method: "S256" (recommended) or "plain" + + Returns: + - redirect_url: The URL to redirect the user to (includes authorization code) + + Error cases return a redirect_url with error parameters, or raise HTTPException + for critical errors (like invalid redirect_uri). 
+ """ + try: + # Validate response_type + if request.response_type != "code": + return _error_redirect_url( + request.redirect_uri, + request.state, + "unsupported_response_type", + "Only 'code' response type is supported", + ) + + # Get application + app = await get_oauth_application(request.client_id) + if not app: + return _error_redirect_url( + request.redirect_uri, + request.state, + "invalid_client", + "Unknown client_id", + ) + + if not app.is_active: + return _error_redirect_url( + request.redirect_uri, + request.state, + "invalid_client", + "Application is not active", + ) + + # Validate redirect URI + if not validate_redirect_uri(app, request.redirect_uri): + # For invalid redirect_uri, we can't redirect safely + # Must return error instead + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=( + "Invalid redirect_uri. " + f"Must be one of: {', '.join(app.redirect_uris)}" + ), + ) + + # Parse and validate scopes + try: + requested_scopes = [APIKeyPermission(s.strip()) for s in request.scopes] + except ValueError as e: + return _error_redirect_url( + request.redirect_uri, + request.state, + "invalid_scope", + f"Invalid scope: {e}", + ) + + if not requested_scopes: + return _error_redirect_url( + request.redirect_uri, + request.state, + "invalid_scope", + "At least one scope is required", + ) + + if not validate_scopes(app, requested_scopes): + return _error_redirect_url( + request.redirect_uri, + request.state, + "invalid_scope", + "Application is not authorized for all requested scopes. " + f"Allowed: {', '.join(s.value for s in app.scopes)}", + ) + + # Create authorization code + auth_code = await create_authorization_code( + application_id=app.id, + user_id=user_id, + scopes=requested_scopes, + redirect_uri=request.redirect_uri, + code_challenge=request.code_challenge, + code_challenge_method=request.code_challenge_method, + ) + + # Build redirect URL with authorization code + params = { + "code": auth_code.code, + "state": request.state, + } + redirect_url = f"{request.redirect_uri}?{urlencode(params)}" + + logger.info( + f"Authorization code issued for user #{user_id} " + f"and app {app.name} (#{app.id})" + ) + + return AuthorizeResponse(redirect_url=redirect_url) + + except HTTPException: + raise + except Exception as e: + logger.error(f"Error in authorization endpoint: {e}", exc_info=True) + return _error_redirect_url( + request.redirect_uri, + request.state, + "server_error", + "An unexpected error occurred", + ) + + +def _error_redirect_url( + redirect_uri: str, + state: str, + error: str, + error_description: Optional[str] = None, +) -> AuthorizeResponse: + """Helper to build redirect URL with OAuth error parameters""" + params = { + "error": error, + "state": state, + } + if error_description: + params["error_description"] = error_description + + redirect_url = f"{redirect_uri}?{urlencode(params)}" + return AuthorizeResponse(redirect_url=redirect_url) + + +# ============================================================================ +# Token Endpoint +# ============================================================================ + + +class TokenRequestByCode(BaseModel): + grant_type: Literal["authorization_code"] + code: str = Field(description="Authorization code") + redirect_uri: str = Field( + description="Redirect URI (must match authorization request)" + ) + client_id: str + client_secret: str + code_verifier: str = Field(description="PKCE code verifier") + + +class TokenRequestByRefreshToken(BaseModel): + grant_type: Literal["refresh_token"] 
+ refresh_token: str + client_id: str + client_secret: str + + +@router.post("/token") +async def token( + request: TokenRequestByCode | TokenRequestByRefreshToken = Body(), +) -> TokenResponse: + """ + OAuth 2.0 Token Endpoint + + Exchanges authorization code or refresh token for access token. + + Grant Types: + 1. authorization_code: Exchange authorization code for tokens + - Required: grant_type, code, redirect_uri, client_id, client_secret + - Optional: code_verifier (required if PKCE was used) + + 2. refresh_token: Exchange refresh token for new access token + - Required: grant_type, refresh_token, client_id, client_secret + + Returns: + - access_token: Bearer token for API access (1 hour TTL) + - token_type: "Bearer" + - expires_in: Seconds until access token expires + - refresh_token: Token for refreshing access (30 days TTL) + - scopes: List of scopes + """ + # Validate client credentials + try: + app = await validate_client_credentials( + request.client_id, request.client_secret + ) + except InvalidClientError as e: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=str(e), + ) + + # Handle authorization_code grant + if request.grant_type == "authorization_code": + # Consume authorization code + try: + user_id, scopes = await consume_authorization_code( + code=request.code, + application_id=app.id, + redirect_uri=request.redirect_uri, + code_verifier=request.code_verifier, + ) + except InvalidGrantError as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=str(e), + ) + + # Create access and refresh tokens + access_token = await create_access_token(app.id, user_id, scopes) + refresh_token = await create_refresh_token(app.id, user_id, scopes) + + logger.info( + f"Access token issued for user #{user_id} and app {app.name} (#{app.id})" + "via authorization code" + ) + + if not access_token.token or not refresh_token.token: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to generate tokens", + ) + + return TokenResponse( + token_type="Bearer", + access_token=access_token.token.get_secret_value(), + access_token_expires_at=access_token.expires_at, + refresh_token=refresh_token.token.get_secret_value(), + refresh_token_expires_at=refresh_token.expires_at, + scopes=list(s.value for s in scopes), + ) + + # Handle refresh_token grant + elif request.grant_type == "refresh_token": + # Refresh access token + try: + new_access_token, new_refresh_token = await refresh_tokens( + request.refresh_token, app.id + ) + except InvalidGrantError as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=str(e), + ) + + logger.info( + f"Tokens refreshed for user #{new_access_token.user_id} " + f"by app {app.name} (#{app.id})" + ) + + if not new_access_token.token or not new_refresh_token.token: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to generate tokens", + ) + + return TokenResponse( + token_type="Bearer", + access_token=new_access_token.token.get_secret_value(), + access_token_expires_at=new_access_token.expires_at, + refresh_token=new_refresh_token.token.get_secret_value(), + refresh_token_expires_at=new_refresh_token.expires_at, + scopes=list(s.value for s in new_access_token.scopes), + ) + + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Unsupported grant_type: {request.grant_type}. 
" + "Must be 'authorization_code' or 'refresh_token'", + ) + + +# ============================================================================ +# Token Introspection Endpoint +# ============================================================================ + + +@router.post("/introspect") +async def introspect( + token: str = Body(description="Token to introspect"), + token_type_hint: Optional[Literal["access_token", "refresh_token"]] = Body( + None, description="Hint about token type ('access_token' or 'refresh_token')" + ), + client_id: str = Body(description="Client identifier"), + client_secret: str = Body(description="Client secret"), +) -> TokenIntrospectionResult: + """ + OAuth 2.0 Token Introspection Endpoint (RFC 7662) + + Allows clients to check if a token is valid and get its metadata. + + Returns: + - active: Whether the token is currently active + - scopes: List of authorized scopes (if active) + - client_id: The client the token was issued to (if active) + - user_id: The user the token represents (if active) + - exp: Expiration timestamp (if active) + - token_type: "access_token" or "refresh_token" (if active) + """ + # Validate client credentials + try: + await validate_client_credentials(client_id, client_secret) + except InvalidClientError as e: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=str(e), + ) + + # Introspect the token + return await introspect_token(token, token_type_hint) + + +# ============================================================================ +# Token Revocation Endpoint +# ============================================================================ + + +@router.post("/revoke") +async def revoke( + token: str = Body(description="Token to revoke"), + token_type_hint: Optional[Literal["access_token", "refresh_token"]] = Body( + None, description="Hint about token type ('access_token' or 'refresh_token')" + ), + client_id: str = Body(description="Client identifier"), + client_secret: str = Body(description="Client secret"), +): + """ + OAuth 2.0 Token Revocation Endpoint (RFC 7009) + + Allows clients to revoke an access or refresh token. + + Note: Revoking a refresh token does NOT revoke associated access tokens. + Revoking an access token does NOT revoke the associated refresh token. + """ + # Validate client credentials + try: + app = await validate_client_credentials(client_id, client_secret) + except InvalidClientError as e: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=str(e), + ) + + # Try to revoke as access token first + # Note: We pass app.id to ensure the token belongs to the authenticated app + if token_type_hint != "refresh_token": + revoked = await revoke_access_token(token, app.id) + if revoked: + logger.info( + f"Access token revoked for app {app.name} (#{app.id}); " + f"user #{revoked.user_id}" + ) + return {"status": "ok"} + + # Try to revoke as refresh token + revoked = await revoke_refresh_token(token, app.id) + if revoked: + logger.info( + f"Refresh token revoked for app {app.name} (#{app.id}); " + f"user #{revoked.user_id}" + ) + return {"status": "ok"} + + # Per RFC 7009, revocation endpoint returns 200 even if token not found + # or if token belongs to a different application. + # This prevents token scanning attacks. 
+ logger.warning(f"Unsuccessful token revocation attempt by app {app.name} #{app.id}") + return {"status": "ok"} + + +# ============================================================================ +# Application Management Endpoints (for app owners) +# ============================================================================ + + +@router.get("/apps/mine") +async def list_my_oauth_apps( + user_id: str = Security(get_user_id), +) -> list[OAuthApplicationInfo]: + """ + List all OAuth applications owned by the current user. + + Returns a list of OAuth applications with their details including: + - id, name, description, logo_url + - client_id (public identifier) + - redirect_uris, grant_types, scopes + - is_active status + - created_at, updated_at timestamps + + Note: client_secret is never returned for security reasons. + """ + return await list_user_oauth_applications(user_id) + + +@router.patch("/apps/{app_id}/status") +async def update_app_status( + app_id: str, + user_id: str = Security(get_user_id), + is_active: bool = Body(description="Whether the app should be active", embed=True), +) -> OAuthApplicationInfo: + """ + Enable or disable an OAuth application. + + Only the application owner can update the status. + When disabled, the application cannot be used for new authorizations + and existing access tokens will fail validation. + + Returns the updated application info. + """ + updated_app = await update_oauth_application( + app_id=app_id, + owner_id=user_id, + is_active=is_active, + ) + + if not updated_app: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Application not found or you don't have permission to update it", + ) + + action = "enabled" if is_active else "disabled" + logger.info(f"OAuth app {updated_app.name} (#{app_id}) {action} by user #{user_id}") + + return updated_app + + +class UpdateAppLogoRequest(BaseModel): + logo_url: str = Field(description="URL of the uploaded logo image") + + +@router.patch("/apps/{app_id}/logo") +async def update_app_logo( + app_id: str, + request: UpdateAppLogoRequest = Body(), + user_id: str = Security(get_user_id), +) -> OAuthApplicationInfo: + """ + Update the logo URL for an OAuth application. + + Only the application owner can update the logo. + The logo should be uploaded first using the media upload endpoint, + then this endpoint is called with the resulting URL. + + Logo requirements: + - Must be square (1:1 aspect ratio) + - Minimum 512x512 pixels + - Maximum 2048x2048 pixels + + Returns the updated application info. 
+ """ + if ( + not (app := await get_oauth_application_by_id(app_id)) + or app.owner_id != user_id + ): + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="OAuth App not found", + ) + + # Delete the current app logo file (if any and it's in our cloud storage) + await _delete_app_current_logo_file(app) + + updated_app = await update_oauth_application( + app_id=app_id, + owner_id=user_id, + logo_url=request.logo_url, + ) + + if not updated_app: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Application not found or you don't have permission to update it", + ) + + logger.info( + f"OAuth app {updated_app.name} (#{app_id}) logo updated by user #{user_id}" + ) + + return updated_app + + +# Logo upload constraints +LOGO_MIN_SIZE = 512 +LOGO_MAX_SIZE = 2048 +LOGO_ALLOWED_TYPES = {"image/jpeg", "image/png", "image/webp"} +LOGO_MAX_FILE_SIZE = 3 * 1024 * 1024 # 3MB + + +@router.post("/apps/{app_id}/logo/upload") +async def upload_app_logo( + app_id: str, + file: UploadFile, + user_id: str = Security(get_user_id), +) -> OAuthApplicationInfo: + """ + Upload a logo image for an OAuth application. + + Requirements: + - Image must be square (1:1 aspect ratio) + - Minimum 512x512 pixels + - Maximum 2048x2048 pixels + - Allowed formats: JPEG, PNG, WebP + - Maximum file size: 3MB + + The image is uploaded to cloud storage and the app's logoUrl is updated. + Returns the updated application info. + """ + # Verify ownership to reduce vulnerability to DoS(torage) or DoM(oney) attacks + if ( + not (app := await get_oauth_application_by_id(app_id)) + or app.owner_id != user_id + ): + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="OAuth App not found", + ) + + # Check GCS configuration + if not settings.config.media_gcs_bucket_name: + raise HTTPException( + status_code=status.HTTP_503_SERVICE_UNAVAILABLE, + detail="Media storage is not configured", + ) + + # Validate content type + content_type = file.content_type + if content_type not in LOGO_ALLOWED_TYPES: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Invalid file type. Allowed: JPEG, PNG, WebP. Got: {content_type}", + ) + + # Read file content + try: + file_bytes = await file.read() + except Exception as e: + logger.error(f"Error reading logo file: {e}") + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Failed to read uploaded file", + ) + + # Check file size + if len(file_bytes) > LOGO_MAX_FILE_SIZE: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=( + "File too large. " + f"Maximum size is {LOGO_MAX_FILE_SIZE // 1024 // 1024}MB" + ), + ) + + # Validate image dimensions + try: + image = Image.open(io.BytesIO(file_bytes)) + width, height = image.size + + if width != height: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Logo must be square. Got {width}x{height}", + ) + + if width < LOGO_MIN_SIZE: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Logo too small. Minimum {LOGO_MIN_SIZE}x{LOGO_MIN_SIZE}. " + f"Got {width}x{height}", + ) + + if width > LOGO_MAX_SIZE: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Logo too large. Maximum {LOGO_MAX_SIZE}x{LOGO_MAX_SIZE}. 
" + f"Got {width}x{height}", + ) + except HTTPException: + raise + except Exception as e: + logger.error(f"Error validating logo image: {e}") + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Invalid image file", + ) + + # Scan for viruses + filename = file.filename or "logo" + await scan_content_safe(file_bytes, filename=filename) + + # Generate unique filename + file_ext = os.path.splitext(filename)[1].lower() or ".png" + unique_filename = f"{uuid.uuid4()}{file_ext}" + storage_path = f"oauth-apps/{app_id}/logo/{unique_filename}" + + # Upload to GCS + try: + async with async_storage.Storage() as async_client: + bucket_name = settings.config.media_gcs_bucket_name + + await async_client.upload( + bucket_name, storage_path, file_bytes, content_type=content_type + ) + + logo_url = f"https://storage.googleapis.com/{bucket_name}/{storage_path}" + except Exception as e: + logger.error(f"Error uploading logo to GCS: {e}") + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to upload logo", + ) + + # Delete the current app logo file (if any and it's in our cloud storage) + await _delete_app_current_logo_file(app) + + # Update the app with the new logo URL + updated_app = await update_oauth_application( + app_id=app_id, + owner_id=user_id, + logo_url=logo_url, + ) + + if not updated_app: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Application not found or you don't have permission to update it", + ) + + logger.info( + f"OAuth app {updated_app.name} (#{app_id}) logo uploaded by user #{user_id}" + ) + + return updated_app + + +async def _delete_app_current_logo_file(app: OAuthApplicationInfo): + """ + Delete the current logo file for the given app, if there is one in our cloud storage + """ + bucket_name = settings.config.media_gcs_bucket_name + storage_base_url = f"https://storage.googleapis.com/{bucket_name}/" + + if app.logo_url and app.logo_url.startswith(storage_base_url): + # Parse blob path from URL: https://storage.googleapis.com/{bucket}/{path} + old_path = app.logo_url.replace(storage_base_url, "") + try: + async with async_storage.Storage() as async_client: + await async_client.delete(bucket_name, old_path) + logger.info(f"Deleted old logo for OAuth app #{app.id}: {old_path}") + except Exception as e: + # Log but don't fail - the new logo was uploaded successfully + logger.warning( + f"Failed to delete old logo for OAuth app #{app.id}: {e}", exc_info=e + ) diff --git a/autogpt_platform/backend/backend/server/routers/oauth_test.py b/autogpt_platform/backend/backend/server/routers/oauth_test.py new file mode 100644 index 0000000000..8ec6911152 --- /dev/null +++ b/autogpt_platform/backend/backend/server/routers/oauth_test.py @@ -0,0 +1,1784 @@ +""" +End-to-end integration tests for OAuth 2.0 Provider Endpoints. + +These tests hit the actual API endpoints and database, testing the complete +OAuth flow from endpoint to database. + +Tests cover: +1. Authorization endpoint - creating authorization codes +2. Token endpoint - exchanging codes for tokens and refreshing +3. Token introspection endpoint - checking token validity +4. Token revocation endpoint - revoking tokens +5. 
Complete OAuth flow end-to-end +""" + +import base64 +import hashlib +import secrets +import uuid +from typing import AsyncGenerator + +import httpx +import pytest +from autogpt_libs.api_key.keysmith import APIKeySmith +from prisma.enums import APIKeyPermission +from prisma.models import OAuthAccessToken as PrismaOAuthAccessToken +from prisma.models import OAuthApplication as PrismaOAuthApplication +from prisma.models import OAuthAuthorizationCode as PrismaOAuthAuthorizationCode +from prisma.models import OAuthRefreshToken as PrismaOAuthRefreshToken +from prisma.models import User as PrismaUser + +from backend.server.rest_api import app + +keysmith = APIKeySmith() + + +# ============================================================================ +# Test Fixtures +# ============================================================================ + + +@pytest.fixture +def test_user_id() -> str: + """Test user ID for OAuth tests.""" + return str(uuid.uuid4()) + + +@pytest.fixture +async def test_user(server, test_user_id: str): + """Create a test user in the database.""" + await PrismaUser.prisma().create( + data={ + "id": test_user_id, + "email": f"oauth-test-{test_user_id}@example.com", + "name": "OAuth Test User", + } + ) + + yield test_user_id + + # Cleanup - delete in correct order due to foreign key constraints + await PrismaOAuthAccessToken.prisma().delete_many(where={"userId": test_user_id}) + await PrismaOAuthRefreshToken.prisma().delete_many(where={"userId": test_user_id}) + await PrismaOAuthAuthorizationCode.prisma().delete_many( + where={"userId": test_user_id} + ) + await PrismaOAuthApplication.prisma().delete_many(where={"ownerId": test_user_id}) + await PrismaUser.prisma().delete(where={"id": test_user_id}) + + +@pytest.fixture +async def test_oauth_app(test_user: str): + """Create a test OAuth application in the database.""" + app_id = str(uuid.uuid4()) + client_id = f"test_client_{secrets.token_urlsafe(8)}" + # Secret must start with "agpt_" prefix for keysmith verification to work + client_secret_plaintext = f"agpt_secret_{secrets.token_urlsafe(16)}" + client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext) + + await PrismaOAuthApplication.prisma().create( + data={ + "id": app_id, + "name": "Test OAuth App", + "description": "Test application for integration tests", + "clientId": client_id, + "clientSecret": client_secret_hash, + "clientSecretSalt": client_secret_salt, + "redirectUris": [ + "https://example.com/callback", + "http://localhost:3000/callback", + ], + "grantTypes": ["authorization_code", "refresh_token"], + "scopes": [APIKeyPermission.EXECUTE_GRAPH, APIKeyPermission.READ_GRAPH], + "ownerId": test_user, + "isActive": True, + } + ) + + yield { + "id": app_id, + "client_id": client_id, + "client_secret": client_secret_plaintext, + "redirect_uri": "https://example.com/callback", + } + + # Cleanup is handled by test_user fixture (cascade delete) + + +def generate_pkce() -> tuple[str, str]: + """Generate PKCE code verifier and challenge.""" + verifier = secrets.token_urlsafe(32) + challenge = ( + base64.urlsafe_b64encode(hashlib.sha256(verifier.encode("ascii")).digest()) + .decode("ascii") + .rstrip("=") + ) + return verifier, challenge + + +@pytest.fixture +def pkce_credentials() -> tuple[str, str]: + """Generate PKCE code verifier and challenge as a fixture.""" + return generate_pkce() + + +@pytest.fixture +async def client(server, test_user: str) -> AsyncGenerator[httpx.AsyncClient, None]: + """ + Create an async HTTP client that talks 
directly to the FastAPI app. + + Uses ASGI transport so we don't need an actual HTTP server running. + Also overrides get_user_id dependency to return our test user. + + Depends on `server` to ensure the DB is connected and `test_user` to ensure + the user exists in the database before running tests. + """ + from autogpt_libs.auth import get_user_id + + # Override get_user_id dependency to return our test user + def override_get_user_id(): + return test_user + + # Store original override if any + original_override = app.dependency_overrides.get(get_user_id) + + # Set our override + app.dependency_overrides[get_user_id] = override_get_user_id + + try: + async with httpx.AsyncClient( + transport=httpx.ASGITransport(app=app), + base_url="http://test", + ) as http_client: + yield http_client + finally: + # Restore original override + if original_override is not None: + app.dependency_overrides[get_user_id] = original_override + else: + app.dependency_overrides.pop(get_user_id, None) + + +# ============================================================================ +# Authorization Endpoint Integration Tests +# ============================================================================ + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorize_creates_code_in_database( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, + pkce_credentials: tuple[str, str], +): + """Test that authorization endpoint creates a code in the database.""" + verifier, challenge = pkce_credentials + + response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH", "READ_GRAPH"], + "state": "test_state_123", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + assert response.status_code == 200 + redirect_url = response.json()["redirect_url"] + + # Parse the redirect URL to get the authorization code + from urllib.parse import parse_qs, urlparse + + parsed = urlparse(redirect_url) + query_params = parse_qs(parsed.query) + + assert "code" in query_params, f"Expected 'code' in query params: {query_params}" + auth_code = query_params["code"][0] + assert query_params["state"][0] == "test_state_123" + + # Verify code exists in database + db_code = await PrismaOAuthAuthorizationCode.prisma().find_unique( + where={"code": auth_code} + ) + + assert db_code is not None + assert db_code.userId == test_user + assert db_code.applicationId == test_oauth_app["id"] + assert db_code.redirectUri == test_oauth_app["redirect_uri"] + assert APIKeyPermission.EXECUTE_GRAPH in db_code.scopes + assert APIKeyPermission.READ_GRAPH in db_code.scopes + assert db_code.usedAt is None # Not yet consumed + assert db_code.codeChallenge == challenge + assert db_code.codeChallengeMethod == "S256" + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorize_with_pkce_stores_challenge( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, + pkce_credentials: tuple[str, str], +): + """Test that PKCE code challenge is stored correctly.""" + verifier, challenge = pkce_credentials + + response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "pkce_test_state", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, 
+ follow_redirects=False, + ) + + assert response.status_code == 200 + + from urllib.parse import parse_qs, urlparse + + auth_code = parse_qs(urlparse(response.json()["redirect_url"]).query)["code"][0] + + # Verify PKCE challenge is stored + db_code = await PrismaOAuthAuthorizationCode.prisma().find_unique( + where={"code": auth_code} + ) + + assert db_code is not None + assert db_code.codeChallenge == challenge + assert db_code.codeChallengeMethod == "S256" + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorize_invalid_client_returns_error( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test that invalid client_id returns error in redirect.""" + _, challenge = generate_pkce() + + response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": "nonexistent_client_id", + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "error_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + assert response.status_code == 200 + from urllib.parse import parse_qs, urlparse + + query_params = parse_qs(urlparse(response.json()["redirect_url"]).query) + assert query_params["error"][0] == "invalid_client" + + +@pytest.fixture +async def inactive_oauth_app(test_user: str): + """Create an inactive test OAuth application in the database.""" + app_id = str(uuid.uuid4()) + client_id = f"inactive_client_{secrets.token_urlsafe(8)}" + client_secret_plaintext = f"agpt_secret_{secrets.token_urlsafe(16)}" + client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext) + + await PrismaOAuthApplication.prisma().create( + data={ + "id": app_id, + "name": "Inactive OAuth App", + "description": "Inactive test application", + "clientId": client_id, + "clientSecret": client_secret_hash, + "clientSecretSalt": client_secret_salt, + "redirectUris": ["https://example.com/callback"], + "grantTypes": ["authorization_code", "refresh_token"], + "scopes": [APIKeyPermission.EXECUTE_GRAPH], + "ownerId": test_user, + "isActive": False, # Inactive! 
+ } + ) + + yield { + "id": app_id, + "client_id": client_id, + "client_secret": client_secret_plaintext, + "redirect_uri": "https://example.com/callback", + } + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorize_inactive_app( + client: httpx.AsyncClient, + test_user: str, + inactive_oauth_app: dict, +): + """Test that authorization with inactive app returns error.""" + _, challenge = generate_pkce() + + response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": inactive_oauth_app["client_id"], + "redirect_uri": inactive_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "inactive_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + assert response.status_code == 200 + from urllib.parse import parse_qs, urlparse + + query_params = parse_qs(urlparse(response.json()["redirect_url"]).query) + assert query_params["error"][0] == "invalid_client" + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorize_invalid_redirect_uri( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test authorization with unregistered redirect_uri returns HTTP error.""" + _, challenge = generate_pkce() + + response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": "https://malicious.com/callback", + "scopes": ["EXECUTE_GRAPH"], + "state": "invalid_redirect_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + # Invalid redirect_uri should return HTTP 400, not a redirect + assert response.status_code == 400 + assert "redirect_uri" in response.json()["detail"].lower() + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorize_invalid_scope( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test authorization with invalid scope value.""" + _, challenge = generate_pkce() + + response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["INVALID_SCOPE_NAME"], + "state": "invalid_scope_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + assert response.status_code == 200 + from urllib.parse import parse_qs, urlparse + + query_params = parse_qs(urlparse(response.json()["redirect_url"]).query) + assert query_params["error"][0] == "invalid_scope" + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorize_unauthorized_scope( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test authorization requesting scope not authorized for app.""" + _, challenge = generate_pkce() + + # The test_oauth_app only has EXECUTE_GRAPH and READ_GRAPH scopes + # DELETE_GRAPH is not in the app's allowed scopes + response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["DELETE_GRAPH"], # Not authorized for this app + "state": "unauthorized_scope_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + assert response.status_code == 200 + from urllib.parse import parse_qs, urlparse + + query_params = 
parse_qs(urlparse(response.json()["redirect_url"]).query) + assert query_params["error"][0] == "invalid_scope" + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorize_unsupported_response_type( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test authorization with unsupported response_type.""" + _, challenge = generate_pkce() + + response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "unsupported_response_test", + "response_type": "token", # Implicit flow not supported + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + assert response.status_code == 200 + from urllib.parse import parse_qs, urlparse + + query_params = parse_qs(urlparse(response.json()["redirect_url"]).query) + assert query_params["error"][0] == "unsupported_response_type" + + +# ============================================================================ +# Token Endpoint Integration Tests - Authorization Code Grant +# ============================================================================ + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_exchange_creates_tokens_in_database( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test that token exchange creates access and refresh tokens in database.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # First get an authorization code + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH", "READ_GRAPH"], + "state": "token_test_state", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + # Exchange code for tokens + token_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + + assert token_response.status_code == 200 + tokens = token_response.json() + + assert "access_token" in tokens + assert "refresh_token" in tokens + assert tokens["token_type"] == "Bearer" + assert "EXECUTE_GRAPH" in tokens["scopes"] + assert "READ_GRAPH" in tokens["scopes"] + + # Verify access token exists in database (hashed) + access_token_hash = hashlib.sha256(tokens["access_token"].encode()).hexdigest() + db_access_token = await PrismaOAuthAccessToken.prisma().find_unique( + where={"token": access_token_hash} + ) + + assert db_access_token is not None + assert db_access_token.userId == test_user + assert db_access_token.applicationId == test_oauth_app["id"] + assert db_access_token.revokedAt is None + + # Verify refresh token exists in database (hashed) + refresh_token_hash = hashlib.sha256(tokens["refresh_token"].encode()).hexdigest() + db_refresh_token = await PrismaOAuthRefreshToken.prisma().find_unique( + where={"token": refresh_token_hash} + ) + + assert db_refresh_token is not None + assert db_refresh_token.userId == test_user + assert db_refresh_token.applicationId == test_oauth_app["id"] + assert 
db_refresh_token.revokedAt is None + + # Verify authorization code is marked as used + db_code = await PrismaOAuthAuthorizationCode.prisma().find_unique( + where={"code": auth_code} + ) + assert db_code is not None + assert db_code.usedAt is not None + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorization_code_cannot_be_reused( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test that authorization code can only be used once.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get authorization code + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "reuse_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + # First exchange - should succeed + first_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + assert first_response.status_code == 200 + + # Second exchange - should fail + second_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + assert second_response.status_code == 400 + assert "already used" in second_response.json()["detail"] + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_exchange_with_invalid_client_secret( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test that token exchange fails with invalid client secret.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get authorization code + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "bad_secret_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + # Try to exchange with wrong secret + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": "wrong_secret", + "code_verifier": verifier, + }, + ) + + assert response.status_code == 401 + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_authorization_code_invalid_code( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test token exchange with invalid/nonexistent authorization code.""" + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": "nonexistent_invalid_code_xyz", + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": 
test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": "", + }, + ) + + assert response.status_code == 400 + assert "not found" in response.json()["detail"].lower() + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_authorization_code_expired( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test token exchange with expired authorization code.""" + from datetime import datetime, timedelta, timezone + + # Create an expired authorization code directly in the database + expired_code = f"expired_code_{secrets.token_urlsafe(16)}" + now = datetime.now(timezone.utc) + + await PrismaOAuthAuthorizationCode.prisma().create( + data={ + "code": expired_code, + "applicationId": test_oauth_app["id"], + "userId": test_user, + "scopes": [APIKeyPermission.EXECUTE_GRAPH], + "redirectUri": test_oauth_app["redirect_uri"], + "expiresAt": now - timedelta(hours=1), # Already expired + } + ) + + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": expired_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": "", + }, + ) + + assert response.status_code == 400 + assert "expired" in response.json()["detail"].lower() + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_authorization_code_redirect_uri_mismatch( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test token exchange with mismatched redirect_uri.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get authorization code with one redirect_uri + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "redirect_mismatch_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + # Try to exchange with different redirect_uri + # Note: localhost:3000 is in the app's registered redirect_uris + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + # Different redirect_uri from authorization request + "redirect_uri": "http://localhost:3000/callback", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + + assert response.status_code == 400 + assert "redirect_uri" in response.json()["detail"].lower() + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_authorization_code_pkce_failure( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, + pkce_credentials: tuple[str, str], +): + """Test token exchange with PKCE verification failure (wrong verifier).""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = pkce_credentials + + # Get authorization code with PKCE challenge + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "pkce_failure_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + 
follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + # Try to exchange with wrong verifier + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": "wrong_verifier_that_does_not_match", + }, + ) + + assert response.status_code == 400 + assert "pkce" in response.json()["detail"].lower() + + +# ============================================================================ +# Token Endpoint Integration Tests - Refresh Token Grant +# ============================================================================ + + +@pytest.mark.asyncio(loop_scope="session") +async def test_refresh_token_creates_new_tokens( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test that refresh token grant creates new access and refresh tokens.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get initial tokens + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "refresh_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + initial_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + initial_tokens = initial_response.json() + + # Use refresh token to get new tokens + refresh_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "refresh_token", + "refresh_token": initial_tokens["refresh_token"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert refresh_response.status_code == 200 + new_tokens = refresh_response.json() + + # Tokens should be different + assert new_tokens["access_token"] != initial_tokens["access_token"] + assert new_tokens["refresh_token"] != initial_tokens["refresh_token"] + + # Old refresh token should be revoked in database + old_refresh_hash = hashlib.sha256( + initial_tokens["refresh_token"].encode() + ).hexdigest() + old_db_token = await PrismaOAuthRefreshToken.prisma().find_unique( + where={"token": old_refresh_hash} + ) + assert old_db_token is not None + assert old_db_token.revokedAt is not None + + # New tokens should exist and be valid + new_access_hash = hashlib.sha256(new_tokens["access_token"].encode()).hexdigest() + new_db_access = await PrismaOAuthAccessToken.prisma().find_unique( + where={"token": new_access_hash} + ) + assert new_db_access is not None + assert new_db_access.revokedAt is None + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_refresh_invalid_token( + client: httpx.AsyncClient, + test_oauth_app: dict, +): + """Test token refresh with invalid/nonexistent refresh token.""" + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "refresh_token", + "refresh_token": 
"completely_invalid_refresh_token_xyz", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert response.status_code == 400 + assert "not found" in response.json()["detail"].lower() + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_refresh_expired( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test token refresh with expired refresh token.""" + from datetime import datetime, timedelta, timezone + + # Create an expired refresh token directly in the database + expired_token_value = f"expired_refresh_{secrets.token_urlsafe(16)}" + expired_token_hash = hashlib.sha256(expired_token_value.encode()).hexdigest() + now = datetime.now(timezone.utc) + + await PrismaOAuthRefreshToken.prisma().create( + data={ + "token": expired_token_hash, + "applicationId": test_oauth_app["id"], + "userId": test_user, + "scopes": [APIKeyPermission.EXECUTE_GRAPH], + "expiresAt": now - timedelta(days=1), # Already expired + } + ) + + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "refresh_token", + "refresh_token": expired_token_value, + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert response.status_code == 400 + assert "expired" in response.json()["detail"].lower() + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_refresh_revoked( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test token refresh with revoked refresh token.""" + from datetime import datetime, timedelta, timezone + + # Create a revoked refresh token directly in the database + revoked_token_value = f"revoked_refresh_{secrets.token_urlsafe(16)}" + revoked_token_hash = hashlib.sha256(revoked_token_value.encode()).hexdigest() + now = datetime.now(timezone.utc) + + await PrismaOAuthRefreshToken.prisma().create( + data={ + "token": revoked_token_hash, + "applicationId": test_oauth_app["id"], + "userId": test_user, + "scopes": [APIKeyPermission.EXECUTE_GRAPH], + "expiresAt": now + timedelta(days=30), # Not expired + "revokedAt": now - timedelta(hours=1), # But revoked + } + ) + + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "refresh_token", + "refresh_token": revoked_token_value, + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert response.status_code == 400 + assert "revoked" in response.json()["detail"].lower() + + +@pytest.fixture +async def other_oauth_app(test_user: str): + """Create a second OAuth application for cross-app tests.""" + app_id = str(uuid.uuid4()) + client_id = f"other_client_{secrets.token_urlsafe(8)}" + client_secret_plaintext = f"agpt_other_{secrets.token_urlsafe(16)}" + client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext) + + await PrismaOAuthApplication.prisma().create( + data={ + "id": app_id, + "name": "Other OAuth App", + "description": "Second test application", + "clientId": client_id, + "clientSecret": client_secret_hash, + "clientSecretSalt": client_secret_salt, + "redirectUris": ["https://other.example.com/callback"], + "grantTypes": ["authorization_code", "refresh_token"], + "scopes": [APIKeyPermission.EXECUTE_GRAPH], + "ownerId": test_user, + "isActive": True, + } + ) + + yield { + "id": app_id, + "client_id": client_id, + "client_secret": client_secret_plaintext, + "redirect_uri": "https://other.example.com/callback", + } + + 
+@pytest.mark.asyncio(loop_scope="session") +async def test_token_refresh_wrong_application( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, + other_oauth_app: dict, +): + """Test token refresh with token from different application.""" + from datetime import datetime, timedelta, timezone + + # Create a refresh token for `test_oauth_app` + token_value = f"app1_refresh_{secrets.token_urlsafe(16)}" + token_hash = hashlib.sha256(token_value.encode()).hexdigest() + now = datetime.now(timezone.utc) + + await PrismaOAuthRefreshToken.prisma().create( + data={ + "token": token_hash, + "applicationId": test_oauth_app["id"], # Belongs to test_oauth_app + "userId": test_user, + "scopes": [APIKeyPermission.EXECUTE_GRAPH], + "expiresAt": now + timedelta(days=30), + } + ) + + # Try to use it with `other_oauth_app` + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "refresh_token", + "refresh_token": token_value, + "client_id": other_oauth_app["client_id"], + "client_secret": other_oauth_app["client_secret"], + }, + ) + + assert response.status_code == 400 + assert "does not belong" in response.json()["detail"].lower() + + +# ============================================================================ +# Token Introspection Integration Tests +# ============================================================================ + + +@pytest.mark.asyncio(loop_scope="session") +async def test_introspect_valid_access_token( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test introspection returns correct info for valid access token.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get tokens + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH", "READ_GRAPH"], + "state": "introspect_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + token_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + tokens = token_response.json() + + # Introspect the access token + introspect_response = await client.post( + "/api/oauth/introspect", + json={ + "token": tokens["access_token"], + "token_type_hint": "access_token", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert introspect_response.status_code == 200 + data = introspect_response.json() + + assert data["active"] is True + assert data["token_type"] == "access_token" + assert data["user_id"] == test_user + assert data["client_id"] == test_oauth_app["client_id"] + assert "EXECUTE_GRAPH" in data["scopes"] + assert "READ_GRAPH" in data["scopes"] + + +@pytest.mark.asyncio(loop_scope="session") +async def test_introspect_invalid_token_returns_inactive( + client: httpx.AsyncClient, + test_oauth_app: dict, +): + """Test introspection returns inactive for non-existent token.""" + introspect_response = await client.post( + "/api/oauth/introspect", + json={ + "token": "completely_invalid_token_that_does_not_exist", + 
"client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert introspect_response.status_code == 200 + assert introspect_response.json()["active"] is False + + +@pytest.mark.asyncio(loop_scope="session") +async def test_introspect_active_refresh_token( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test introspection returns correct info for valid refresh token.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get tokens via the full flow + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH", "READ_GRAPH"], + "state": "introspect_refresh_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + token_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + tokens = token_response.json() + + # Introspect the refresh token + introspect_response = await client.post( + "/api/oauth/introspect", + json={ + "token": tokens["refresh_token"], + "token_type_hint": "refresh_token", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert introspect_response.status_code == 200 + data = introspect_response.json() + + assert data["active"] is True + assert data["token_type"] == "refresh_token" + assert data["user_id"] == test_user + assert data["client_id"] == test_oauth_app["client_id"] + + +@pytest.mark.asyncio(loop_scope="session") +async def test_introspect_invalid_client( + client: httpx.AsyncClient, + test_oauth_app: dict, +): + """Test introspection with invalid client credentials.""" + introspect_response = await client.post( + "/api/oauth/introspect", + json={ + "token": "some_token", + "client_id": test_oauth_app["client_id"], + "client_secret": "wrong_secret_value", + }, + ) + + assert introspect_response.status_code == 401 + + +@pytest.mark.asyncio(loop_scope="session") +async def test_validate_access_token_fails_when_app_disabled( + test_user: str, +): + """ + Test that validate_access_token raises InvalidClientError when the app is disabled. + + This tests the security feature where disabling an OAuth application + immediately invalidates all its access tokens. 
+ """ + from datetime import datetime, timedelta, timezone + + from backend.data.auth.oauth import InvalidClientError, validate_access_token + + # Create an OAuth app + app_id = str(uuid.uuid4()) + client_id = f"disable_test_{secrets.token_urlsafe(8)}" + client_secret_plaintext = f"agpt_disable_{secrets.token_urlsafe(16)}" + client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext) + + await PrismaOAuthApplication.prisma().create( + data={ + "id": app_id, + "name": "App To Be Disabled", + "description": "Test app for disabled validation", + "clientId": client_id, + "clientSecret": client_secret_hash, + "clientSecretSalt": client_secret_salt, + "redirectUris": ["https://example.com/callback"], + "grantTypes": ["authorization_code"], + "scopes": [APIKeyPermission.EXECUTE_GRAPH], + "ownerId": test_user, + "isActive": True, + } + ) + + # Create an access token directly in the database + token_plaintext = f"test_token_{secrets.token_urlsafe(32)}" + token_hash = hashlib.sha256(token_plaintext.encode()).hexdigest() + now = datetime.now(timezone.utc) + + await PrismaOAuthAccessToken.prisma().create( + data={ + "token": token_hash, + "applicationId": app_id, + "userId": test_user, + "scopes": [APIKeyPermission.EXECUTE_GRAPH], + "expiresAt": now + timedelta(hours=1), + } + ) + + # Token should be valid while app is active + token_info, _ = await validate_access_token(token_plaintext) + assert token_info.user_id == test_user + + # Disable the app + await PrismaOAuthApplication.prisma().update( + where={"id": app_id}, + data={"isActive": False}, + ) + + # Token should now fail validation with InvalidClientError + with pytest.raises(InvalidClientError, match="disabled"): + await validate_access_token(token_plaintext) + + # Cleanup + await PrismaOAuthApplication.prisma().delete(where={"id": app_id}) + + +# ============================================================================ +# Token Revocation Integration Tests +# ============================================================================ + + +@pytest.mark.asyncio(loop_scope="session") +async def test_revoke_access_token_updates_database( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test that revoking access token updates database.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get tokens + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "revoke_access_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + token_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + tokens = token_response.json() + + # Verify token is not revoked in database + access_hash = hashlib.sha256(tokens["access_token"].encode()).hexdigest() + db_token_before = await PrismaOAuthAccessToken.prisma().find_unique( + where={"token": access_hash} + ) + assert db_token_before is not None + assert db_token_before.revokedAt is None + + # Revoke the token + revoke_response = await 
client.post( + "/api/oauth/revoke", + json={ + "token": tokens["access_token"], + "token_type_hint": "access_token", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert revoke_response.status_code == 200 + assert revoke_response.json()["status"] == "ok" + + # Verify token is now revoked in database + db_token_after = await PrismaOAuthAccessToken.prisma().find_unique( + where={"token": access_hash} + ) + assert db_token_after is not None + assert db_token_after.revokedAt is not None + + +@pytest.mark.asyncio(loop_scope="session") +async def test_revoke_unknown_token_returns_ok( + client: httpx.AsyncClient, + test_oauth_app: dict, +): + """Test that revoking unknown token returns 200 (per RFC 7009).""" + revoke_response = await client.post( + "/api/oauth/revoke", + json={ + "token": "unknown_token_that_does_not_exist_anywhere", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + # Per RFC 7009, should return 200 even for unknown tokens + assert revoke_response.status_code == 200 + assert revoke_response.json()["status"] == "ok" + + +@pytest.mark.asyncio(loop_scope="session") +async def test_revoke_refresh_token_updates_database( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test that revoking refresh token updates database.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get tokens + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "revoke_refresh_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + token_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + tokens = token_response.json() + + # Verify refresh token is not revoked in database + refresh_hash = hashlib.sha256(tokens["refresh_token"].encode()).hexdigest() + db_token_before = await PrismaOAuthRefreshToken.prisma().find_unique( + where={"token": refresh_hash} + ) + assert db_token_before is not None + assert db_token_before.revokedAt is None + + # Revoke the refresh token + revoke_response = await client.post( + "/api/oauth/revoke", + json={ + "token": tokens["refresh_token"], + "token_type_hint": "refresh_token", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert revoke_response.status_code == 200 + assert revoke_response.json()["status"] == "ok" + + # Verify refresh token is now revoked in database + db_token_after = await PrismaOAuthRefreshToken.prisma().find_unique( + where={"token": refresh_hash} + ) + assert db_token_after is not None + assert db_token_after.revokedAt is not None + + +@pytest.mark.asyncio(loop_scope="session") +async def test_revoke_invalid_client( + client: httpx.AsyncClient, + test_oauth_app: dict, +): + """Test revocation with invalid client credentials.""" + revoke_response = await client.post( + "/api/oauth/revoke", + json={ + "token": "some_token", + 
"client_id": test_oauth_app["client_id"], + "client_secret": "wrong_secret_value", + }, + ) + + assert revoke_response.status_code == 401 + + +@pytest.mark.asyncio(loop_scope="session") +async def test_revoke_token_from_different_app_fails_silently( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """ + Test that an app cannot revoke tokens belonging to a different app. + + Per RFC 7009, the endpoint still returns 200 OK (to prevent token scanning), + but the token should remain valid in the database. + """ + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get tokens for app 1 + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "cross_app_revoke_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + token_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + tokens = token_response.json() + + # Create a second OAuth app + app2_id = str(uuid.uuid4()) + app2_client_id = f"test_client_app2_{secrets.token_urlsafe(8)}" + app2_client_secret_plaintext = f"agpt_secret_app2_{secrets.token_urlsafe(16)}" + app2_client_secret_hash, app2_client_secret_salt = keysmith.hash_key( + app2_client_secret_plaintext + ) + + await PrismaOAuthApplication.prisma().create( + data={ + "id": app2_id, + "name": "Second Test OAuth App", + "description": "Second test application for cross-app revocation test", + "clientId": app2_client_id, + "clientSecret": app2_client_secret_hash, + "clientSecretSalt": app2_client_secret_salt, + "redirectUris": ["https://other-app.com/callback"], + "grantTypes": ["authorization_code", "refresh_token"], + "scopes": [APIKeyPermission.EXECUTE_GRAPH, APIKeyPermission.READ_GRAPH], + "ownerId": test_user, + "isActive": True, + } + ) + + # App 2 tries to revoke App 1's access token + revoke_response = await client.post( + "/api/oauth/revoke", + json={ + "token": tokens["access_token"], + "token_type_hint": "access_token", + "client_id": app2_client_id, + "client_secret": app2_client_secret_plaintext, + }, + ) + + # Per RFC 7009, returns 200 OK even if token not found/not owned + assert revoke_response.status_code == 200 + assert revoke_response.json()["status"] == "ok" + + # But the token should NOT be revoked in the database + access_hash = hashlib.sha256(tokens["access_token"].encode()).hexdigest() + db_token = await PrismaOAuthAccessToken.prisma().find_unique( + where={"token": access_hash} + ) + assert db_token is not None + assert db_token.revokedAt is None, "Token should NOT be revoked by different app" + + # Now app 1 revokes its own token - should work + revoke_response2 = await client.post( + "/api/oauth/revoke", + json={ + "token": tokens["access_token"], + "token_type_hint": "access_token", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert revoke_response2.status_code == 200 + + # Token should now be revoked + db_token_after = await 
PrismaOAuthAccessToken.prisma().find_unique( + where={"token": access_hash} + ) + assert db_token_after is not None + assert db_token_after.revokedAt is not None, "Token should be revoked by own app" + + # Cleanup second app + await PrismaOAuthApplication.prisma().delete(where={"id": app2_id}) + + +# ============================================================================ +# Complete End-to-End OAuth Flow Test +# ============================================================================ + + +@pytest.mark.asyncio(loop_scope="session") +async def test_complete_oauth_flow_end_to_end( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, + pkce_credentials: tuple[str, str], +): + """ + Test the complete OAuth 2.0 flow from authorization to token refresh. + + This is a comprehensive integration test that verifies the entire + OAuth flow works correctly with real API calls and database operations. + """ + from urllib.parse import parse_qs, urlparse + + verifier, challenge = pkce_credentials + + # Step 1: Authorization request with PKCE + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH", "READ_GRAPH"], + "state": "e2e_test_state", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + assert auth_response.status_code == 200 + + redirect_url = auth_response.json()["redirect_url"] + query = parse_qs(urlparse(redirect_url).query) + + assert query["state"][0] == "e2e_test_state" + auth_code = query["code"][0] + + # Verify authorization code in database + db_code = await PrismaOAuthAuthorizationCode.prisma().find_unique( + where={"code": auth_code} + ) + assert db_code is not None + assert db_code.codeChallenge == challenge + + # Step 2: Exchange code for tokens + token_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + + assert token_response.status_code == 200 + tokens = token_response.json() + assert "access_token" in tokens + assert "refresh_token" in tokens + + # Verify code is marked as used + db_code_used = await PrismaOAuthAuthorizationCode.prisma().find_unique_or_raise( + where={"code": auth_code} + ) + assert db_code_used.usedAt is not None + + # Step 3: Introspect access token + introspect_response = await client.post( + "/api/oauth/introspect", + json={ + "token": tokens["access_token"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert introspect_response.status_code == 200 + introspect_data = introspect_response.json() + assert introspect_data["active"] is True + assert introspect_data["user_id"] == test_user + + # Step 4: Refresh tokens + refresh_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "refresh_token", + "refresh_token": tokens["refresh_token"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert refresh_response.status_code == 200 + new_tokens = refresh_response.json() + assert new_tokens["access_token"] != tokens["access_token"] + assert new_tokens["refresh_token"] != tokens["refresh_token"] + + # Verify old refresh token is 
revoked + old_refresh_hash = hashlib.sha256(tokens["refresh_token"].encode()).hexdigest() + old_db_refresh = await PrismaOAuthRefreshToken.prisma().find_unique_or_raise( + where={"token": old_refresh_hash} + ) + assert old_db_refresh.revokedAt is not None + + # Step 5: Verify new access token works + new_introspect = await client.post( + "/api/oauth/introspect", + json={ + "token": new_tokens["access_token"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert new_introspect.status_code == 200 + assert new_introspect.json()["active"] is True + + # Step 6: Revoke new access token + revoke_response = await client.post( + "/api/oauth/revoke", + json={ + "token": new_tokens["access_token"], + "token_type_hint": "access_token", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert revoke_response.status_code == 200 + + # Step 7: Verify revoked token is inactive + final_introspect = await client.post( + "/api/oauth/introspect", + json={ + "token": new_tokens["access_token"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert final_introspect.status_code == 200 + assert final_introspect.json()["active"] is False + + # Verify in database + new_access_hash = hashlib.sha256(new_tokens["access_token"].encode()).hexdigest() + db_revoked = await PrismaOAuthAccessToken.prisma().find_unique_or_raise( + where={"token": new_access_hash} + ) + assert db_revoked.revokedAt is not None diff --git a/autogpt_platform/backend/backend/server/routers/v1.py b/autogpt_platform/backend/backend/server/routers/v1.py index d74d4ecdf7..e5e74690f8 100644 --- a/autogpt_platform/backend/backend/server/routers/v1.py +++ b/autogpt_platform/backend/backend/server/routers/v1.py @@ -31,9 +31,9 @@ from typing_extensions import Optional, TypedDict import backend.server.integrations.router import backend.server.routers.analytics import backend.server.v2.library.db as library_db -from backend.data import api_key as api_key_db from backend.data import execution as execution_db from backend.data import graph as graph_db +from backend.data.auth import api_key as api_key_db from backend.data.block import BlockInput, CompletedBlockOutput, get_block, get_blocks from backend.data.credit import ( AutoTopUpConfig, diff --git a/autogpt_platform/backend/backend/util/settings.py b/autogpt_platform/backend/backend/util/settings.py index 4eb45dc972..0f17b1215c 100644 --- a/autogpt_platform/backend/backend/util/settings.py +++ b/autogpt_platform/backend/backend/util/settings.py @@ -362,6 +362,13 @@ class Config(UpdateTrackingModel["Config"], BaseSettings): description="Hours between cloud storage cleanup runs (1-24 hours)", ) + oauth_token_cleanup_interval_hours: int = Field( + default=6, + ge=1, + le=24, + description="Hours between OAuth token cleanup runs (1-24 hours)", + ) + upload_file_size_limit_mb: int = Field( default=256, ge=1, diff --git a/autogpt_platform/backend/migrations/20251212165920_add_oauth_provider_support/migration.sql b/autogpt_platform/backend/migrations/20251212165920_add_oauth_provider_support/migration.sql new file mode 100644 index 0000000000..9c8672c4c3 --- /dev/null +++ b/autogpt_platform/backend/migrations/20251212165920_add_oauth_provider_support/migration.sql @@ -0,0 +1,129 @@ +-- CreateTable +CREATE TABLE "OAuthApplication" ( + "id" TEXT NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" 
TIMESTAMP(3) NOT NULL, + "name" TEXT NOT NULL, + "description" TEXT, + "clientId" TEXT NOT NULL, + "clientSecret" TEXT NOT NULL, + "clientSecretSalt" TEXT NOT NULL, + "redirectUris" TEXT[], + "grantTypes" TEXT[] DEFAULT ARRAY['authorization_code', 'refresh_token']::TEXT[], + "scopes" "APIKeyPermission"[], + "ownerId" TEXT NOT NULL, + "isActive" BOOLEAN NOT NULL DEFAULT true, + + CONSTRAINT "OAuthApplication_pkey" PRIMARY KEY ("id") +); + +-- CreateTable +CREATE TABLE "OAuthAuthorizationCode" ( + "id" TEXT NOT NULL, + "code" TEXT NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "expiresAt" TIMESTAMP(3) NOT NULL, + "applicationId" TEXT NOT NULL, + "userId" TEXT NOT NULL, + "scopes" "APIKeyPermission"[], + "redirectUri" TEXT NOT NULL, + "codeChallenge" TEXT, + "codeChallengeMethod" TEXT, + "usedAt" TIMESTAMP(3), + + CONSTRAINT "OAuthAuthorizationCode_pkey" PRIMARY KEY ("id") +); + +-- CreateTable +CREATE TABLE "OAuthAccessToken" ( + "id" TEXT NOT NULL, + "token" TEXT NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "expiresAt" TIMESTAMP(3) NOT NULL, + "applicationId" TEXT NOT NULL, + "userId" TEXT NOT NULL, + "scopes" "APIKeyPermission"[], + "revokedAt" TIMESTAMP(3), + + CONSTRAINT "OAuthAccessToken_pkey" PRIMARY KEY ("id") +); + +-- CreateTable +CREATE TABLE "OAuthRefreshToken" ( + "id" TEXT NOT NULL, + "token" TEXT NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "expiresAt" TIMESTAMP(3) NOT NULL, + "applicationId" TEXT NOT NULL, + "userId" TEXT NOT NULL, + "scopes" "APIKeyPermission"[], + "revokedAt" TIMESTAMP(3), + + CONSTRAINT "OAuthRefreshToken_pkey" PRIMARY KEY ("id") +); + +-- CreateIndex +CREATE UNIQUE INDEX "OAuthApplication_clientId_key" ON "OAuthApplication"("clientId"); + +-- CreateIndex +CREATE INDEX "OAuthApplication_clientId_idx" ON "OAuthApplication"("clientId"); + +-- CreateIndex +CREATE INDEX "OAuthApplication_ownerId_idx" ON "OAuthApplication"("ownerId"); + +-- CreateIndex +CREATE UNIQUE INDEX "OAuthAuthorizationCode_code_key" ON "OAuthAuthorizationCode"("code"); + +-- CreateIndex +CREATE INDEX "OAuthAuthorizationCode_code_idx" ON "OAuthAuthorizationCode"("code"); + +-- CreateIndex +CREATE INDEX "OAuthAuthorizationCode_applicationId_userId_idx" ON "OAuthAuthorizationCode"("applicationId", "userId"); + +-- CreateIndex +CREATE INDEX "OAuthAuthorizationCode_expiresAt_idx" ON "OAuthAuthorizationCode"("expiresAt"); + +-- CreateIndex +CREATE UNIQUE INDEX "OAuthAccessToken_token_key" ON "OAuthAccessToken"("token"); + +-- CreateIndex +CREATE INDEX "OAuthAccessToken_token_idx" ON "OAuthAccessToken"("token"); + +-- CreateIndex +CREATE INDEX "OAuthAccessToken_userId_applicationId_idx" ON "OAuthAccessToken"("userId", "applicationId"); + +-- CreateIndex +CREATE INDEX "OAuthAccessToken_expiresAt_idx" ON "OAuthAccessToken"("expiresAt"); + +-- CreateIndex +CREATE UNIQUE INDEX "OAuthRefreshToken_token_key" ON "OAuthRefreshToken"("token"); + +-- CreateIndex +CREATE INDEX "OAuthRefreshToken_token_idx" ON "OAuthRefreshToken"("token"); + +-- CreateIndex +CREATE INDEX "OAuthRefreshToken_userId_applicationId_idx" ON "OAuthRefreshToken"("userId", "applicationId"); + +-- CreateIndex +CREATE INDEX "OAuthRefreshToken_expiresAt_idx" ON "OAuthRefreshToken"("expiresAt"); + +-- AddForeignKey +ALTER TABLE "OAuthApplication" ADD CONSTRAINT "OAuthApplication_ownerId_fkey" FOREIGN KEY ("ownerId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "OAuthAuthorizationCode" ADD CONSTRAINT 
"OAuthAuthorizationCode_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "OAuthApplication"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "OAuthAuthorizationCode" ADD CONSTRAINT "OAuthAuthorizationCode_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "OAuthAccessToken" ADD CONSTRAINT "OAuthAccessToken_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "OAuthApplication"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "OAuthAccessToken" ADD CONSTRAINT "OAuthAccessToken_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "OAuthRefreshToken" ADD CONSTRAINT "OAuthRefreshToken_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "OAuthApplication"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "OAuthRefreshToken" ADD CONSTRAINT "OAuthRefreshToken_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE; diff --git a/autogpt_platform/backend/migrations/20251218231330_add_oauth_app_logo/migration.sql b/autogpt_platform/backend/migrations/20251218231330_add_oauth_app_logo/migration.sql new file mode 100644 index 0000000000..c9c8c76df1 --- /dev/null +++ b/autogpt_platform/backend/migrations/20251218231330_add_oauth_app_logo/migration.sql @@ -0,0 +1,5 @@ +-- AlterEnum +ALTER TYPE "APIKeyPermission" ADD VALUE 'IDENTITY'; + +-- AlterTable +ALTER TABLE "OAuthApplication" ADD COLUMN "logoUrl" TEXT; diff --git a/autogpt_platform/backend/pyproject.toml b/autogpt_platform/backend/pyproject.toml index a87ae8e71d..fb06b65162 100644 --- a/autogpt_platform/backend/pyproject.toml +++ b/autogpt_platform/backend/pyproject.toml @@ -115,6 +115,8 @@ format = "linter:format" lint = "linter:lint" test = "run_tests:test" load-store-agents = "test.load_store_agents:run" +export-api-schema = "backend.cli.generate_openapi_json:main" +oauth-tool = "backend.cli.oauth_tool:cli" [tool.isort] profile = "black" diff --git a/autogpt_platform/backend/schema.prisma b/autogpt_platform/backend/schema.prisma index 121ccab5fc..d81cd4d1b1 100644 --- a/autogpt_platform/backend/schema.prisma +++ b/autogpt_platform/backend/schema.prisma @@ -61,6 +61,12 @@ model User { IntegrationWebhooks IntegrationWebhook[] NotificationBatches UserNotificationBatch[] PendingHumanReviews PendingHumanReview[] + + // OAuth Provider relations + OAuthApplications OAuthApplication[] + OAuthAuthorizationCodes OAuthAuthorizationCode[] + OAuthAccessTokens OAuthAccessToken[] + OAuthRefreshTokens OAuthRefreshToken[] } enum OnboardingStep { @@ -924,6 +930,7 @@ enum SubmissionStatus { } enum APIKeyPermission { + IDENTITY // Info about the authenticated user EXECUTE_GRAPH // Can execute agent graphs READ_GRAPH // Can get graph versions and details EXECUTE_BLOCK // Can execute individual blocks @@ -975,3 +982,113 @@ enum APIKeyStatus { REVOKED SUSPENDED } + +//////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////// +////////////// OAUTH PROVIDER TABLES ////////////////// +//////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////// + +// OAuth2 applications that can access AutoGPT on behalf of users +model OAuthApplication { + id String @id @default(uuid()) + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + // 
Application metadata + name String + description String? + logoUrl String? // URL to app logo stored in GCS + clientId String @unique + clientSecret String // Hashed with Scrypt (same as API keys) + clientSecretSalt String // Salt for Scrypt hashing + + // OAuth configuration + redirectUris String[] // Allowed callback URLs + grantTypes String[] @default(["authorization_code", "refresh_token"]) + scopes APIKeyPermission[] // Which permissions the app can request + + // Application management + ownerId String + Owner User @relation(fields: [ownerId], references: [id], onDelete: Cascade) + isActive Boolean @default(true) + + // Relations + AuthorizationCodes OAuthAuthorizationCode[] + AccessTokens OAuthAccessToken[] + RefreshTokens OAuthRefreshToken[] + + @@index([clientId]) + @@index([ownerId]) +} + +// Temporary authorization codes (10 min TTL) +model OAuthAuthorizationCode { + id String @id @default(uuid()) + code String @unique + createdAt DateTime @default(now()) + expiresAt DateTime // Now + 10 minutes + + applicationId String + Application OAuthApplication @relation(fields: [applicationId], references: [id], onDelete: Cascade) + + userId String + User User @relation(fields: [userId], references: [id], onDelete: Cascade) + + scopes APIKeyPermission[] + redirectUri String // Must match one from application + + // PKCE (Proof Key for Code Exchange) support + codeChallenge String? + codeChallengeMethod String? // "S256" or "plain" + + usedAt DateTime? // Set when code is consumed + + @@index([code]) + @@index([applicationId, userId]) + @@index([expiresAt]) // For cleanup +} + +// Access tokens (1 hour TTL) +model OAuthAccessToken { + id String @id @default(uuid()) + token String @unique // SHA256 hash of plaintext token + createdAt DateTime @default(now()) + expiresAt DateTime // Now + 1 hour + + applicationId String + Application OAuthApplication @relation(fields: [applicationId], references: [id], onDelete: Cascade) + + userId String + User User @relation(fields: [userId], references: [id], onDelete: Cascade) + + scopes APIKeyPermission[] + + revokedAt DateTime? // Set when token is revoked + + @@index([token]) // For token lookup + @@index([userId, applicationId]) + @@index([expiresAt]) // For cleanup +} + +// Refresh tokens (30 days TTL) +model OAuthRefreshToken { + id String @id @default(uuid()) + token String @unique // SHA256 hash of plaintext token + createdAt DateTime @default(now()) + expiresAt DateTime // Now + 30 days + + applicationId String + Application OAuthApplication @relation(fields: [applicationId], references: [id], onDelete: Cascade) + + userId String + User User @relation(fields: [userId], references: [id], onDelete: Cascade) + + scopes APIKeyPermission[] + + revokedAt DateTime? 
// Set when token is revoked + + @@index([token]) // For token lookup + @@index([userId, applicationId]) + @@index([expiresAt]) // For cleanup +} diff --git a/autogpt_platform/backend/test/e2e_test_data.py b/autogpt_platform/backend/test/e2e_test_data.py index 013c8c11a7..943c506f5c 100644 --- a/autogpt_platform/backend/test/e2e_test_data.py +++ b/autogpt_platform/backend/test/e2e_test_data.py @@ -23,13 +23,13 @@ from typing import Any, Dict, List from faker import Faker -from backend.data.api_key import create_api_key +from backend.data.auth.api_key import create_api_key from backend.data.credit import get_user_credit_model from backend.data.db import prisma from backend.data.graph import Graph, Link, Node, create_graph +from backend.data.user import get_or_create_user # Import API functions from the backend -from backend.data.user import get_or_create_user from backend.server.v2.library.db import create_library_agent, create_preset from backend.server.v2.library.model import LibraryAgentPresetCreatable from backend.server.v2.store.db import create_store_submission, review_store_submission @@ -464,7 +464,7 @@ class TestDataCreator: api_keys = [] for user in self.users: - from backend.data.api_key import APIKeyPermission + from backend.data.auth.api_key import APIKeyPermission try: # Use the API function to create API key diff --git a/autogpt_platform/frontend/src/app/(platform)/auth/authorize/page.tsx b/autogpt_platform/frontend/src/app/(platform)/auth/authorize/page.tsx new file mode 100644 index 0000000000..8093b75965 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/auth/authorize/page.tsx @@ -0,0 +1,296 @@ +"use client"; + +import { useState } from "react"; +import { useSearchParams } from "next/navigation"; +import { AuthCard } from "@/components/auth/AuthCard"; +import { Text } from "@/components/atoms/Text/Text"; +import { Button } from "@/components/atoms/Button/Button"; +import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner"; +import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard"; +import { ImageIcon, SealCheckIcon } from "@phosphor-icons/react"; +import { + postOauthAuthorize, + useGetOauthGetOauthAppInfo, +} from "@/app/api/__generated__/endpoints/oauth/oauth"; +import type { APIKeyPermission } from "@/app/api/__generated__/models/aPIKeyPermission"; + +// Human-readable scope descriptions +const SCOPE_DESCRIPTIONS: { [key in APIKeyPermission]: string } = { + IDENTITY: "Read user ID, name, e-mail, and timezone", + EXECUTE_GRAPH: "Run your agents", + READ_GRAPH: "View your agents and their configurations", + EXECUTE_BLOCK: "Execute individual blocks", + READ_BLOCK: "View available blocks", + READ_STORE: "Access the Marketplace", + USE_TOOLS: "Use tools on your behalf", + MANAGE_INTEGRATIONS: "Set up new integrations", + READ_INTEGRATIONS: "View your connected integrations", + DELETE_INTEGRATIONS: "Remove connected integrations", +}; + +export default function AuthorizePage() { + const searchParams = useSearchParams(); + + // Extract OAuth parameters from URL + const clientID = searchParams.get("client_id"); + const redirectURI = searchParams.get("redirect_uri"); + const scope = searchParams.get("scope"); + const state = searchParams.get("state"); + const codeChallenge = searchParams.get("code_challenge"); + const codeChallengeMethod = + searchParams.get("code_challenge_method") || "S256"; + const responseType = searchParams.get("response_type") || "code"; + + // Parse requested scopes + const requestedScopes = 
scope?.split(" ").filter(Boolean) || []; + + // Fetch application info using generated hook + const { + data: appInfoResponse, + isLoading, + error, + refetch, + } = useGetOauthGetOauthAppInfo(clientID || "", { + query: { + enabled: !!clientID, + staleTime: Infinity, + refetchOnMount: false, + refetchOnWindowFocus: false, + refetchOnReconnect: false, + }, + }); + + const appInfo = appInfoResponse?.status === 200 ? appInfoResponse.data : null; + + // Validate required parameters + const missingParams: string[] = []; + if (!clientID) missingParams.push("client_id"); + if (!redirectURI) missingParams.push("redirect_uri"); + if (!scope) missingParams.push("scope"); + if (!state) missingParams.push("state"); + if (!codeChallenge) missingParams.push("code_challenge"); + + const [isAuthorizing, setIsAuthorizing] = useState(false); + const [authorizeError, setAuthorizeError] = useState(null); + + async function handleApprove() { + setIsAuthorizing(true); + setAuthorizeError(null); + + try { + // Call the backend /oauth/authorize POST endpoint + // Returns JSON with redirect_url that we use to redirect the user + const response = await postOauthAuthorize({ + client_id: clientID!, + redirect_uri: redirectURI!, + scopes: requestedScopes, + state: state!, + response_type: responseType, + code_challenge: codeChallenge!, + code_challenge_method: codeChallengeMethod as "S256" | "plain", + }); + + if (response.status === 200 && response.data.redirect_url) { + window.location.href = response.data.redirect_url; + } else { + setAuthorizeError("Authorization failed: no redirect URL received"); + setIsAuthorizing(false); + } + } catch (err) { + console.error("Authorization error:", err); + setAuthorizeError( + err instanceof Error ? err.message : "Authorization failed", + ); + setIsAuthorizing(false); + } + } + + function handleDeny() { + // Redirect back to client with access_denied error + const params = new URLSearchParams({ + error: "access_denied", + error_description: "User denied access", + state: state || "", + }); + window.location.href = `${redirectURI}?${params.toString()}`; + } + + // Show error if missing required parameters + if (missingParams.length > 0) { + return ( +
+ + + +
+ ); + } + + // Show loading state + if (isLoading) { + return ( +
+ +
+ + + Loading application information... + +
+
+
+ ); + } + + // Show error if app not found + if (error || !appInfo) { + return ( +
+ + + {redirectURI && ( + + )} + +
+ ); + } + + // Validate that requested scopes are allowed by the app + const invalidScopes = requestedScopes.filter( + (s) => !appInfo.scopes.includes(s), + ); + + if (invalidScopes.length > 0) { + return ( +
+ + + + +
+ ); + } + + return ( +
+ +
+ {/* App info */} +
+ {/* App logo */} +
+ {appInfo.logo_url ? ( + // eslint-disable-next-line @next/next/no-img-element + {`${appInfo.name} + ) : ( + + )} +
+ + {appInfo.name} + + {appInfo.description && ( + + {appInfo.description} + + )} +
+ + {/* Permissions */} +
+ + This application is requesting permission to: + +
    + {requestedScopes.map((scopeKey) => ( +
  • + + + {SCOPE_DESCRIPTIONS[scopeKey as APIKeyPermission] || + scopeKey} + +
  • + ))} +
+
+ + {/* Error message */} + {authorizeError && ( + + )} + + {/* Action buttons */} +
+ + +
+ + {/* Warning */} + + By authorizing, you allow this application to access your AutoGPT + account with the permissions listed above. + +
+
+
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/auth/callback/route.ts b/autogpt_platform/frontend/src/app/(platform)/auth/callback/route.ts index bff2fd0b68..13f8d988fe 100644 --- a/autogpt_platform/frontend/src/app/(platform)/auth/callback/route.ts +++ b/autogpt_platform/frontend/src/app/(platform)/auth/callback/route.ts @@ -74,6 +74,9 @@ export async function GET(request: Request) { ); } + // Get redirect destination from 'next' query parameter + next = searchParams.get("next") || next; + const forwardedHost = request.headers.get("x-forwarded-host"); // original origin before load balancer const isLocalEnv = process.env.NODE_ENV === "development"; if (isLocalEnv) { diff --git a/autogpt_platform/frontend/src/app/(platform)/auth/integrations/setup-wizard/page.tsx b/autogpt_platform/frontend/src/app/(platform)/auth/integrations/setup-wizard/page.tsx new file mode 100644 index 0000000000..5163c46d5b --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/auth/integrations/setup-wizard/page.tsx @@ -0,0 +1,331 @@ +"use client"; + +import Image from "next/image"; +import Link from "next/link"; +import { useSearchParams } from "next/navigation"; +import { useState, useMemo, useRef } from "react"; +import { AuthCard } from "@/components/auth/AuthCard"; +import { Text } from "@/components/atoms/Text/Text"; +import { Button } from "@/components/atoms/Button/Button"; +import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard"; +import { CredentialsInput } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs"; +import type { + BlockIOCredentialsSubSchema, + CredentialsMetaInput, + CredentialsType, +} from "@/lib/autogpt-server-api"; +import { CheckIcon, CircleIcon } from "@phosphor-icons/react"; +import { useGetOauthGetOauthAppInfo } from "@/app/api/__generated__/endpoints/oauth/oauth"; +import { okData } from "@/app/api/helpers"; +import { OAuthApplicationPublicInfo } from "@/app/api/__generated__/models/oAuthApplicationPublicInfo"; + +// All credential types - we accept any type of credential +const ALL_CREDENTIAL_TYPES: CredentialsType[] = [ + "api_key", + "oauth2", + "user_password", + "host_scoped", +]; + +/** + * Provider configuration for the setup wizard. + * + * Query parameters: + * - `providers`: base64-encoded JSON array of { provider, scopes? } objects + * - `app_name`: (optional) Name of the requesting application + * - `redirect_uri`: Where to redirect after completion + * - `state`: Anti-CSRF token + * + * Example `providers` JSON: + * [ + * { "provider": "google", "scopes": ["https://www.googleapis.com/auth/gmail.readonly"] }, + * { "provider": "github", "scopes": ["repo"] } + * ] + * + * Example URL: + * /auth/integrations/setup-wizard?app_name=My%20App&providers=W3sicHJvdmlkZXIiOiJnb29nbGUifV0=&redirect_uri=... 
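+ *
+ * Hypothetical client-side construction of the `providers` value (all names in
+ * this snippet are illustrative, not part of this page); it is simply the
+ * inverse of the `atob` + `JSON.parse` decoding done in `parseProvidersParam`:
+ *
+ *   const providers = [
+ *     { provider: "google", scopes: ["https://www.googleapis.com/auth/gmail.readonly"] },
+ *   ];
+ *   const providersParam = btoa(JSON.stringify(providers));
+ *   const redirectUri = "https://client.example.com/integrations/done"; // hypothetical
+ *   const state = crypto.randomUUID(); // anti-CSRF token, echoed back on redirect
+ *   const url =
+ *     `/auth/integrations/setup-wizard?providers=${encodeURIComponent(providersParam)}` +
+ *     `&redirect_uri=${encodeURIComponent(redirectUri)}&state=${state}`;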
+ */ +interface ProviderConfig { + provider: string; + scopes?: string[]; +} + +function createSchemaFromProviderConfig( + config: ProviderConfig, +): BlockIOCredentialsSubSchema { + return { + type: "object", + properties: {}, + credentials_provider: [config.provider], + credentials_types: ALL_CREDENTIAL_TYPES, + credentials_scopes: config.scopes, + discriminator: undefined, + discriminator_mapping: undefined, + discriminator_values: undefined, + }; +} + +function toDisplayName(provider: string): string { + // Convert snake_case or kebab-case to Title Case + return provider + .split(/[_-]/) + .map((word) => word.charAt(0).toUpperCase() + word.slice(1)) + .join(" "); +} + +function parseProvidersParam(providersParam: string): ProviderConfig[] { + try { + // Decode base64 and parse JSON + const decoded = atob(providersParam); + const parsed = JSON.parse(decoded); + + if (!Array.isArray(parsed)) { + console.warn("providers parameter is not an array"); + return []; + } + + return parsed.filter( + (item): item is ProviderConfig => + typeof item === "object" && + item !== null && + typeof item.provider === "string", + ); + } catch (error) { + console.warn("Failed to parse providers parameter:", error); + return []; + } +} + +export default function IntegrationSetupWizardPage() { + const searchParams = useSearchParams(); + + // Extract query parameters + // `providers` is a base64-encoded JSON array of { provider, scopes?: string[] } objects + const clientID = searchParams.get("client_id"); + const providersParam = searchParams.get("providers"); + const redirectURI = searchParams.get("redirect_uri"); + const state = searchParams.get("state"); + + const { data: appInfo } = useGetOauthGetOauthAppInfo(clientID || "", { + query: { enabled: !!clientID, select: okData }, + }); + + // Parse providers from base64-encoded JSON + const providerConfigs = useMemo(() => { + if (!providersParam) return []; + return parseProvidersParam(providersParam); + }, [providersParam]); + + // Track selected credentials for each provider + const [selectedCredentials, setSelectedCredentials] = useState< + Record + >({}); + + // Track if we've already redirected + const hasRedirectedRef = useRef(false); + + // Check if all providers have credentials + const isAllComplete = useMemo(() => { + if (providerConfigs.length === 0) return false; + return providerConfigs.every( + (config) => selectedCredentials[config.provider], + ); + }, [providerConfigs, selectedCredentials]); + + // Handle credential selection + const handleCredentialSelect = ( + provider: string, + credential?: CredentialsMetaInput, + ) => { + setSelectedCredentials((prev) => ({ + ...prev, + [provider]: credential, + })); + }; + + // Handle completion - redirect back to client + const handleComplete = () => { + if (!redirectURI || hasRedirectedRef.current) return; + hasRedirectedRef.current = true; + + const params = new URLSearchParams({ + success: "true", + }); + if (state) { + params.set("state", state); + } + + window.location.href = `${redirectURI}?${params.toString()}`; + }; + + // Handle cancel - redirect back to client with error + const handleCancel = () => { + if (!redirectURI || hasRedirectedRef.current) return; + hasRedirectedRef.current = true; + + const params = new URLSearchParams({ + error: "user_cancelled", + error_description: "User cancelled the integration setup", + }); + if (state) { + params.set("state", state); + } + + window.location.href = `${redirectURI}?${params.toString()}`; + }; + + // Validate required parameters + const 
missingParams: string[] = []; + if (!providersParam) missingParams.push("providers"); + if (!redirectURI) missingParams.push("redirect_uri"); + + if (missingParams.length > 0) { + return ( +
+ + + +
+ ); + } + + if (providerConfigs.length === 0) { + return ( +
+ + + + +
+ ); + } + + return ( +
+ +
+ + {appInfo ? ( + <> + {appInfo.name} is requesting you to connect the + following integrations to your AutoGPT account. + + ) : ( + "Please connect the following integrations to continue." + )} + + + {/* Provider credentials list */} +
+ {providerConfigs.map((config) => { + const schema = createSchemaFromProviderConfig(config); + const isSelected = !!selectedCredentials[config.provider]; + + return ( +
+
+
+ {`${config.provider} +
+ + {toDisplayName(config.provider)} + +
+ {isSelected ? ( + + ) : ( + + )} + {isSelected && ( + + Connected + + )} +
+ + + handleCredentialSelect(config.provider, credMeta) + } + showTitle={false} + className="mb-0" + /> +
+ ); + })} +
+ + {/* Action buttons */} +
+ + +
+ + {/* Link to integrations settings */} + + You can view and manage all your integrations in your{" "} + + integration settings + + . + +
+
+
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs.tsx index e63105c751..07350fb610 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs.tsx @@ -15,13 +15,14 @@ import { HostScopedCredentialsModal } from "./components/HotScopedCredentialsMod import { OAuthFlowWaitingModal } from "./components/OAuthWaitingModal/OAuthWaitingModal"; import { PasswordCredentialsModal } from "./components/PasswordCredentialsModal/PasswordCredentialsModal"; import { getCredentialDisplayName } from "./helpers"; -import { useCredentialsInputs } from "./useCredentialsInputs"; - -type UseCredentialsInputsReturn = ReturnType; +import { + CredentialsInputState, + useCredentialsInput, +} from "./useCredentialsInput"; function isLoaded( - data: UseCredentialsInputsReturn, -): data is Extract { + data: CredentialsInputState, +): data is Extract { return data.isLoading === false; } @@ -33,21 +34,23 @@ type Props = { onSelectCredentials: (newValue?: CredentialsMetaInput) => void; onLoaded?: (loaded: boolean) => void; readOnly?: boolean; + showTitle?: boolean; }; export function CredentialsInput({ schema, className, - selectedCredentials, - onSelectCredentials, + selectedCredentials: selectedCredential, + onSelectCredentials: onSelectCredential, siblingInputs, onLoaded, readOnly = false, + showTitle = true, }: Props) { - const hookData = useCredentialsInputs({ + const hookData = useCredentialsInput({ schema, - selectedCredentials, - onSelectCredentials, + selectedCredential, + onSelectCredential, siblingInputs, onLoaded, readOnly, @@ -89,12 +92,14 @@ export function CredentialsInput({ return (
-
- {displayName} credentials - {schema.description && ( - - )} -
+ {showTitle && ( +
+ {displayName} credentials + {schema.description && ( + + )} +
+ )} {hasCredentialsToShow ? ( <> @@ -103,7 +108,7 @@ export function CredentialsInput({ credentials={credentialsToShow} provider={provider} displayName={displayName} - selectedCredentials={selectedCredentials} + selectedCredentials={selectedCredential} onSelectCredential={handleCredentialSelect} readOnly={readOnly} /> @@ -164,7 +169,7 @@ export function CredentialsInput({ open={isAPICredentialsModalOpen} onClose={() => setAPICredentialsModalOpen(false)} onCredentialsCreate={(credsMeta) => { - onSelectCredentials(credsMeta); + onSelectCredential(credsMeta); setAPICredentialsModalOpen(false); }} siblingInputs={siblingInputs} @@ -183,7 +188,7 @@ export function CredentialsInput({ open={isUserPasswordCredentialsModalOpen} onClose={() => setUserPasswordCredentialsModalOpen(false)} onCredentialsCreate={(creds) => { - onSelectCredentials(creds); + onSelectCredential(creds); setUserPasswordCredentialsModalOpen(false); }} siblingInputs={siblingInputs} @@ -195,7 +200,7 @@ export function CredentialsInput({ open={isHostScopedCredentialsModalOpen} onClose={() => setHostScopedCredentialsModalOpen(false)} onCredentialsCreate={(creds) => { - onSelectCredentials(creds); + onSelectCredential(creds); setHostScopedCredentialsModalOpen(false); }} siblingInputs={siblingInputs} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/useCredentialsInputs.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/useCredentialsInput.ts similarity index 76% rename from autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/useCredentialsInputs.ts rename to autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/useCredentialsInput.ts index 460980c10b..6f5ca48126 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/useCredentialsInputs.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/useCredentialsInput.ts @@ -5,32 +5,33 @@ import { BlockIOCredentialsSubSchema, CredentialsMetaInput, } from "@/lib/autogpt-server-api/types"; -import { CredentialsProvidersContext } from "@/providers/agent-credentials/credentials-provider"; import { useQueryClient } from "@tanstack/react-query"; -import { useContext, useEffect, useMemo, useState } from "react"; +import { useEffect, useMemo, useState } from "react"; import { getActionButtonText, OAUTH_TIMEOUT_MS, OAuthPopupResultMessage, } from "./helpers"; -type Args = { +export type CredentialsInputState = ReturnType; + +type Params = { schema: BlockIOCredentialsSubSchema; - selectedCredentials?: CredentialsMetaInput; - onSelectCredentials: (newValue?: CredentialsMetaInput) => void; + selectedCredential?: CredentialsMetaInput; + onSelectCredential: (newValue?: CredentialsMetaInput) => void; siblingInputs?: Record; onLoaded?: (loaded: boolean) => void; readOnly?: boolean; }; -export function useCredentialsInputs({ +export function useCredentialsInput({ schema, - selectedCredentials, - onSelectCredentials, + selectedCredential, + onSelectCredential, siblingInputs, onLoaded, readOnly = false, -}: Args) { +}: Params) { const [isAPICredentialsModalOpen, setAPICredentialsModalOpen] = 
useState(false); const [ @@ -51,7 +52,6 @@ export function useCredentialsInputs({ const api = useBackendAPI(); const queryClient = useQueryClient(); const credentials = useCredentials(schema, siblingInputs); - const allProviders = useContext(CredentialsProvidersContext); const deleteCredentialsMutation = useDeleteV1DeleteCredentials({ mutation: { @@ -63,57 +63,49 @@ export function useCredentialsInputs({ queryKey: [`/api/integrations/${credentials?.provider}/credentials`], }); setCredentialToDelete(null); - if (selectedCredentials?.id === credentialToDelete?.id) { - onSelectCredentials(undefined); + if (selectedCredential?.id === credentialToDelete?.id) { + onSelectCredential(undefined); } }, }, }); - const rawProvider = credentials - ? allProviders?.[credentials.provider as keyof typeof allProviders] - : null; - useEffect(() => { if (onLoaded) { onLoaded(Boolean(credentials && credentials.isLoading === false)); } }, [credentials, onLoaded]); + // Unselect credential if not available useEffect(() => { if (readOnly) return; if (!credentials || !("savedCredentials" in credentials)) return; if ( - selectedCredentials && - !credentials.savedCredentials.some((c) => c.id === selectedCredentials.id) + selectedCredential && + !credentials.savedCredentials.some((c) => c.id === selectedCredential.id) ) { - onSelectCredentials(undefined); + onSelectCredential(undefined); } - }, [credentials, selectedCredentials, onSelectCredentials, readOnly]); + }, [credentials, selectedCredential, onSelectCredential, readOnly]); - const { singleCredential } = useMemo(() => { + // The available credential, if there is only one + const singleCredential = useMemo(() => { if (!credentials || !("savedCredentials" in credentials)) { - return { - singleCredential: null, - }; + return null; } - const single = - credentials.savedCredentials.length === 1 - ? credentials.savedCredentials[0] - : null; - - return { - singleCredential: single, - }; + return credentials.savedCredentials.length === 1 + ? 
credentials.savedCredentials[0] + : null; }, [credentials]); + // Auto-select the one available credential useEffect(() => { if (readOnly) return; - if (singleCredential && !selectedCredentials) { - onSelectCredentials(singleCredential); + if (singleCredential && !selectedCredential) { + onSelectCredential(singleCredential); } - }, [singleCredential, selectedCredentials, onSelectCredentials, readOnly]); + }, [singleCredential, selectedCredential, onSelectCredential, readOnly]); if ( !credentials || @@ -136,25 +128,6 @@ export function useCredentialsInputs({ oAuthCallback, } = credentials; - const allSavedCredentials = rawProvider?.savedCredentials || savedCredentials; - - const credentialsToShow = (() => { - const creds = [...allSavedCredentials]; - if ( - !readOnly && - selectedCredentials && - !creds.some((c) => c.id === selectedCredentials.id) - ) { - creds.push({ - id: selectedCredentials.id, - type: selectedCredentials.type, - title: selectedCredentials.title || "Selected credential", - provider: provider, - } as any); - } - return creds; - })(); - async function handleOAuthLogin() { setOAuthError(null); const { login_url, state_token } = await api.oAuthLogin( @@ -207,7 +180,31 @@ export function useCredentialsInputs({ console.debug("Processing OAuth callback"); const credentials = await oAuthCallback(e.data.code, e.data.state); console.debug("OAuth callback processed successfully"); - onSelectCredentials({ + + // Check if the credential's scopes match the required scopes + const requiredScopes = schema.credentials_scopes; + if (requiredScopes && requiredScopes.length > 0) { + const grantedScopes = new Set(credentials.scopes || []); + const hasAllRequiredScopes = new Set(requiredScopes).isSubsetOf( + grantedScopes, + ); + + if (!hasAllRequiredScopes) { + console.error( + `Newly created OAuth credential for ${providerName} has insufficient scopes. Required:`, + requiredScopes, + "Granted:", + credentials.scopes, + ); + setOAuthError( + "Connection failed: the granted permissions don't match what's required. 
" + + "Please contact the application administrator.", + ); + return; + } + } + + onSelectCredential({ id: credentials.id, type: "oauth2", title: credentials.title, @@ -253,9 +250,9 @@ export function useCredentialsInputs({ } function handleCredentialSelect(credentialId: string) { - const selectedCreds = credentialsToShow.find((c) => c.id === credentialId); + const selectedCreds = savedCredentials.find((c) => c.id === credentialId); if (selectedCreds) { - onSelectCredentials({ + onSelectCredential({ id: selectedCreds.id, type: selectedCreds.type, provider: provider, @@ -285,8 +282,8 @@ export function useCredentialsInputs({ supportsOAuth2, supportsUserPassword, supportsHostScoped, - credentialsToShow, - selectedCredentials, + credentialsToShow: savedCredentials, + selectedCredential, oAuthError, isAPICredentialsModalOpen, isUserPasswordCredentialsModalOpen, @@ -300,7 +297,7 @@ export function useCredentialsInputs({ supportsApiKey, supportsUserPassword, supportsHostScoped, - credentialsToShow.length > 0, + savedCredentials.length > 0, ), setAPICredentialsModalOpen, setUserPasswordCredentialsModalOpen, @@ -311,7 +308,7 @@ export function useCredentialsInputs({ handleDeleteCredential, handleDeleteConfirm, handleOAuthLogin, - onSelectCredentials, + onSelectCredential, schema, siblingInputs, }; diff --git a/autogpt_platform/frontend/src/app/(platform)/login/page.tsx b/autogpt_platform/frontend/src/app/(platform)/login/page.tsx index 3f06e7f429..b670be5127 100644 --- a/autogpt_platform/frontend/src/app/(platform)/login/page.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/login/page.tsx @@ -11,8 +11,16 @@ import { environment } from "@/services/environment"; import { LoadingLogin } from "./components/LoadingLogin"; import { useLoginPage } from "./useLoginPage"; import { MobileWarningBanner } from "@/components/auth/MobileWarningBanner"; +import { useSearchParams } from "next/navigation"; export default function LoginPage() { + const searchParams = useSearchParams(); + const nextUrl = searchParams.get("next"); + // Preserve next parameter when switching between login/signup + const signupHref = nextUrl + ? 
`/signup?next=${encodeURIComponent(nextUrl)}` + : "/signup"; + const { user, form, @@ -108,7 +116,7 @@ export default function LoginPage() { diff --git a/autogpt_platform/frontend/src/app/(platform)/login/useLoginPage.ts b/autogpt_platform/frontend/src/app/(platform)/login/useLoginPage.ts index a1e8b5a92c..656e1febc2 100644 --- a/autogpt_platform/frontend/src/app/(platform)/login/useLoginPage.ts +++ b/autogpt_platform/frontend/src/app/(platform)/login/useLoginPage.ts @@ -3,7 +3,7 @@ import { useSupabase } from "@/lib/supabase/hooks/useSupabase"; import { environment } from "@/services/environment"; import { loginFormSchema, LoginProvider } from "@/types/auth"; import { zodResolver } from "@hookform/resolvers/zod"; -import { useRouter } from "next/navigation"; +import { useRouter, useSearchParams } from "next/navigation"; import { useEffect, useState } from "react"; import { useForm } from "react-hook-form"; import z from "zod"; @@ -13,6 +13,7 @@ export function useLoginPage() { const { supabase, user, isUserLoading, isLoggedIn } = useSupabase(); const [feedback, setFeedback] = useState(null); const router = useRouter(); + const searchParams = useSearchParams(); const { toast } = useToast(); const [isLoading, setIsLoading] = useState(false); const [isLoggingIn, setIsLoggingIn] = useState(false); @@ -20,11 +21,14 @@ export function useLoginPage() { const [showNotAllowedModal, setShowNotAllowedModal] = useState(false); const isCloudEnv = environment.isCloud(); + // Get redirect destination from 'next' query parameter + const nextUrl = searchParams.get("next"); + useEffect(() => { if (isLoggedIn && !isLoggingIn) { - router.push("/marketplace"); + router.push(nextUrl || "/marketplace"); } - }, [isLoggedIn, isLoggingIn]); + }, [isLoggedIn, isLoggingIn, nextUrl, router]); const form = useForm>({ resolver: zodResolver(loginFormSchema), @@ -39,10 +43,16 @@ export function useLoginPage() { setIsLoggingIn(true); try { + // Include next URL in OAuth flow if present + const callbackUrl = nextUrl + ? 
`/auth/callback?next=${encodeURIComponent(nextUrl)}` + : `/auth/callback`; + const fullCallbackUrl = `${window.location.origin}${callbackUrl}`; + const response = await fetch("/api/auth/provider", { method: "POST", headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ provider }), + body: JSON.stringify({ provider, redirectTo: fullCallbackUrl }), }); if (!response.ok) { @@ -83,7 +93,9 @@ export function useLoginPage() { throw new Error(result.error || "Login failed"); } - if (result.onboarding) { + if (nextUrl) { + router.replace(nextUrl); + } else if (result.onboarding) { router.replace("/onboarding"); } else { router.replace("/marketplace"); diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/components/APIKeySection/APIKeySection.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeySection/APIKeySection.tsx similarity index 100% rename from autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/components/APIKeySection/APIKeySection.tsx rename to autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeySection/APIKeySection.tsx diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/components/APIKeySection/useAPISection.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeySection/useAPISection.ts similarity index 100% rename from autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/components/APIKeySection/useAPISection.tsx rename to autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeySection/useAPISection.ts diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/components/APIKeysModals/APIKeysModals.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeysModals/APIKeysModals.tsx similarity index 100% rename from autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/components/APIKeysModals/APIKeysModals.tsx rename to autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeysModals/APIKeysModals.tsx diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/components/APIKeysModals/useAPIkeysModals.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeysModals/useAPIkeysModals.ts similarity index 100% rename from autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/components/APIKeysModals/useAPIkeysModals.tsx rename to autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeysModals/useAPIkeysModals.ts diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/page.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/page.tsx similarity index 94% rename from autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/page.tsx rename to autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/page.tsx index ca66f0fb85..aedc3cc60c 100644 --- a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/page.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/page.tsx @@ -1,5 +1,5 @@ import { Metadata } from "next/types"; -import { APIKeysSection } from "@/app/(platform)/profile/(user)/api_keys/components/APIKeySection/APIKeySection"; +import { APIKeysSection } from 
"@/app/(platform)/profile/(user)/api-keys/components/APIKeySection/APIKeySection"; import { Card, CardContent, diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/layout.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/layout.tsx index 800028a49f..ca0e846557 100644 --- a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/layout.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/layout.tsx @@ -3,13 +3,14 @@ import * as React from "react"; import { Sidebar } from "@/components/__legacy__/Sidebar"; import { - IconDashboardLayout, - IconIntegrations, - IconProfile, - IconSliders, - IconCoin, -} from "@/components/__legacy__/ui/icons"; -import { KeyIcon } from "lucide-react"; + AppWindowIcon, + CoinsIcon, + KeyIcon, + PlugsIcon, + SlidersHorizontalIcon, + StorefrontIcon, + UserCircleIcon, +} from "@phosphor-icons/react"; import { useGetFlag, Flag } from "@/services/feature-flags/use-get-flag"; export default function Layout({ children }: { children: React.ReactNode }) { @@ -18,39 +19,44 @@ export default function Layout({ children }: { children: React.ReactNode }) { const sidebarLinkGroups = [ { links: [ + { + text: "Profile", + href: "/profile", + icon: , + }, { text: "Creator Dashboard", href: "/profile/dashboard", - icon: , + icon: , }, - ...(isPaymentEnabled + ...(isPaymentEnabled || true ? [ { text: "Billing", href: "/profile/credits", - icon: , + icon: , }, ] : []), { text: "Integrations", href: "/profile/integrations", - icon: , - }, - { - text: "API Keys", - href: "/profile/api_keys", - icon: , - }, - { - text: "Profile", - href: "/profile", - icon: , + icon: , }, { text: "Settings", href: "/profile/settings", - icon: , + icon: , + }, + { + text: "API Keys", + href: "/profile/api-keys", + icon: , + }, + { + text: "OAuth Apps", + href: "/profile/oauth-apps", + icon: , }, ], }, diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/OAuthAppsSection.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/OAuthAppsSection.tsx new file mode 100644 index 0000000000..a864199348 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/OAuthAppsSection.tsx @@ -0,0 +1,147 @@ +"use client"; + +import { useRef } from "react"; +import { UploadIcon, ImageIcon, PowerIcon } from "@phosphor-icons/react"; +import { Button } from "@/components/atoms/Button/Button"; +import { Badge } from "@/components/atoms/Badge/Badge"; +import { useOAuthApps } from "./useOAuthApps"; +import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner"; + +export function OAuthAppsSection() { + const { + oauthApps, + isLoading, + updatingAppId, + uploadingAppId, + handleToggleStatus, + handleUploadLogo, + } = useOAuthApps(); + + const fileInputRefs = useRef<{ [key: string]: HTMLInputElement | null }>({}); + + const handleFileChange = ( + appId: string, + event: React.ChangeEvent, + ) => { + const file = event.target.files?.[0]; + if (file) { + handleUploadLogo(appId, file); + } + // Reset the input so the same file can be selected again + event.target.value = ""; + }; + + if (isLoading) { + return ( +
+ +
+ ); + } + + if (oauthApps.length === 0) { + return ( +
+

You don't have any OAuth applications.

+

+ OAuth applications can currently not be registered + via the API. Contact the system administrator to request an OAuth app + registration. +

+
+ ); + } + + return ( +
+ {oauthApps.map((app) => ( +
+ {/* Header: Logo, Name, Status */} +
+
+ {app.logo_url ? ( + // eslint-disable-next-line @next/next/no-img-element + {`${app.name} + ) : ( + + )} +
+
+
+

{app.name}

+ + {app.is_active ? "Active" : "Disabled"} + +
+ {app.description && ( +

+ {app.description} +

+ )} +
+
+ + {/* Client ID */} +
+ + Client ID + + + {app.client_id} + +
+ + {/* Footer: Created date and Actions */} +
+ + Created {new Date(app.created_at).toLocaleDateString()} + +
+ + { + fileInputRefs.current[app.id] = el; + }} + onChange={(e) => handleFileChange(app.id, e)} + accept="image/jpeg,image/png,image/webp" + className="hidden" + /> + +
+
+
+ ))} +
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/useOAuthApps.ts b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/useOAuthApps.ts new file mode 100644 index 0000000000..5b5afc5783 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/useOAuthApps.ts @@ -0,0 +1,110 @@ +"use client"; + +import { useState } from "react"; +import { + useGetOauthListMyOauthApps, + usePatchOauthUpdateAppStatus, + usePostOauthUploadAppLogo, + getGetOauthListMyOauthAppsQueryKey, +} from "@/app/api/__generated__/endpoints/oauth/oauth"; +import { OAuthApplicationInfo } from "@/app/api/__generated__/models/oAuthApplicationInfo"; +import { okData } from "@/app/api/helpers"; +import { useToast } from "@/components/molecules/Toast/use-toast"; +import { getQueryClient } from "@/lib/react-query/queryClient"; + +export const useOAuthApps = () => { + const queryClient = getQueryClient(); + const { toast } = useToast(); + const [updatingAppId, setUpdatingAppId] = useState(null); + const [uploadingAppId, setUploadingAppId] = useState(null); + + const { data: oauthAppsResponse, isLoading } = useGetOauthListMyOauthApps({ + query: { select: okData }, + }); + + const { mutateAsync: updateStatus } = usePatchOauthUpdateAppStatus({ + mutation: { + onSettled: () => { + return queryClient.invalidateQueries({ + queryKey: getGetOauthListMyOauthAppsQueryKey(), + }); + }, + }, + }); + + const { mutateAsync: uploadLogo } = usePostOauthUploadAppLogo({ + mutation: { + onSettled: () => { + return queryClient.invalidateQueries({ + queryKey: getGetOauthListMyOauthAppsQueryKey(), + }); + }, + }, + }); + + const handleToggleStatus = async (appId: string, currentStatus: boolean) => { + try { + setUpdatingAppId(appId); + const result = await updateStatus({ + appId, + data: { is_active: !currentStatus }, + }); + + if (result.status === 200) { + toast({ + title: "Success", + description: `Application ${result.data.is_active ? "enabled" : "disabled"} successfully`, + }); + } else { + throw new Error("Failed to update status"); + } + } catch { + toast({ + title: "Error", + description: "Failed to update application status", + variant: "destructive", + }); + } finally { + setUpdatingAppId(null); + } + }; + + const handleUploadLogo = async (appId: string, file: File) => { + try { + setUploadingAppId(appId); + const result = await uploadLogo({ + appId, + data: { file }, + }); + + if (result.status === 200) { + toast({ + title: "Success", + description: "Logo uploaded successfully", + }); + } else { + throw new Error("Failed to upload logo"); + } + } catch (error) { + console.error("Failed to upload logo:", error); + const errorMessage = + error instanceof Error ? error.message : "Failed to upload logo"; + toast({ + title: "Error", + description: errorMessage, + variant: "destructive", + }); + } finally { + setUploadingAppId(null); + } + }; + + return { + oauthApps: oauthAppsResponse ?? 
[], + isLoading, + updatingAppId, + uploadingAppId, + handleToggleStatus, + handleUploadLogo, + }; +}; diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/page.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/page.tsx new file mode 100644 index 0000000000..4251bb954e --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/page.tsx @@ -0,0 +1,21 @@ +import { Metadata } from "next/types"; +import { Text } from "@/components/atoms/Text/Text"; +import { OAuthAppsSection } from "./components/OAuthAppsSection"; + +export const metadata: Metadata = { title: "OAuth Apps - AutoGPT Platform" }; + +const OAuthAppsPage = () => { + return ( +
+
+ OAuth Applications + + Manage your OAuth applications that use the AutoGPT Platform API + +
+ +
+ ); +}; + +export default OAuthAppsPage; diff --git a/autogpt_platform/frontend/src/app/(platform)/signup/page.tsx b/autogpt_platform/frontend/src/app/(platform)/signup/page.tsx index 53c47eeba7..b565699426 100644 --- a/autogpt_platform/frontend/src/app/(platform)/signup/page.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/signup/page.tsx @@ -21,8 +21,16 @@ import { WarningOctagonIcon } from "@phosphor-icons/react/dist/ssr"; import { LoadingSignup } from "./components/LoadingSignup"; import { useSignupPage } from "./useSignupPage"; import { MobileWarningBanner } from "@/components/auth/MobileWarningBanner"; +import { useSearchParams } from "next/navigation"; export default function SignupPage() { + const searchParams = useSearchParams(); + const nextUrl = searchParams.get("next"); + // Preserve next parameter when switching between login/signup + const loginHref = nextUrl + ? `/login?next=${encodeURIComponent(nextUrl)}` + : "/login"; + const { form, feedback, @@ -186,7 +194,7 @@ export default function SignupPage() { diff --git a/autogpt_platform/frontend/src/app/(platform)/signup/useSignupPage.ts b/autogpt_platform/frontend/src/app/(platform)/signup/useSignupPage.ts index 23ee8fb57c..e6d7c68aef 100644 --- a/autogpt_platform/frontend/src/app/(platform)/signup/useSignupPage.ts +++ b/autogpt_platform/frontend/src/app/(platform)/signup/useSignupPage.ts @@ -3,7 +3,7 @@ import { useSupabase } from "@/lib/supabase/hooks/useSupabase"; import { environment } from "@/services/environment"; import { LoginProvider, signupFormSchema } from "@/types/auth"; import { zodResolver } from "@hookform/resolvers/zod"; -import { useRouter } from "next/navigation"; +import { useRouter, useSearchParams } from "next/navigation"; import { useEffect, useState } from "react"; import { useForm } from "react-hook-form"; import z from "zod"; @@ -14,17 +14,21 @@ export function useSignupPage() { const [feedback, setFeedback] = useState(null); const { toast } = useToast(); const router = useRouter(); + const searchParams = useSearchParams(); const [isLoading, setIsLoading] = useState(false); const [isSigningUp, setIsSigningUp] = useState(false); const [isGoogleLoading, setIsGoogleLoading] = useState(false); const [showNotAllowedModal, setShowNotAllowedModal] = useState(false); const isCloudEnv = environment.isCloud(); + // Get redirect destination from 'next' query parameter + const nextUrl = searchParams.get("next"); + useEffect(() => { if (isLoggedIn && !isSigningUp) { - router.push("/marketplace"); + router.push(nextUrl || "/marketplace"); } - }, [isLoggedIn, isSigningUp]); + }, [isLoggedIn, isSigningUp, nextUrl, router]); const form = useForm>({ resolver: zodResolver(signupFormSchema), @@ -41,10 +45,16 @@ export function useSignupPage() { setIsSigningUp(true); try { + // Include next URL in OAuth flow if present + const callbackUrl = nextUrl + ? 
`/auth/callback?next=${encodeURIComponent(nextUrl)}` + : `/auth/callback`; + const fullCallbackUrl = `${window.location.origin}${callbackUrl}`; + const response = await fetch("/api/auth/provider", { method: "POST", headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ provider }), + body: JSON.stringify({ provider, redirectTo: fullCallbackUrl }), }); if (!response.ok) { @@ -118,8 +128,9 @@ export function useSignupPage() { return; } - const next = result.next || "/"; - if (next) router.replace(next); + // Prefer the URL's next parameter, then result.next (for onboarding), then default + const redirectTo = nextUrl || result.next || "/"; + router.replace(redirectTo); } catch (error) { setIsLoading(false); setIsSigningUp(false); diff --git a/autogpt_platform/frontend/src/app/api/mutators/custom-mutator.ts b/autogpt_platform/frontend/src/app/api/mutators/custom-mutator.ts index 0a31eb6942..315b68ab87 100644 --- a/autogpt_platform/frontend/src/app/api/mutators/custom-mutator.ts +++ b/autogpt_platform/frontend/src/app/api/mutators/custom-mutator.ts @@ -113,6 +113,19 @@ export const customMutator = async < body: data, }); + // Check if response is a redirect (3xx) and redirect is allowed + const allowRedirect = requestOptions.redirect !== "error"; + const isRedirect = response.status >= 300 && response.status < 400; + + // For redirect responses, return early without trying to parse body + if (allowRedirect && isRedirect) { + return { + status: response.status, + data: null, + headers: response.headers, + } as T; + } + if (!response.ok) { let responseData: any = null; try { diff --git a/autogpt_platform/frontend/src/app/api/openapi.json b/autogpt_platform/frontend/src/app/api/openapi.json index f8c5563476..3556e2f5c7 100644 --- a/autogpt_platform/frontend/src/app/api/openapi.json +++ b/autogpt_platform/frontend/src/app/api/openapi.json @@ -5370,6 +5370,369 @@ } } }, + "/api/oauth/app/{client_id}": { + "get": { + "tags": ["oauth"], + "summary": "Get Oauth App Info", + "description": "Get public information about an OAuth application.\n\nThis endpoint is used by the consent screen to display application details\nto the user before they authorize access.\n\nReturns:\n- name: Application name\n- description: Application description (if provided)\n- scopes: List of scopes the application is allowed to request", + "operationId": "getOauthGetOauthAppInfo", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "client_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Client Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OAuthApplicationPublicInfo" + } + } + } + }, + "404": { "description": "Application not found or disabled" }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + } + } + }, + "/api/oauth/authorize": { + "post": { + "tags": ["oauth"], + "summary": "Authorize", + "description": "OAuth 2.0 Authorization Endpoint\n\nUser must be logged in (authenticated with Supabase JWT).\nThis endpoint creates an authorization code and returns a redirect URL.\n\nPKCE (Proof Key for Code Exchange) is REQUIRED for all authorization requests.\n\nThe frontend consent screen should call this endpoint after the user approves,\nthen 
redirect the user to the returned `redirect_url`.\n\nRequest Body:\n- client_id: The OAuth application's client ID\n- redirect_uri: Where to redirect after authorization (must match registered URI)\n- scopes: List of permissions (e.g., \"EXECUTE_GRAPH READ_GRAPH\")\n- state: Anti-CSRF token provided by client (will be returned in redirect)\n- response_type: Must be \"code\" (for authorization code flow)\n- code_challenge: PKCE code challenge (required)\n- code_challenge_method: \"S256\" (recommended) or \"plain\"\n\nReturns:\n- redirect_url: The URL to redirect the user to (includes authorization code)\n\nError cases return a redirect_url with error parameters, or raise HTTPException\nfor critical errors (like invalid redirect_uri).", + "operationId": "postOauthAuthorize", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/AuthorizeRequest" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/AuthorizeResponse" } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/oauth/token": { + "post": { + "tags": ["oauth"], + "summary": "Token", + "description": "OAuth 2.0 Token Endpoint\n\nExchanges authorization code or refresh token for access token.\n\nGrant Types:\n1. authorization_code: Exchange authorization code for tokens\n - Required: grant_type, code, redirect_uri, client_id, client_secret\n - Optional: code_verifier (required if PKCE was used)\n\n2. 
refresh_token: Exchange refresh token for new access token\n - Required: grant_type, refresh_token, client_id, client_secret\n\nReturns:\n- access_token: Bearer token for API access (1 hour TTL)\n- token_type: \"Bearer\"\n- expires_in: Seconds until access token expires\n- refresh_token: Token for refreshing access (30 days TTL)\n- scopes: List of scopes", + "operationId": "postOauthToken", + "requestBody": { + "content": { + "application/json": { + "schema": { + "anyOf": [ + { "$ref": "#/components/schemas/TokenRequestByCode" }, + { "$ref": "#/components/schemas/TokenRequestByRefreshToken" } + ], + "title": "Request" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/TokenResponse" } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/introspect": { + "post": { + "tags": ["oauth"], + "summary": "Introspect", + "description": "OAuth 2.0 Token Introspection Endpoint (RFC 7662)\n\nAllows clients to check if a token is valid and get its metadata.\n\nReturns:\n- active: Whether the token is currently active\n- scopes: List of authorized scopes (if active)\n- client_id: The client the token was issued to (if active)\n- user_id: The user the token represents (if active)\n- exp: Expiration timestamp (if active)\n- token_type: \"access_token\" or \"refresh_token\" (if active)", + "operationId": "postOauthIntrospect", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_postOauthIntrospect" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TokenIntrospectionResult" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/revoke": { + "post": { + "tags": ["oauth"], + "summary": "Revoke", + "description": "OAuth 2.0 Token Revocation Endpoint (RFC 7009)\n\nAllows clients to revoke an access or refresh token.\n\nNote: Revoking a refresh token does NOT revoke associated access tokens.\nRevoking an access token does NOT revoke the associated refresh token.", + "operationId": "postOauthRevoke", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/Body_postOauthRevoke" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/apps/mine": { + "get": { + "tags": ["oauth"], + "summary": "List My Oauth Apps", + "description": "List all OAuth applications owned by the current user.\n\nReturns a list of OAuth applications with their details including:\n- id, name, description, logo_url\n- client_id (public identifier)\n- redirect_uris, grant_types, scopes\n- is_active status\n- created_at, updated_at timestamps\n\nNote: client_secret is never returned for security reasons.", + "operationId": "getOauthListMyOauthApps", + "responses": { + "200": { 
+ "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { + "$ref": "#/components/schemas/OAuthApplicationInfo" + }, + "type": "array", + "title": "Response Getoauthlistmyoauthapps" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/oauth/apps/{app_id}/status": { + "patch": { + "tags": ["oauth"], + "summary": "Update App Status", + "description": "Enable or disable an OAuth application.\n\nOnly the application owner can update the status.\nWhen disabled, the application cannot be used for new authorizations\nand existing access tokens will fail validation.\n\nReturns the updated application info.", + "operationId": "patchOauthUpdateAppStatus", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "app_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "App Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_patchOauthUpdateAppStatus" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OAuthApplicationInfo" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + } + } + }, + "/api/oauth/apps/{app_id}/logo": { + "patch": { + "tags": ["oauth"], + "summary": "Update App Logo", + "description": "Update the logo URL for an OAuth application.\n\nOnly the application owner can update the logo.\nThe logo should be uploaded first using the media upload endpoint,\nthen this endpoint is called with the resulting URL.\n\nLogo requirements:\n- Must be square (1:1 aspect ratio)\n- Minimum 512x512 pixels\n- Maximum 2048x2048 pixels\n\nReturns the updated application info.", + "operationId": "patchOauthUpdateAppLogo", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "app_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "App Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/UpdateAppLogoRequest" } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OAuthApplicationInfo" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + } + } + }, + "/api/oauth/apps/{app_id}/logo/upload": { + "post": { + "tags": ["oauth"], + "summary": "Upload App Logo", + "description": "Upload a logo image for an OAuth application.\n\nRequirements:\n- Image must be square (1:1 aspect ratio)\n- Minimum 512x512 pixels\n- Maximum 2048x2048 pixels\n- Allowed formats: JPEG, PNG, WebP\n- Maximum file size: 3MB\n\nThe image is uploaded to cloud storage and the app's logoUrl is updated.\nReturns the updated application info.", + "operationId": "postOauthUploadAppLogo", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "app_id", + "in": "path", + 
"required": true, + "schema": { "type": "string", "title": "App Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "multipart/form-data": { + "schema": { + "$ref": "#/components/schemas/Body_postOauthUploadAppLogo" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OAuthApplicationInfo" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + } + } + }, "/health": { "get": { "tags": ["health"], @@ -5418,29 +5781,30 @@ }, "APIKeyInfo": { "properties": { - "id": { "type": "string", "title": "Id" }, - "name": { "type": "string", "title": "Name" }, - "head": { - "type": "string", - "title": "Head", - "description": "The first 8 characters of the key" - }, - "tail": { - "type": "string", - "title": "Tail", - "description": "The last 8 characters of the key" - }, - "status": { "$ref": "#/components/schemas/APIKeyStatus" }, - "permissions": { + "user_id": { "type": "string", "title": "User Id" }, + "scopes": { "items": { "$ref": "#/components/schemas/APIKeyPermission" }, "type": "array", - "title": "Permissions" + "title": "Scopes" + }, + "type": { + "type": "string", + "const": "api_key", + "title": "Type", + "default": "api_key" }, "created_at": { "type": "string", "format": "date-time", "title": "Created At" }, + "expires_at": { + "anyOf": [ + { "type": "string", "format": "date-time" }, + { "type": "null" } + ], + "title": "Expires At" + }, "last_used_at": { "anyOf": [ { "type": "string", "format": "date-time" }, @@ -5455,28 +5819,41 @@ ], "title": "Revoked At" }, + "id": { "type": "string", "title": "Id" }, + "name": { "type": "string", "title": "Name" }, + "head": { + "type": "string", + "title": "Head", + "description": "The first 8 characters of the key" + }, + "tail": { + "type": "string", + "title": "Tail", + "description": "The last 8 characters of the key" + }, + "status": { "$ref": "#/components/schemas/APIKeyStatus" }, "description": { "anyOf": [{ "type": "string" }, { "type": "null" }], "title": "Description" - }, - "user_id": { "type": "string", "title": "User Id" } + } }, "type": "object", "required": [ + "user_id", + "scopes", + "created_at", "id", "name", "head", "tail", - "status", - "permissions", - "created_at", - "user_id" + "status" ], "title": "APIKeyInfo" }, "APIKeyPermission": { "type": "string", "enum": [ + "IDENTITY", "EXECUTE_GRAPH", "READ_GRAPH", "EXECUTE_BLOCK", @@ -5614,6 +5991,72 @@ "required": ["answer", "documents", "success"], "title": "ApiResponse" }, + "AuthorizeRequest": { + "properties": { + "client_id": { + "type": "string", + "title": "Client Id", + "description": "Client identifier" + }, + "redirect_uri": { + "type": "string", + "title": "Redirect Uri", + "description": "Redirect URI" + }, + "scopes": { + "items": { "type": "string" }, + "type": "array", + "title": "Scopes", + "description": "List of scopes" + }, + "state": { + "type": "string", + "title": "State", + "description": "Anti-CSRF token from client" + }, + "response_type": { + "type": "string", + "title": "Response Type", + "description": "Must be 'code' for authorization code flow", + "default": "code" + }, + "code_challenge": { + "type": "string", + "title": "Code Challenge", + "description": "PKCE code challenge (required)" + }, + 
"code_challenge_method": { + "type": "string", + "enum": ["S256", "plain"], + "title": "Code Challenge Method", + "description": "PKCE code challenge method (S256 recommended)", + "default": "S256" + } + }, + "type": "object", + "required": [ + "client_id", + "redirect_uri", + "scopes", + "state", + "code_challenge" + ], + "title": "AuthorizeRequest", + "description": "OAuth 2.0 authorization request" + }, + "AuthorizeResponse": { + "properties": { + "redirect_url": { + "type": "string", + "title": "Redirect Url", + "description": "URL to redirect the user to" + } + }, + "type": "object", + "required": ["redirect_url"], + "title": "AuthorizeResponse", + "description": "OAuth 2.0 authorization response with redirect URL" + }, "AutoTopUpConfig": { "properties": { "amount": { "type": "integer", "title": "Amount" }, @@ -5863,6 +6306,86 @@ "required": ["blocks", "pagination"], "title": "BlockResponse" }, + "Body_patchOauthUpdateAppStatus": { + "properties": { + "is_active": { + "type": "boolean", + "title": "Is Active", + "description": "Whether the app should be active" + } + }, + "type": "object", + "required": ["is_active"], + "title": "Body_patchOauthUpdateAppStatus" + }, + "Body_postOauthIntrospect": { + "properties": { + "token": { + "type": "string", + "title": "Token", + "description": "Token to introspect" + }, + "token_type_hint": { + "anyOf": [ + { "type": "string", "enum": ["access_token", "refresh_token"] }, + { "type": "null" } + ], + "title": "Token Type Hint", + "description": "Hint about token type ('access_token' or 'refresh_token')" + }, + "client_id": { + "type": "string", + "title": "Client Id", + "description": "Client identifier" + }, + "client_secret": { + "type": "string", + "title": "Client Secret", + "description": "Client secret" + } + }, + "type": "object", + "required": ["token", "client_id", "client_secret"], + "title": "Body_postOauthIntrospect" + }, + "Body_postOauthRevoke": { + "properties": { + "token": { + "type": "string", + "title": "Token", + "description": "Token to revoke" + }, + "token_type_hint": { + "anyOf": [ + { "type": "string", "enum": ["access_token", "refresh_token"] }, + { "type": "null" } + ], + "title": "Token Type Hint", + "description": "Hint about token type ('access_token' or 'refresh_token')" + }, + "client_id": { + "type": "string", + "title": "Client Id", + "description": "Client identifier" + }, + "client_secret": { + "type": "string", + "title": "Client Secret", + "description": "Client secret" + } + }, + "type": "object", + "required": ["token", "client_id", "client_secret"], + "title": "Body_postOauthRevoke" + }, + "Body_postOauthUploadAppLogo": { + "properties": { + "file": { "type": "string", "format": "binary", "title": "File" } + }, + "type": "object", + "required": ["file"], + "title": "Body_postOauthUploadAppLogo" + }, "Body_postV1Exchange_oauth_code_for_tokens": { "properties": { "code": { @@ -7855,6 +8378,85 @@ "required": ["provider", "access_token", "scopes"], "title": "OAuth2Credentials" }, + "OAuthApplicationInfo": { + "properties": { + "id": { "type": "string", "title": "Id" }, + "name": { "type": "string", "title": "Name" }, + "description": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Description" + }, + "logo_url": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Logo Url" + }, + "client_id": { "type": "string", "title": "Client Id" }, + "redirect_uris": { + "items": { "type": "string" }, + "type": "array", + "title": "Redirect Uris" + }, + "grant_types": { + "items": { 
"type": "string" }, + "type": "array", + "title": "Grant Types" + }, + "scopes": { + "items": { "$ref": "#/components/schemas/APIKeyPermission" }, + "type": "array", + "title": "Scopes" + }, + "owner_id": { "type": "string", "title": "Owner Id" }, + "is_active": { "type": "boolean", "title": "Is Active" }, + "created_at": { + "type": "string", + "format": "date-time", + "title": "Created At" + }, + "updated_at": { + "type": "string", + "format": "date-time", + "title": "Updated At" + } + }, + "type": "object", + "required": [ + "id", + "name", + "client_id", + "redirect_uris", + "grant_types", + "scopes", + "owner_id", + "is_active", + "created_at", + "updated_at" + ], + "title": "OAuthApplicationInfo", + "description": "OAuth application information (without client secret hash)" + }, + "OAuthApplicationPublicInfo": { + "properties": { + "name": { "type": "string", "title": "Name" }, + "description": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Description" + }, + "logo_url": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Logo Url" + }, + "scopes": { + "items": { "type": "string" }, + "type": "array", + "title": "Scopes" + } + }, + "type": "object", + "required": ["name", "scopes"], + "title": "OAuthApplicationPublicInfo", + "description": "Public information about an OAuth application (for consent screen)" + }, "OnboardingStep": { "type": "string", "enum": [ @@ -9892,6 +10494,134 @@ "required": ["timezone"], "title": "TimezoneResponse" }, + "TokenIntrospectionResult": { + "properties": { + "active": { "type": "boolean", "title": "Active" }, + "scopes": { + "anyOf": [ + { "items": { "type": "string" }, "type": "array" }, + { "type": "null" } + ], + "title": "Scopes" + }, + "client_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Client Id" + }, + "user_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "User Id" + }, + "exp": { + "anyOf": [{ "type": "integer" }, { "type": "null" }], + "title": "Exp" + }, + "token_type": { + "anyOf": [ + { "type": "string", "enum": ["access_token", "refresh_token"] }, + { "type": "null" } + ], + "title": "Token Type" + } + }, + "type": "object", + "required": ["active"], + "title": "TokenIntrospectionResult", + "description": "Result of token introspection (RFC 7662)" + }, + "TokenRequestByCode": { + "properties": { + "grant_type": { + "type": "string", + "const": "authorization_code", + "title": "Grant Type" + }, + "code": { + "type": "string", + "title": "Code", + "description": "Authorization code" + }, + "redirect_uri": { + "type": "string", + "title": "Redirect Uri", + "description": "Redirect URI (must match authorization request)" + }, + "client_id": { "type": "string", "title": "Client Id" }, + "client_secret": { "type": "string", "title": "Client Secret" }, + "code_verifier": { + "type": "string", + "title": "Code Verifier", + "description": "PKCE code verifier" + } + }, + "type": "object", + "required": [ + "grant_type", + "code", + "redirect_uri", + "client_id", + "client_secret", + "code_verifier" + ], + "title": "TokenRequestByCode" + }, + "TokenRequestByRefreshToken": { + "properties": { + "grant_type": { + "type": "string", + "const": "refresh_token", + "title": "Grant Type" + }, + "refresh_token": { "type": "string", "title": "Refresh Token" }, + "client_id": { "type": "string", "title": "Client Id" }, + "client_secret": { "type": "string", "title": "Client Secret" } + }, + "type": "object", + "required": [ + "grant_type", + "refresh_token", + 
"client_id", + "client_secret" + ], + "title": "TokenRequestByRefreshToken" + }, + "TokenResponse": { + "properties": { + "token_type": { + "type": "string", + "const": "Bearer", + "title": "Token Type", + "default": "Bearer" + }, + "access_token": { "type": "string", "title": "Access Token" }, + "access_token_expires_at": { + "type": "string", + "format": "date-time", + "title": "Access Token Expires At" + }, + "refresh_token": { "type": "string", "title": "Refresh Token" }, + "refresh_token_expires_at": { + "type": "string", + "format": "date-time", + "title": "Refresh Token Expires At" + }, + "scopes": { + "items": { "type": "string" }, + "type": "array", + "title": "Scopes" + } + }, + "type": "object", + "required": [ + "access_token", + "access_token_expires_at", + "refresh_token", + "refresh_token_expires_at", + "scopes" + ], + "title": "TokenResponse", + "description": "OAuth 2.0 token response" + }, "TransactionHistory": { "properties": { "transactions": { @@ -9938,6 +10668,18 @@ "required": ["name", "graph_id", "graph_version", "trigger_config"], "title": "TriggeredPresetSetupRequest" }, + "UpdateAppLogoRequest": { + "properties": { + "logo_url": { + "type": "string", + "title": "Logo Url", + "description": "URL of the uploaded logo image" + } + }, + "type": "object", + "required": ["logo_url"], + "title": "UpdateAppLogoRequest" + }, "UpdatePermissionsRequest": { "properties": { "permissions": { diff --git a/autogpt_platform/frontend/src/components/molecules/ErrorCard/ErrorCard.tsx b/autogpt_platform/frontend/src/components/molecules/ErrorCard/ErrorCard.tsx index 4269ae5415..843330b085 100644 --- a/autogpt_platform/frontend/src/components/molecules/ErrorCard/ErrorCard.tsx +++ b/autogpt_platform/frontend/src/components/molecules/ErrorCard/ErrorCard.tsx @@ -7,6 +7,7 @@ import { ActionButtons } from "./components/ActionButtons"; export interface ErrorCardProps { isSuccess?: boolean; + isOurProblem?: boolean; responseError?: { detail?: Array<{ msg: string }> | string; message?: string; @@ -17,15 +18,18 @@ export interface ErrorCardProps { message?: string; }; context?: string; + hint?: string; onRetry?: () => void; className?: string; } export function ErrorCard({ isSuccess = false, + isOurProblem = true, responseError, httpError, context = "data", + hint, onRetry, className = "", }: ErrorCardProps) { @@ -50,13 +54,19 @@ export function ErrorCard({
- - + {isOurProblem && ( + + )}
); diff --git a/autogpt_platform/frontend/src/components/molecules/ErrorCard/components/ErrorMessage.tsx b/autogpt_platform/frontend/src/components/molecules/ErrorCard/components/ErrorMessage.tsx index f232e6ff3f..bfb3726de1 100644 --- a/autogpt_platform/frontend/src/components/molecules/ErrorCard/components/ErrorMessage.tsx +++ b/autogpt_platform/frontend/src/components/molecules/ErrorCard/components/ErrorMessage.tsx @@ -4,9 +4,10 @@ import { Text } from "@/components/atoms/Text/Text"; interface Props { errorMessage: string; context: string; + hint?: string; } -export function ErrorMessage({ errorMessage, context }: Props) { +export function ErrorMessage({ errorMessage, context, hint }: Props) { return (
@@ -17,6 +18,13 @@ export function ErrorMessage({ errorMessage, context }: Props) { {errorMessage}
+ {hint && ( +
+ + {hint} + +
+ )}
); } diff --git a/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts b/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts index 0d8be1df5d..2f27ef126d 100644 --- a/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts +++ b/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts @@ -912,7 +912,7 @@ export interface APIKey { prefix: string; postfix: string; status: APIKeyStatus; - permissions: APIKeyPermission[]; + scopes: APIKeyPermission[]; created_at: string; last_used_at?: string; revoked_at?: string; diff --git a/autogpt_platform/frontend/src/lib/supabase/helpers.ts b/autogpt_platform/frontend/src/lib/supabase/helpers.ts index 7b2d36a0fd..f41c8c2f0f 100644 --- a/autogpt_platform/frontend/src/lib/supabase/helpers.ts +++ b/autogpt_platform/frontend/src/lib/supabase/helpers.ts @@ -4,6 +4,8 @@ import { type CookieOptions } from "@supabase/ssr"; import { SupabaseClient } from "@supabase/supabase-js"; export const PROTECTED_PAGES = [ + "/auth/authorize", + "/auth/integrations", "/monitor", "/build", "/onboarding", @@ -59,14 +61,15 @@ export function hasWebSocketDisconnectIntent(): boolean { // Redirect utilities export function getRedirectPath( - pathname: string, + path: string, // including query strings userRole?: string, ): string | null { - if (shouldRedirectOnLogout(pathname)) { - return "/login"; + if (shouldRedirectOnLogout(path)) { + // Preserve the original path as a 'next' parameter so user can return after login + return `/login?next=${encodeURIComponent(path)}`; } - if (isAdminPage(pathname) && userRole !== "admin") { + if (isAdminPage(path) && userRole !== "admin") { return "/marketplace"; } diff --git a/autogpt_platform/frontend/src/lib/supabase/hooks/helpers.ts b/autogpt_platform/frontend/src/lib/supabase/hooks/helpers.ts index cce4f7a769..95b9e8bbca 100644 --- a/autogpt_platform/frontend/src/lib/supabase/hooks/helpers.ts +++ b/autogpt_platform/frontend/src/lib/supabase/hooks/helpers.ts @@ -77,7 +77,7 @@ export async function fetchUser(): Promise { } interface ValidateSessionParams { - pathname: string; + path: string; currentUser: User | null; } @@ -92,7 +92,7 @@ export async function validateSession( params: ValidateSessionParams, ): Promise { try { - const result = await validateSessionAction(params.pathname); + const result = await validateSessionAction(params.path); if (!result.isValid) { return { @@ -118,7 +118,7 @@ export async function validateSession( }; } catch (error) { console.error("Session validation error:", error); - const redirectPath = getRedirectPath(params.pathname); + const redirectPath = getRedirectPath(params.path); return { isValid: false, redirectPath, @@ -146,7 +146,7 @@ interface StorageEventHandlerParams { event: StorageEvent; api: BackendAPI | null; router: AppRouterInstance | null; - pathname: string; + path: string; } interface StorageEventHandlerResult { @@ -167,7 +167,7 @@ export function handleStorageEvent( params.api.disconnectWebSocket(); } - const redirectPath = getRedirectPath(params.pathname); + const redirectPath = getRedirectPath(params.path); return { shouldLogout: true, diff --git a/autogpt_platform/frontend/src/lib/supabase/hooks/useSupabase.ts b/autogpt_platform/frontend/src/lib/supabase/hooks/useSupabase.ts index 41fdee25a2..5f362397f6 100644 --- a/autogpt_platform/frontend/src/lib/supabase/hooks/useSupabase.ts +++ b/autogpt_platform/frontend/src/lib/supabase/hooks/useSupabase.ts @@ -1,8 +1,8 @@ "use client"; import { useBackendAPI } from "@/lib/autogpt-server-api/context"; -import { 
usePathname, useRouter } from "next/navigation"; -import { useEffect } from "react"; +import { usePathname, useRouter, useSearchParams } from "next/navigation"; +import { useEffect, useMemo } from "react"; import { useShallow } from "zustand/react/shallow"; import type { ServerLogoutOptions } from "../actions"; import { useSupabaseStore } from "./useSupabaseStore"; @@ -10,8 +10,15 @@ import { useSupabaseStore } from "./useSupabaseStore"; export function useSupabase() { const router = useRouter(); const pathname = usePathname(); + const searchParams = useSearchParams(); const api = useBackendAPI(); + // Combine pathname and search params to get full path for redirect preservation + const fullPath = useMemo(() => { + const search = searchParams.toString(); + return search ? `${pathname}?${search}` : pathname; + }, [pathname, searchParams]); + const { user, supabase, @@ -36,9 +43,9 @@ export function useSupabase() { void initialize({ api, router, - pathname, + path: fullPath, }); - }, [api, initialize, pathname, router]); + }, [api, initialize, fullPath, router]); function handleLogout(options: ServerLogoutOptions = {}) { return logOut({ @@ -49,7 +56,7 @@ export function useSupabase() { function handleValidateSession() { return validateSession({ - pathname, + path: fullPath, router, }); } diff --git a/autogpt_platform/frontend/src/lib/supabase/hooks/useSupabaseStore.ts b/autogpt_platform/frontend/src/lib/supabase/hooks/useSupabaseStore.ts index dcc6029668..5207397ee4 100644 --- a/autogpt_platform/frontend/src/lib/supabase/hooks/useSupabaseStore.ts +++ b/autogpt_platform/frontend/src/lib/supabase/hooks/useSupabaseStore.ts @@ -21,7 +21,7 @@ import { interface InitializeParams { api: BackendAPI; router: AppRouterInstance; - pathname: string; + path: string; } interface LogOutParams { @@ -32,7 +32,7 @@ interface LogOutParams { interface ValidateParams { force?: boolean; - pathname?: string; + path?: string; router?: AppRouterInstance; } @@ -47,7 +47,7 @@ interface SupabaseStoreState { listenersCleanup: (() => void) | null; routerRef: AppRouterInstance | null; apiRef: BackendAPI | null; - currentPathname: string; + currentPath: string; initialize: (params: InitializeParams) => Promise; logOut: (params?: LogOutParams) => Promise; validateSession: (params?: ValidateParams) => Promise; @@ -60,7 +60,7 @@ export const useSupabaseStore = create((set, get) => { set({ routerRef: params.router, apiRef: params.api, - currentPathname: params.pathname, + currentPath: params.path, }); const supabaseClient = ensureSupabaseClient(); @@ -83,7 +83,7 @@ export const useSupabaseStore = create((set, get) => { // This handles race conditions after login where cookies might not be immediately available if (!result.user) { const validationResult = await validateSessionHelper({ - pathname: params.pathname, + path: params.path, currentUser: null, }); @@ -160,7 +160,7 @@ export const useSupabaseStore = create((set, get) => { params?: ValidateParams, ): Promise { const router = params?.router ?? get().routerRef; - const pathname = params?.pathname ?? get().currentPathname; + const pathname = params?.path ?? 
get().currentPath; if (!router || !pathname) return true; if (!params?.force && get().isValidating) return true; @@ -175,7 +175,7 @@ export const useSupabaseStore = create((set, get) => { try { const result = await validateSessionHelper({ - pathname, + path: pathname, currentUser: get().user, }); @@ -224,7 +224,7 @@ export const useSupabaseStore = create((set, get) => { event, api: get().apiRef, router: get().routerRef, - pathname: get().currentPathname, + path: get().currentPath, }); if (!result.shouldLogout) return; @@ -283,7 +283,7 @@ export const useSupabaseStore = create((set, get) => { listenersCleanup: null, routerRef: null, apiRef: null, - currentPathname: "", + currentPath: "", initialize, logOut, validateSession: validateSessionInternal, diff --git a/autogpt_platform/frontend/src/lib/supabase/middleware.ts b/autogpt_platform/frontend/src/lib/supabase/middleware.ts index 5e04efde67..5e4bd01e83 100644 --- a/autogpt_platform/frontend/src/lib/supabase/middleware.ts +++ b/autogpt_platform/frontend/src/lib/supabase/middleware.ts @@ -57,7 +57,9 @@ export async function updateSession(request: NextRequest) { const attemptingAdminPage = isAdminPage(pathname); if (attemptingProtectedPage || attemptingAdminPage) { + const currentDest = url.pathname + url.search; url.pathname = "/login"; + url.search = `?next=${encodeURIComponent(currentDest)}`; return NextResponse.redirect(url); } } diff --git a/autogpt_platform/frontend/src/middleware.ts b/autogpt_platform/frontend/src/middleware.ts index 65edec41d7..af1c823295 100644 --- a/autogpt_platform/frontend/src/middleware.ts +++ b/autogpt_platform/frontend/src/middleware.ts @@ -9,11 +9,15 @@ export const config = { matcher: [ /* * Match all request paths except for the ones starting with: - * - _next/static (static files) - * - _next/image (image optimization files) - * - favicon.ico (favicon file) + * - /_next/static (static files) + * - /_next/image (image optimization files) + * - /favicon.ico (favicon file) + * - /auth/callback (OAuth callback - needs to work without auth) * Feel free to modify this pattern to include more paths. + * + * Note: /auth/authorize and /auth/integrations/* ARE protected and need + * middleware to run for authentication checks. 
*/ - "/((?!_next/static|_next/image|favicon.ico|auth|.*\\.(?:svg|png|jpg|jpeg|gif|webp)$).*)", + "/((?!_next/static|_next/image|favicon.ico|auth/callback|.*\\.(?:svg|png|jpg|jpeg|gif|webp)$).*)", ], }; diff --git a/autogpt_platform/frontend/src/tests/api-keys.spec.ts b/autogpt_platform/frontend/src/tests/api-keys.spec.ts index a42ae8384e..e2a5575aed 100644 --- a/autogpt_platform/frontend/src/tests/api-keys.spec.ts +++ b/autogpt_platform/frontend/src/tests/api-keys.spec.ts @@ -19,8 +19,8 @@ test.describe("API Keys Page", () => { const page = await context.newPage(); try { - await page.goto("/profile/api_keys"); - await hasUrl(page, "/login"); + await page.goto("/profile/api-keys"); + await hasUrl(page, "/login?next=%2Fprofile%2Fapi-keys"); } finally { await page.close(); await context.close(); @@ -29,7 +29,7 @@ test.describe("API Keys Page", () => { test("should create a new API key successfully", async ({ page }) => { const { getButton, getField } = getSelectors(page); - await page.goto("/profile/api_keys"); + await page.goto("/profile/api-keys"); await getButton("Create Key").click(); await getField("Name").fill("Test Key"); @@ -45,7 +45,7 @@ test.describe("API Keys Page", () => { test("should revoke an existing API key", async ({ page }) => { const { getRole, getId } = getSelectors(page); - await page.goto("/profile/api_keys"); + await page.goto("/profile/api-keys"); const apiKeyRow = getId("api-key-row").first(); const apiKeyContent = await apiKeyRow diff --git a/autogpt_platform/frontend/src/tests/profile-form.spec.ts b/autogpt_platform/frontend/src/tests/profile-form.spec.ts index 527c5cca92..1fc1008e9c 100644 --- a/autogpt_platform/frontend/src/tests/profile-form.spec.ts +++ b/autogpt_platform/frontend/src/tests/profile-form.spec.ts @@ -24,7 +24,7 @@ test.describe("Profile Form", () => { try { await page.goto("/profile"); - await hasUrl(page, "/login"); + await hasUrl(page, "/login?next=%2Fprofile"); } finally { await page.close(); await context.close(); diff --git a/autogpt_platform/frontend/src/tests/signin.spec.ts b/autogpt_platform/frontend/src/tests/signin.spec.ts index 6e53855a8e..0f36006c4d 100644 --- a/autogpt_platform/frontend/src/tests/signin.spec.ts +++ b/autogpt_platform/frontend/src/tests/signin.spec.ts @@ -152,10 +152,10 @@ test("multi-tab logout with WebSocket cleanup", async ({ context }) => { // Check if Tab 2 has been redirected to login or refresh the page to trigger redirect try { await page2.reload(); - await hasUrl(page2, "/login"); + await hasUrl(page2, "/login?next=%2Fbuild"); } catch { // If reload fails, the page might already be redirecting - await hasUrl(page2, "/login"); + await hasUrl(page2, "/login?next=%2Fbuild"); } // Verify the profile menu is no longer visible (user is logged out) diff --git a/docs/content/platform/integrating/api-guide.md b/docs/content/platform/integrating/api-guide.md new file mode 100644 index 0000000000..19d210af91 --- /dev/null +++ b/docs/content/platform/integrating/api-guide.md @@ -0,0 +1,85 @@ +# AutoGPT Platform External API Guide + +The AutoGPT Platform provides an External API that allows you to programmatically interact with agents, blocks, the store, and more. + +## API Documentation + +Full API documentation with interactive examples is available at: + +**[https://backend.agpt.co/external-api/docs](https://backend.agpt.co/external-api/docs)** + +This Swagger UI documentation includes all available endpoints, request/response schemas, and allows you to try out API calls directly. 
+ +## Authentication Methods + +The External API supports two authentication methods: + +### 1. API Keys + +API keys are the simplest way to authenticate. Generate an API key from your AutoGPT Platform account settings and include it in your requests: + +```http +GET /external-api/v1/blocks +X-API-Key: your_api_key_here +``` + +API keys are ideal for: +- Server-to-server integrations +- Personal scripts and automation +- Backend services + +### 2. OAuth 2.0 (Single Sign-On) + +For applications that need to act on behalf of users, use OAuth 2.0. This allows users to authorize your application to access their AutoGPT resources. + +OAuth is ideal for: +- Third-party applications +- "Sign in with AutoGPT" (SSO, Single Sign-On) functionality +- Applications that need user-specific permissions + +See the [SSO Integration Guide](sso-guide.md) for complete OAuth implementation details. + +## Available Scopes + +When using OAuth, request only the scopes your application needs: + +| Scope | Description | +|-------|-------------| +| `IDENTITY` | Read user ID, e-mail, and timezone | +| `EXECUTE_GRAPH` | Run agents | +| `READ_GRAPH` | Read agent run results | +| `EXECUTE_BLOCK` | Run individual blocks | +| `READ_BLOCK` | Read block definitions | +| `READ_STORE` | Access the agent store | +| `USE_TOOLS` | Use platform tools | +| `MANAGE_INTEGRATIONS` | Create and update user integrations | +| `READ_INTEGRATIONS` | Read user integration status | +| `DELETE_INTEGRATIONS` | Remove user integrations | + +## Quick Start + +### Using an API Key + +```bash +# List available blocks +curl -H "X-API-Key: YOUR_API_KEY" \ + https://backend.agpt.co/external-api/v1/blocks +``` + +### Using OAuth + +1. Register an OAuth application (contact platform administrator) +2. Implement the OAuth flow as described in the [SSO Guide](sso-guide.md) +3. Use the obtained access token: + +```bash +curl -H "Authorization: Bearer agpt_xt_..." \ + https://backend.agpt.co/external-api/v1/blocks +``` + +## Support + +For issues or questions about API integration: + +- Open an issue on [GitHub](https://github.com/Significant-Gravitas/AutoGPT) +- Check the [Swagger documentation](https://backend.agpt.co/external-api/docs) diff --git a/docs/content/platform/integrating/oauth-guide.md b/docs/content/platform/integrating/oauth-guide.md new file mode 100644 index 0000000000..d88ef385e1 --- /dev/null +++ b/docs/content/platform/integrating/oauth-guide.md @@ -0,0 +1,440 @@ +# AutoGPT Platform OAuth Integration Guide + +This guide explains how to integrate your application with AutoGPT Platform using OAuth 2.0. OAuth can be used for API access, Single Sign-On (SSO), or both. + +For general API information and endpoint documentation, see the [API Guide](api-guide.md) and the [Swagger documentation](https://backend.agpt.co/external-api/docs). + +## Overview + +AutoGPT Platform's OAuth implementation supports multiple use cases: + +### OAuth for API Access + +Use OAuth when your application needs to call AutoGPT APIs on behalf of users. This is the most common use case for third-party integrations. + +**When to use:** + +- Your app needs to run agents, access the store, or manage integrations for users +- You want user-specific permissions rather than a single API key +- Users should be able to revoke access to your app + +### SSO: "Sign in with AutoGPT" + +Use SSO when you want users to sign in to your app through their AutoGPT account. Request the `IDENTITY` scope to get user information. 
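+For example, a minimal "Sign in with AutoGPT" handler might look like the sketch below. This is a non-authoritative sketch assuming a browser app: the client ID and redirect URI are placeholders, and `generatePkce()` is the helper shown in [PKCE Implementation](#pkce-implementation) further down.
+
+```javascript
+// Sketch: start the SSO flow by redirecting to the AutoGPT authorization page.
+async function signInWithAutoGPT() {
+  const { verifier, challenge } = await generatePkce();
+  const state = crypto.randomUUID();
+
+  // Persist the verifier and state so the callback handler can verify them later.
+  sessionStorage.setItem("agpt_pkce_verifier", verifier);
+  sessionStorage.setItem("agpt_oauth_state", state);
+
+  const params = new URLSearchParams({
+    client_id: "YOUR_CLIENT_ID", // placeholder
+    redirect_uri: "https://yourapp.com/callback", // placeholder
+    scope: "IDENTITY",
+    state,
+    code_challenge: challenge,
+    code_challenge_method: "S256",
+    response_type: "code",
+  });
+
+  window.location.href = `https://platform.agpt.co/auth/authorize?${params}`;
+}
+```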
+ +**When to use:** + +- You want to use AutoGPT as an identity provider +- Users already have AutoGPT accounts and you want seamless login +- You need to identify users without managing passwords + +**Note:** SSO and API access can be combined. Request `IDENTITY` along with other scopes to both authenticate users and access APIs on their behalf. + +### Integration Setup Wizard + +A separate flow that guides users through connecting third-party services (GitHub, Google, etc.) to their AutoGPT account. See [Integration Setup Wizard](#integration-setup-wizard) below. + +## Prerequisites + +Before integrating, you need an OAuth application registered with AutoGPT Platform. Contact the platform administrator to obtain: + +- **Client ID** - Public identifier for your application +- **Client Secret** - Secret key for authenticating your application (keep this secure!) +- **Registered Redirect URIs** - URLs where users will be redirected after authorization + +## OAuth Flow + +The OAuth flow is technically the same whether you're using it for API access, SSO, or both. The main difference is which scopes you request. + +### Step 1: Redirect User to Authorization + +Redirect the user to the AutoGPT authorization page with the required parameters: + +```url +https://platform.agpt.co/auth/authorize? + client_id={YOUR_CLIENT_ID}& + redirect_uri=https://yourapp.com/callback& + scope=EXECUTE_GRAPH READ_GRAPH& + state={RANDOM_STATE_TOKEN}& + code_challenge={PKCE_CHALLENGE}& + code_challenge_method=S256& + response_type=code +``` + +#### Parameters + +| Parameter | Required | Description | +|-----------|----------|-------------| +| `client_id` | Yes | Your OAuth application's client ID | +| `redirect_uri` | Yes | URL to redirect after authorization (must match registered URI) | +| `scope` | Yes | Space-separated list of permissions (see [Available Scopes](api-guide.md#available-scopes)) | +| `state` | Yes | Random string to prevent CSRF attacks (store and verify on callback) | +| `code_challenge` | Yes | PKCE code challenge (see [PKCE](#pkce-implementation)) | +| `code_challenge_method` | Yes | Must be `S256` | +| `response_type` | Yes | Must be `code` | + +### Step 2: Handle the Callback + +After the user approves (or denies) access, they'll be redirected to your `redirect_uri`: + +**Success:** + +```url +https://yourapp.com/callback?code=AUTHORIZATION_CODE&state=RANDOM_STATE_TOKEN +``` + +**Error:** + +```url +https://yourapp.com/callback?error=access_denied&error_description=User%20denied%20access&state=RANDOM_STATE_TOKEN +``` + +Always verify the `state` parameter matches what you sent in Step 1. + +### Step 3: Exchange Code for Tokens + +Exchange the authorization code for access and refresh tokens: + +```http +POST /api/oauth/token +Content-Type: application/json + +{ + "grant_type": "authorization_code", + "code": "{AUTHORIZATION_CODE}", + "redirect_uri": "https://yourapp.com/callback", + "client_id": "{YOUR_CLIENT_ID}", + "client_secret": "{YOUR_CLIENT_SECRET}", + "code_verifier": "{PKCE_VERIFIER}" +} +``` + +**Response:** + +```json +{ + "token_type": "Bearer", + "access_token": "agpt_xt_...", + "access_token_expires_at": "2025-01-15T12:00:00Z", + "refresh_token": "agpt_rt_...", + "refresh_token_expires_at": "2025-02-14T12:00:00Z", + "scopes": ["EXECUTE_GRAPH", "READ_GRAPH"] +} +``` + +### Step 4: Use the Access Token + +Include the access token in API requests: + +```http +GET /external-api/v1/blocks +Authorization: Bearer agpt_xt_... 
+``` + +**For SSO:** If you requested the `IDENTITY` scope, fetch user info to identify the user: + +```http +GET /external-api/v1/me +Authorization: Bearer agpt_xt_... +``` + +**Response:** + +```json +{ + "id": "user-uuid", + "name": "John Doe", + "email": "john@example.com", + "timezone": "Europe/Amsterdam" +} +``` + +See the [Swagger documentation](https://backend.agpt.co/external-api/docs) for all available endpoints. + +### Step 5: Refresh Tokens + +Access tokens expire after 1 hour. Use the refresh token to get new tokens: + +```http +POST /api/oauth/token +Content-Type: application/json + +{ + "grant_type": "refresh_token", + "refresh_token": "agpt_rt_...", + "client_id": "{YOUR_CLIENT_ID}", + "client_secret": "{YOUR_CLIENT_SECRET}" +} +``` + +**Response:** + +```json +{ + "token_type": "Bearer", + "access_token": "agpt_xt_...", + "access_token_expires_at": "2025-01-15T13:00:00Z", + "refresh_token": "agpt_rt_...", + "refresh_token_expires_at": "2025-02-14T12:00:00Z", + "scopes": ["EXECUTE_GRAPH", "READ_GRAPH"] +} +``` + +## Integration Setup Wizard + +The Integration Setup Wizard guides users through connecting third-party services (like GitHub, Google, etc.) to their AutoGPT account. This is useful when your application needs users to have specific integrations configured. + +### Redirect to the Wizard + +```url +https://platform.agpt.co/auth/integrations/setup-wizard? + client_id={YOUR_CLIENT_ID}& + providers={BASE64_ENCODED_PROVIDERS}& + redirect_uri=https://yourapp.com/callback& + state={RANDOM_STATE_TOKEN} +``` + +#### Parameters + +| Parameter | Required | Description | +|-----------|----------|-------------| +| `client_id` | Yes | Your OAuth application's client ID | +| `providers` | Yes | Base64-encoded JSON array of provider configurations | +| `redirect_uri` | Yes | URL to redirect after setup completes | +| `state` | Yes | Random string to prevent CSRF attacks | + +#### Provider Configuration + +The `providers` parameter is a Base64-encoded JSON array: + +```javascript +const providers = [ + { provider: 'github', scopes: ['repo', 'read:user'] }, + { provider: 'google', scopes: ['https://www.googleapis.com/auth/calendar'] }, + { provider: 'slack' } // Uses default scopes +]; + +const providersBase64 = btoa(JSON.stringify(providers)); +``` + +### Handle the Callback + +After setup completes: + +**Success:** + +```url +https://yourapp.com/callback?success=true&state=RANDOM_STATE_TOKEN +``` + +**Failure/Cancelled:** + +```url +https://yourapp.com/callback?success=false&state=RANDOM_STATE_TOKEN +``` + +## Provider Scopes Reference + +When using the Integration Setup Wizard, you need to specify which scopes to request from each provider. 
Here are common providers and their scopes: + +### GitHub + +Documentation: https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/scopes-for-oauth-apps + +| Scope | Description | +|-------|-------------| +| `repo` | Full control of private repositories | +| `read:user` | Read user profile data | +| `user:email` | Access user email addresses | +| `gist` | Create and manage gists | +| `workflow` | Update GitHub Actions workflows | + +**Example:** + +```javascript +{ provider: 'github', scopes: ['repo', 'read:user'] } +``` + +### Google + +Documentation: https://developers.google.com/identity/protocols/oauth2/scopes + +| Scope | Description | +|-------|-------------| +| `email` | View email address (default) | +| `profile` | View basic profile info (default) | +| `openid` | OpenID Connect (default) | +| `https://www.googleapis.com/auth/calendar` | Google Calendar access | +| `https://www.googleapis.com/auth/drive` | Google Drive access | +| `https://www.googleapis.com/auth/gmail.readonly` | Read Gmail messages | + +**Example:** + +```javascript +{ provider: 'google', scopes: ['https://www.googleapis.com/auth/calendar'] } +// Or use defaults (email, profile, openid): +{ provider: 'google' } +``` + +### Notion + +Documentation: https://developers.notion.com/reference/capabilities + +Notion uses a single OAuth scope that grants access based on pages the user selects during authorization. + +### Linear + +Documentation: https://developers.linear.app/docs/oauth/authentication + +| Scope | Description | +|-------|-------------| +| `read` | Read access to Linear data | +| `write` | Write access to Linear data | +| `issues:create` | Create issues | + +## PKCE Implementation + +PKCE (Proof Key for Code Exchange) is required for all authorization requests. 
Here's how to implement it: + +### JavaScript Example + +```javascript +async function generatePkce() { + // Generate a random code verifier + const array = new Uint8Array(32); + crypto.getRandomValues(array); + const verifier = Array.from(array, b => b.toString(16).padStart(2, '0')).join(''); + + // Create SHA-256 hash and base64url encode it + const hash = await crypto.subtle.digest('SHA-256', new TextEncoder().encode(verifier)); + const challenge = btoa(String.fromCharCode(...new Uint8Array(hash))) + .replace(/\+/g, '-') + .replace(/\//g, '_') + .replace(/=+$/, ''); + + return { verifier, challenge }; +} + +// Usage: +const pkce = await generatePkce(); +// Store pkce.verifier securely (e.g., in session storage) +// Use pkce.challenge in the authorization URL +``` + +### Python Example + +```python +import hashlib +import base64 +import secrets + +def generate_pkce(): + # Generate a random code verifier + verifier = secrets.token_urlsafe(32) + + # Create SHA-256 hash and base64url encode it + digest = hashlib.sha256(verifier.encode()).digest() + challenge = base64.urlsafe_b64encode(digest).decode().rstrip('=') + + return verifier, challenge + +# Usage: +verifier, challenge = generate_pkce() +# Store verifier securely in session +# Use challenge in the authorization URL +``` + +## Token Management + +### Token Lifetimes + +| Token Type | Lifetime | +|------------|----------| +| Access Token | 1 hour | +| Refresh Token | 30 days | +| Authorization Code | 10 minutes | + +### Token Introspection + +Check if a token is valid: + +```http +POST /api/oauth/introspect +Content-Type: application/json + +{ + "token": "agpt_xt_...", + "token_type_hint": "access_token", + "client_id": "{YOUR_CLIENT_ID}", + "client_secret": "{YOUR_CLIENT_SECRET}" +} +``` + +**Response:** + +```json +{ + "active": true, + "scopes": ["EXECUTE_GRAPH", "READ_GRAPH"], + "client_id": "agpt_client_...", + "user_id": "user-uuid", + "exp": 1705320000, + "token_type": "access_token" +} +``` + +### Token Revocation + +Revoke a token when the user logs out: + +```http +POST /api/oauth/revoke +Content-Type: application/json + +{ + "token": "agpt_xt_...", + "token_type_hint": "access_token", + "client_id": "{YOUR_CLIENT_ID}", + "client_secret": "{YOUR_CLIENT_SECRET}" +} +``` + +## Security Best Practices + +1. **Store client secrets securely** - Never expose them in client-side code or version control +2. **Always use PKCE** - Required for all authorization requests +3. **Validate state parameters** - Prevents CSRF attacks +4. **Use HTTPS** - All production redirect URIs must use HTTPS +5. **Request minimal scopes** - Only request the permissions your app needs +6. **Handle token expiration** - Implement automatic token refresh +7. 
**Revoke tokens on logout** - Clean up when users disconnect your app + +## Error Handling + +### Common OAuth Errors + +| Error | Description | Solution | +|-------|-------------|----------| +| `invalid_client` | Client ID not found or inactive | Verify client ID is correct | +| `invalid_redirect_uri` | Redirect URI not registered | Register URI with platform admin | +| `invalid_scope` | Requested scope not allowed | Check allowed scopes for your app | +| `invalid_grant` | Code expired or already used | Authorization codes are single-use | +| `access_denied` | User denied authorization | Handle gracefully in your UI | + +### HTTP Status Codes + +| Code | Meaning | +|------|---------| +| 200 | Success | +| 400 | Bad request (invalid parameters) | +| 401 | Unauthorized (invalid/expired token) | +| 403 | Forbidden (insufficient scope) | +| 404 | Resource not found | + +## Support + +For issues or questions about OAuth integration: + +- Open an issue on [GitHub](https://github.com/Significant-Gravitas/AutoGPT) +- See the [API Guide](api-guide.md) for general API information +- Check the [Swagger documentation](https://backend.agpt.co/external-api/docs) for endpoint details diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index ebf987f34b..876467633e 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -7,14 +7,14 @@ docs_dir: content nav: - Home: index.md - - The AutoGPT Platform 🆕: - - Getting Started: + - The AutoGPT Platform 🆕: + - Getting Started: - Setup AutoGPT (Local-Host): platform/getting-started.md - Edit an Agent: platform/edit-agent.md - Delete an Agent: platform/delete-agent.md - - Download & Import and Agent: platform/download-agent-from-marketplace-local.md + - Download & Import and Agent: platform/download-agent-from-marketplace-local.md - Create a Basic Agent: platform/create-basic-agent.md - - Submit an Agent to the Marketplace: platform/submit-agent-to-marketplace.md + - Submit an Agent to the Marketplace: platform/submit-agent-to-marketplace.md - Advanced Setup: platform/advanced_setup.md - Agent Blocks: platform/agent-blocks.md - Build your own Blocks: platform/new_blocks.md @@ -23,6 +23,9 @@ nav: - Using AI/ML API: platform/aimlapi.md - Using D-ID: platform/d_id.md - Blocks: platform/blocks/blocks.md + - API: + - Introduction: platform/integrating/api-guide.md + - OAuth & SSO: platform/integrating/oauth-guide.md - Contributing: - Tests: platform/contributing/tests.md - OAuth Flows: platform/contributing/oauth-integration-flow.md From 217e3718d7ea0ecdca27221aad58b738944e3229 Mon Sep 17 00:00:00 2001 From: Zamil Majdy Date: Sat, 20 Dec 2025 16:52:51 +0100 Subject: [PATCH 13/25] feat(platform): implement HITL UI redesign with improved review flow (#11529) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary • Redesigned Human-in-the-Loop review interface with yellow warning scheme • Implemented separate approved_data/rejected_data output pins for human_in_the_loop block • Added real-time execution status tracking to legacy flow for review detection • Fixed button loading states and improved UI consistency across flows • Standardized Tailwind CSS usage removing custom values image image image ## Changes Made ### Backend Changes - Modified `human_in_the_loop.py` block to output separate `approved_data` and `rejected_data` pins instead of single reviewed_data with status - Updated block output schema to support better data flow in graph builder ### Frontend UI Changes - Redesigned PendingReviewsList with yellow warning color 
scheme (replacing orange) - Fixed button loading states to show spinner only on clicked button - Improved FloatingReviewsPanel layout removing redundant headers - Added real-time status tracking to legacy flow using useFlowRealtime hook - Fixed AgentActivityDropdown text overflow and layout issues - Enhanced Safe Mode toggle positioning and toast timing - Standardized all custom Tailwind values to use standard classes ### Design System Updates - Added yellow design tokens (25, 150, 600) for warning states - Unified REVIEW status handling across all components - Improved component composition patterns ## Test Plan - [x] Verify HITL blocks create separate output pins for approved/rejected data - [x] Test review flow works in both new and legacy flow builders - [x] Confirm button loading states work correctly (only clicked button shows spinner) - [x] Validate AgentActivityDropdown properly displays review status - [x] Check Safe Mode toggle positioning matches old flow - [x] Ensure real-time status updates work in legacy flow - [x] Verify yellow warning colors are consistent throughout 🤖 Generated with [Claude Code](https://claude.ai/code) --------- Co-authored-by: Lluis Agusti --- .../components/FloatingSafeModeToogle.tsx | 86 +++++++++++ .../build/components/FlowEditor/Flow/Flow.tsx | 5 +- .../components/NodeExecutionBadge.tsx | 2 +- .../FlowEditor/nodes/CustomNode/helpers.ts | 2 +- .../components/legacy-builder/Flow/Flow.tsx | 9 +- .../NewAgentLibraryView.tsx | 57 +++++-- .../CredentialRow/CredentialRow.tsx | 2 +- .../components/other/AgentSettingsButton.tsx | 29 ++++ .../selected-views/AnchorLinksWrap.tsx | 14 -- .../selected-views/LoadingSelectedContent.tsx | 12 +- .../SelectedRunView/SelectedRunView.tsx | 61 ++++---- .../components/RunStatusBadge.tsx | 8 +- .../SelectedRunView/components/RunSummary.tsx | 2 +- .../components/SafeModeToggle.tsx | 52 +++++++ .../SelectedRunActions/SelectedRunActions.tsx | 4 +- .../SelectedScheduleView.tsx | 12 +- .../SelectedSettingsView.tsx | 67 ++++++++ .../SelectedTemplateView.tsx | 4 +- .../SelectedTriggerView.tsx | 4 +- .../selected-views/SelectedViewLayout.tsx | 32 +++- .../components/TaskListItem.tsx | 4 +- .../useNewAgentLibraryView.ts | 14 +- .../components/agent-run-status-chip.tsx | 4 +- .../monitoring/components/AgentFlowList.tsx | 2 +- .../AgentActivityDropdown.tsx | 3 +- .../ActivityDropdown/ActivityDropdown.tsx | 2 +- .../components/ActivityItem.tsx | 111 ++++++------- .../molecules/Breadcrumbs/Breadcrumbs.tsx | 20 ++- .../FloatingReviewsPanel.tsx | 35 +++-- .../PendingReviewCard/PendingReviewCard.tsx | 133 +++++++++------- .../PendingReviewsList/PendingReviewsList.tsx | 146 ++++++++---------- .../frontend/src/components/styles/colors.ts | 4 +- .../useAgentSafeMode.ts} | 86 ++--------- .../frontend/src/hooks/useExecutionEvents.ts | 23 ++- 34 files changed, 648 insertions(+), 403 deletions(-) create mode 100644 autogpt_platform/frontend/src/app/(platform)/build/components/FloatingSafeModeToogle.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton.tsx delete mode 100644 autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/AnchorLinksWrap.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SafeModeToggle.tsx create mode 100644 
autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedSettingsView/SelectedSettingsView.tsx rename autogpt_platform/frontend/src/{components/molecules/FloatingSafeModeToggle/FloatingSafeModeToggle.tsx => hooks/useAgentSafeMode.ts} (65%) diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FloatingSafeModeToogle.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/FloatingSafeModeToogle.tsx new file mode 100644 index 0000000000..c1a7ef3b35 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FloatingSafeModeToogle.tsx @@ -0,0 +1,86 @@ +import { GraphModel } from "@/app/api/__generated__/models/graphModel"; +import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; +import { Button } from "@/components/atoms/Button/Button"; +import { Graph } from "@/lib/autogpt-server-api/types"; +import { cn } from "@/lib/utils"; +import { ShieldCheckIcon, ShieldIcon } from "@phosphor-icons/react"; +import { Text } from "@/components/atoms/Text/Text"; +import { useAgentSafeMode } from "@/hooks/useAgentSafeMode"; +import { + Tooltip, + TooltipContent, + TooltipTrigger, +} from "@/components/atoms/Tooltip/BaseTooltip"; + +interface Props { + graph: GraphModel | LibraryAgent | Graph; + className?: string; + fullWidth?: boolean; +} + +export function FloatingSafeModeToggle({ + graph, + className, + fullWidth = false, +}: Props) { + const { + currentSafeMode, + isPending, + shouldShowToggle, + isStateUndetermined, + handleToggle, + } = useAgentSafeMode(graph); + + if (!shouldShowToggle || isStateUndetermined || isPending) { + return null; + } + + return ( +
+ + + + + +
+
+ Safe Mode: {currentSafeMode! ? "ON" : "OFF"} +
+
+ {currentSafeMode! + ? "Human in the loop blocks require manual review" + : "Human in the loop blocks proceed automatically"} +
+
+
+
+
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx index 13268fc816..d312fd487d 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx @@ -16,12 +16,12 @@ import { useCopyPaste } from "./useCopyPaste"; import { FloatingReviewsPanel } from "@/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel"; import { parseAsString, useQueryStates } from "nuqs"; import { CustomControls } from "./components/CustomControl"; -import { FloatingSafeModeToggle } from "@/components/molecules/FloatingSafeModeToggle/FloatingSafeModeToggle"; import { useGetV1GetSpecificGraph } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { GraphModel } from "@/app/api/__generated__/models/graphModel"; import { okData } from "@/app/api/helpers"; import { TriggerAgentBanner } from "./components/TriggerAgentBanner"; import { resolveCollisions } from "./helpers/resolve-collision"; +import { FloatingSafeModeToggle } from "../../FloatingSafeModeToogle"; export const Flow = () => { const [{ flowID, flowExecutionID }] = useQueryStates({ @@ -113,8 +113,7 @@ export const Flow = () => { {graph && ( )} diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/components/NodeExecutionBadge.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/components/NodeExecutionBadge.tsx index acc0c26156..5571274ffb 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/components/NodeExecutionBadge.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/components/NodeExecutionBadge.tsx @@ -9,7 +9,7 @@ const statusStyles: Record = { INCOMPLETE: "text-slate-700 border-slate-400", QUEUED: "text-blue-700 border-blue-400", RUNNING: "text-amber-700 border-amber-400", - REVIEW: "text-orange-700 border-orange-400 bg-orange-50", + REVIEW: "text-yellow-700 border-yellow-400 bg-yellow-50", COMPLETED: "text-green-700 border-green-400", TERMINATED: "text-orange-700 border-orange-400", FAILED: "text-red-700 border-red-400", diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/helpers.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/helpers.ts index 2093fed40f..8d228d0cd0 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/helpers.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/helpers.ts @@ -4,7 +4,7 @@ export const nodeStyleBasedOnStatus: Record = { INCOMPLETE: "ring-slate-300 bg-slate-300", QUEUED: " ring-blue-300 bg-blue-300", RUNNING: "ring-amber-300 bg-amber-300", - REVIEW: "ring-orange-300 bg-orange-300", + REVIEW: "ring-yellow-300 bg-yellow-300", COMPLETED: "ring-green-300 bg-green-300", TERMINATED: "ring-orange-300 bg-orange-300 ", FAILED: "ring-red-300 bg-red-300", diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/Flow/Flow.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/Flow/Flow.tsx index 7e9b54d626..80a6fb022d 100644 --- 
a/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/Flow/Flow.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/Flow/Flow.tsx @@ -65,7 +65,8 @@ import NewControlPanel from "@/app/(platform)/build/components/NewControlPanel/N import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag"; import { BuildActionBar } from "../BuildActionBar"; import { FloatingReviewsPanel } from "@/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel"; -import { FloatingSafeModeToggle } from "@/components/molecules/FloatingSafeModeToggle/FloatingSafeModeToggle"; +import { useFlowRealtime } from "@/app/(platform)/build/components/FlowEditor/Flow/useFlowRealtime"; +import { FloatingSafeModeToggle } from "../../FloatingSafeModeToogle"; // This is for the history, this is the minimum distance a block must move before it is logged // It helps to prevent spamming the history with small movements especially when pressing on a input in a block @@ -153,6 +154,9 @@ const FlowEditor: React.FC<{ Record >(Object.fromEntries(nodes.map((node) => [node.id, node.position]))); + // Add realtime execution status tracking for FloatingReviewsPanel + useFlowRealtime(); + const router = useRouter(); const pathname = usePathname(); const params = useSearchParams(); @@ -924,8 +928,7 @@ const FlowEditor: React.FC<{ {savedAgent && ( )} {isNewBlockEnabled ? ( diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/NewAgentLibraryView.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/NewAgentLibraryView.tsx index 2831d6cdba..2d7a1b30f4 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/NewAgentLibraryView.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/NewAgentLibraryView.tsx @@ -5,6 +5,7 @@ import { Breadcrumbs } from "@/components/molecules/Breadcrumbs/Breadcrumbs"; import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard"; import { cn } from "@/lib/utils"; import { PlusIcon } from "@phosphor-icons/react"; +import { useEffect } from "react"; import { RunAgentModal } from "./components/modals/RunAgentModal/RunAgentModal"; import { AgentRunsLoading } from "./components/other/AgentRunsLoading"; import { EmptySchedules } from "./components/other/EmptySchedules"; @@ -17,6 +18,7 @@ import { SelectedRunView } from "./components/selected-views/SelectedRunView/Sel import { SelectedScheduleView } from "./components/selected-views/SelectedScheduleView/SelectedScheduleView"; import { SelectedTemplateView } from "./components/selected-views/SelectedTemplateView/SelectedTemplateView"; import { SelectedTriggerView } from "./components/selected-views/SelectedTriggerView/SelectedTriggerView"; +import { SelectedSettingsView } from "./components/selected-views/SelectedSettingsView/SelectedSettingsView"; import { SelectedViewLayout } from "./components/selected-views/SelectedViewLayout"; import { SidebarRunsList } from "./components/sidebar/SidebarRunsList/SidebarRunsList"; import { AGENT_LIBRARY_SECTION_PADDING_X } from "./helpers"; @@ -24,7 +26,6 @@ import { useNewAgentLibraryView } from "./useNewAgentLibraryView"; export function NewAgentLibraryView() { const { - agentId, agent, ready, activeTemplate, @@ -39,10 +40,17 @@ export function NewAgentLibraryView() { handleCountsChange, handleClearSelectedRun, onRunInitiated, + 
handleSelectSettings, onTriggerSetup, onScheduleCreated, } = useNewAgentLibraryView(); + useEffect(() => { + if (agent) { + document.title = `${agent.name} - Library - AutoGPT Platform`; + } + }, [agent]); + if (error) { return (
- +
+ +
{activeItem ? ( - activeTab === "scheduled" ? ( + activeItem === "settings" ? ( + + ) : activeTab === "scheduled" ? ( ) ) : sidebarLoading ? ( - + ) : activeTab === "scheduled" ? ( - + ) : activeTab === "templates" ? ( - + ) : activeTab === "triggers" ? ( - + ) : ( - + {"*".repeat(MASKED_KEY_LENGTH)} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton.tsx new file mode 100644 index 0000000000..bc710ebc4e --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton.tsx @@ -0,0 +1,29 @@ +import { Button } from "@/components/atoms/Button/Button"; +import { GearIcon } from "@phosphor-icons/react"; +import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; +import { useAgentSafeMode } from "@/hooks/useAgentSafeMode"; + +interface Props { + agent: LibraryAgent; + onSelectSettings: () => void; +} + +export function AgentSettingsButton({ agent, onSelectSettings }: Props) { + const { hasHITLBlocks } = useAgentSafeMode(agent); + + if (!hasHITLBlocks) { + return null; + } + + return ( + + ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/AnchorLinksWrap.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/AnchorLinksWrap.tsx deleted file mode 100644 index 6dae969142..0000000000 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/AnchorLinksWrap.tsx +++ /dev/null @@ -1,14 +0,0 @@ -import { cn } from "@/lib/utils"; -import { AGENT_LIBRARY_SECTION_PADDING_X } from "../../helpers"; - -type Props = { - children: React.ReactNode; -}; - -export function AnchorLinksWrap({ children }: Props) { - return ( -
- -
- ); -} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/LoadingSelectedContent.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/LoadingSelectedContent.tsx index d239f57e31..dc2bb7cac2 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/LoadingSelectedContent.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/LoadingSelectedContent.tsx @@ -1,16 +1,22 @@ import { Skeleton } from "@/components/__legacy__/ui/skeleton"; import { cn } from "@/lib/utils"; +import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; import { AGENT_LIBRARY_SECTION_PADDING_X } from "../../helpers"; import { SelectedViewLayout } from "./SelectedViewLayout"; interface Props { - agentName: string; - agentId: string; + agent: LibraryAgent; + onSelectSettings?: () => void; + selectedSettings?: boolean; } export function LoadingSelectedContent(props: Props) { return ( - +
diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/SelectedRunView.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/SelectedRunView.tsx index ff9a4e5809..9e470139ff 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/SelectedRunView.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/SelectedRunView.tsx @@ -32,6 +32,8 @@ interface Props { runId: string; onSelectRun?: (id: string) => void; onClearSelectedRun?: () => void; + onSelectSettings?: () => void; + selectedSettings?: boolean; } export function SelectedRunView({ @@ -39,6 +41,7 @@ export function SelectedRunView({ runId, onSelectRun, onClearSelectedRun, + onSelectSettings, }: Props) { const { run, preset, isLoading, responseError, httpError } = useSelectedRunView(agent.graph_id, runId); @@ -72,13 +75,13 @@ export function SelectedRunView({ } if (isLoading && !run) { - return ; + return ; } return (
- +
@@ -106,6 +109,11 @@ export function SelectedRunView({ className="-mt-2 flex flex-col" > + {withReviews && ( + + Reviews ({pendingReviews.length}) + + )} {withSummary && ( Summary @@ -117,13 +125,29 @@ export function SelectedRunView({ Your input - {withReviews && ( - - Reviews ({pendingReviews.length}) - - )}
+ {/* Human-in-the-Loop Reviews Section */} + {withReviews && ( + +
+ {reviewsLoading ? ( + + ) : pendingReviews.length > 0 ? ( + + ) : ( + + No pending reviews for this execution + + )} +
+
+ )} + {/* Summary Section */} {withSummary && ( @@ -186,29 +210,6 @@ export function SelectedRunView({
- - {/* Reviews Section */} - {withReviews && ( - -
- - {reviewsLoading ? ( - - ) : pendingReviews.length > 0 ? ( - - ) : ( - - No pending reviews for this execution - - )} - -
-
- )}
diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunStatusBadge.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunStatusBadge.tsx index cf92280c86..3781e724ad 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunStatusBadge.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunStatusBadge.tsx @@ -2,10 +2,10 @@ import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecut import { CheckCircleIcon, ClockIcon, - EyeIcon, PauseCircleIcon, StopCircleIcon, WarningCircleIcon, + WarningIcon, XCircleIcon, } from "@phosphor-icons/react"; import { Text } from "@/components/atoms/Text/Text"; @@ -38,9 +38,9 @@ const statusIconMap: Record = { textColor: "!text-yellow-700", }, REVIEW: { - icon: , - bgColor: "bg-orange-50", - textColor: "!text-orange-700", + icon: , + bgColor: "bg-yellow-50", + textColor: "!text-yellow-700", }, COMPLETED: { icon: ( diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunSummary.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunSummary.tsx index aea9bae7f9..c887ae969d 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunSummary.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunSummary.tsx @@ -25,7 +25,7 @@ export function RunSummary({ run }: Props) {

{typeof correctnessScore === "number" && ( -
+
Success Estimate: diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SafeModeToggle.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SafeModeToggle.tsx new file mode 100644 index 0000000000..9ba37d8d17 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SafeModeToggle.tsx @@ -0,0 +1,52 @@ +import { GraphModel } from "@/app/api/__generated__/models/graphModel"; +import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; +import { Button } from "@/components/atoms/Button/Button"; +import { Graph } from "@/lib/autogpt-server-api/types"; +import { cn } from "@/lib/utils"; +import { ShieldCheckIcon, ShieldIcon } from "@phosphor-icons/react"; +import { useAgentSafeMode } from "@/hooks/useAgentSafeMode"; + +interface Props { + graph: GraphModel | LibraryAgent | Graph; + className?: string; + fullWidth?: boolean; +} + +export function SafeModeToggle({ graph }: Props) { + const { + currentSafeMode, + isPending, + shouldShowToggle, + isStateUndetermined, + handleToggle, + } = useAgentSafeMode(graph); + + if (!shouldShowToggle || isStateUndetermined) { + return null; + } + + return ( + + ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/SelectedRunActions.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/SelectedRunActions.tsx index 92db3e0b37..cb821b2ecd 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/SelectedRunActions.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/SelectedRunActions.tsx @@ -2,7 +2,6 @@ import { GraphExecution } from "@/app/api/__generated__/models/graphExecution"; import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; import { Button } from "@/components/atoms/Button/Button"; import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner"; -import { FloatingSafeModeToggle } from "@/components/molecules/FloatingSafeModeToggle/FloatingSafeModeToggle"; import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag"; import { ArrowBendLeftUpIcon, @@ -16,6 +15,7 @@ import { SelectedActionsWrap } from "../../../SelectedActionsWrap"; import { ShareRunButton } from "../../../ShareRunButton/ShareRunButton"; import { CreateTemplateModal } from "../CreateTemplateModal/CreateTemplateModal"; import { useSelectedRunActions } from "./useSelectedRunActions"; +import { SafeModeToggle } from "../SafeModeToggle"; type Props = { agent: LibraryAgent; @@ -113,7 +113,7 @@ export function SelectedRunActions({ shareToken={run.share_token} /> )} - + {canRunManually && ( <> + Agent Settings +
+ +
+ {!hasHITLBlocks ? ( +
+ + This agent doesn't have any human-in-the-loop blocks, so + there are no settings to configure. + +
+ ) : ( +
+
+
+ Require human approval + + The agent will pause and wait for your review before + continuing + +
+ +
+
+ )} +
+
+ + ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/SelectedTemplateView.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/SelectedTemplateView.tsx index ead985457e..71f7d582bd 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/SelectedTemplateView.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/SelectedTemplateView.tsx @@ -87,7 +87,7 @@ export function SelectedTemplateView({ } if (isLoading && !template) { - return ; + return ; } if (!template) { @@ -100,7 +100,7 @@ export function SelectedTemplateView({ return (
- +
diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/SelectedTriggerView.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/SelectedTriggerView.tsx index 64d4430e78..f8ec392f09 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/SelectedTriggerView.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/SelectedTriggerView.tsx @@ -81,7 +81,7 @@ export function SelectedTriggerView({ } if (isLoading && !trigger) { - return ; + return ; } if (!trigger) { @@ -93,7 +93,7 @@ export function SelectedTriggerView({ return (
- +
diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedViewLayout.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedViewLayout.tsx index 242430ba6a..df549bea58 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedViewLayout.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedViewLayout.tsx @@ -1,11 +1,15 @@ import { Breadcrumbs } from "@/components/molecules/Breadcrumbs/Breadcrumbs"; +import { AgentSettingsButton } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton"; +import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; import { AGENT_LIBRARY_SECTION_PADDING_X } from "../../helpers"; import { SectionWrap } from "../other/SectionWrap"; interface Props { - agentName: string; - agentId: string; + agent: LibraryAgent; children: React.ReactNode; + additionalBreadcrumb?: { name: string; link?: string }; + onSelectSettings?: () => void; + selectedSettings?: boolean; } export function SelectedViewLayout(props: Props) { @@ -14,12 +18,24 @@ export function SelectedViewLayout(props: Props) {
- +
+ + {props.agent && props.onSelectSettings && ( +
+ +
+ )} +
{props.children} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/components/TaskListItem.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/components/TaskListItem.tsx index 22adc54e4f..2e2871e15e 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/components/TaskListItem.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/components/TaskListItem.tsx @@ -34,8 +34,8 @@ const statusIconMap: Record = { ), REVIEW: ( - - + + ), COMPLETED: ( diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/useNewAgentLibraryView.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/useNewAgentLibraryView.ts index b280400401..394edb1a6d 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/useNewAgentLibraryView.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/useNewAgentLibraryView.ts @@ -89,10 +89,8 @@ export function useNewAgentLibraryView() { [sidebarCounts], ); - // Show sidebar layout while loading or when there are items - const showSidebarLayout = sidebarLoading || hasAnyItems; - - useEffect(() => { + // Show sidebar layout while loading or when there are items or settings is selected + const showSidebarLayout = useEffect(() => { if (agent) { document.title = `${agent.name} - Library - AutoGPT Platform`; } @@ -134,6 +132,13 @@ export function useNewAgentLibraryView() { }); } + function handleSelectSettings() { + setQueryStates({ + activeItem: "settings", + activeTab: "runs", // Reset to runs tab when going to settings + }); + } + const handleCountsChange = useCallback( (counts: { runsCount: number; @@ -205,6 +210,7 @@ export function useNewAgentLibraryView() { handleCountsChange, handleSelectRun, onRunInitiated, + handleSelectSettings, onTriggerSetup, onScheduleCreated, }; diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-run-status-chip.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-run-status-chip.tsx index 24b2864359..58f1ee8381 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-run-status-chip.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-run-status-chip.tsx @@ -38,7 +38,7 @@ const statusData: Record< draft: { label: "Draft", variant: "secondary" }, stopped: { label: "Stopped", variant: "secondary" }, scheduled: { label: "Scheduled", variant: "secondary" }, - review: { label: "In Review", variant: "orange" }, + review: { label: "In Review", variant: "warning" }, }; const statusStyles = { @@ -47,8 +47,6 @@ const statusStyles = { destructive: "bg-red-100 text-red-800 hover:bg-red-100 hover:text-red-800", warning: "bg-yellow-100 text-yellow-800 hover:bg-yellow-100 hover:text-yellow-800", - orange: - "bg-orange-100 text-orange-800 hover:bg-orange-100 hover:text-orange-800", info: "bg-blue-100 text-blue-800 hover:bg-blue-100 
hover:text-blue-800", secondary: "bg-slate-100 text-slate-800 hover:bg-slate-100 hover:text-slate-800", diff --git a/autogpt_platform/frontend/src/app/(platform)/monitoring/components/AgentFlowList.tsx b/autogpt_platform/frontend/src/app/(platform)/monitoring/components/AgentFlowList.tsx index 1080a355cd..d4cb6fc649 100644 --- a/autogpt_platform/frontend/src/app/(platform)/monitoring/components/AgentFlowList.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/monitoring/components/AgentFlowList.tsx @@ -54,7 +54,7 @@ export const AgentFlowList = ({
{/* Split "Create" button */} - diff --git a/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/AgentActivityDropdown.tsx b/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/AgentActivityDropdown.tsx index 6dcea33d51..1d120c3b09 100644 --- a/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/AgentActivityDropdown.tsx +++ b/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/AgentActivityDropdown.tsx @@ -48,8 +48,7 @@ export function AgentActivityDropdown() { className="absolute bottom-[-2.5rem] left-1/2 z-50 hidden -translate-x-1/2 transform whitespace-nowrap rounded-small bg-white px-4 py-2 shadow-md group-hover:block" > - {activeCount} running agent - {activeCount > 1 ? "s" : ""} + {activeCount} active agent{activeCount > 1 ? "s" : ""}
diff --git a/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/components/ActivityDropdown/ActivityDropdown.tsx b/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/components/ActivityDropdown/ActivityDropdown.tsx index 4eafee8f2e..263453b327 100644 --- a/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/components/ActivityDropdown/ActivityDropdown.tsx +++ b/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/components/ActivityDropdown/ActivityDropdown.tsx @@ -130,7 +130,7 @@ export function ActivityDropdown({ {filteredExecutions.length > 0 ? ( ; + return ; case AgentExecutionStatus.RUNNING: return ( - + ); case AgentExecutionStatus.COMPLETED: - return ( - - ); + return ; case AgentExecutionStatus.FAILED: - return ; + return ; case AgentExecutionStatus.TERMINATED: - return ( - - ); + return ; case AgentExecutionStatus.INCOMPLETE: - return ; + return ; case AgentExecutionStatus.REVIEW: - return ; + return ; default: return null; } } - function getTimeDisplay() { + function getItemDisplay() { + // Handle active statuses (running/queued) const isActiveStatus = execution.status === AgentExecutionStatus.RUNNING || - execution.status === AgentExecutionStatus.QUEUED || - execution.status === AgentExecutionStatus.REVIEW; + execution.status === AgentExecutionStatus.QUEUED; if (isActiveStatus) { const timeAgo = formatTimeAgo(execution.started_at.toString()); - let statusText = "running"; - if (execution.status === AgentExecutionStatus.QUEUED) { - statusText = "queued"; - } - return `Started ${timeAgo}, ${getExecutionDuration(execution)} ${statusText}`; + const statusText = + execution.status === AgentExecutionStatus.QUEUED ? "queued" : "running"; + return [ + `Started ${timeAgo}, ${getExecutionDuration(execution)} ${statusText}`, + ]; } - if (execution.ended_at) { - const timeAgo = formatTimeAgo(execution.ended_at.toString()); - switch (execution.status) { - case AgentExecutionStatus.COMPLETED: - return `Completed ${timeAgo}`; - case AgentExecutionStatus.FAILED: - return `Failed ${timeAgo}`; - case AgentExecutionStatus.TERMINATED: - return `Stopped ${timeAgo}`; - case AgentExecutionStatus.INCOMPLETE: - return `Incomplete ${timeAgo}`; - case AgentExecutionStatus.REVIEW: - return `In review ${timeAgo}`; - default: - return `Ended ${timeAgo}`; - } + // Handle all other statuses with time display + const timeAgo = execution.ended_at + ? formatTimeAgo(execution.ended_at.toString()) + : formatTimeAgo(execution.started_at.toString()); + + let statusText = "ended"; + switch (execution.status) { + case AgentExecutionStatus.COMPLETED: + statusText = "completed"; + break; + case AgentExecutionStatus.FAILED: + statusText = "failed"; + break; + case AgentExecutionStatus.TERMINATED: + statusText = "stopped"; + break; + case AgentExecutionStatus.INCOMPLETE: + statusText = "incomplete"; + break; + case AgentExecutionStatus.REVIEW: + statusText = "awaiting approval"; + break; } - return "Unknown"; + return [ + `${statusText.charAt(0).toUpperCase() + statusText.slice(1)} ${timeAgo}`, + ]; } // Determine the tab based on execution status @@ -101,20 +100,22 @@ export function ActivityItem({ execution }: Props) { {/* Icon + Agent Name */}
{getStatusIcon()} - + {execution.agent_name}
{/* Agent Message - Indented */}
- {/* Time - Indented */} - - {getTimeDisplay()} - + {getItemDisplay().map((line, index) => ( + + {line} + + ))}
); diff --git a/autogpt_platform/frontend/src/components/molecules/Breadcrumbs/Breadcrumbs.tsx b/autogpt_platform/frontend/src/components/molecules/Breadcrumbs/Breadcrumbs.tsx index 1fb69763fe..f990970a6c 100644 --- a/autogpt_platform/frontend/src/components/molecules/Breadcrumbs/Breadcrumbs.tsx +++ b/autogpt_platform/frontend/src/components/molecules/Breadcrumbs/Breadcrumbs.tsx @@ -4,7 +4,7 @@ import * as React from "react"; interface BreadcrumbItem { name: string; - link: string; + link?: string; } interface Props { @@ -16,12 +16,18 @@ export function Breadcrumbs({ items }: Props) {
{items.map((item, index) => ( - - {item.name} - + {item.link ? ( + + {item.name} + + ) : ( + + {item.name} + + )} {index < items.length - 1 && ( / diff --git a/autogpt_platform/frontend/src/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel.tsx b/autogpt_platform/frontend/src/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel.tsx index 183fd8599e..12014e50fe 100644 --- a/autogpt_platform/frontend/src/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel.tsx +++ b/autogpt_platform/frontend/src/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel.tsx @@ -7,6 +7,8 @@ import { cn } from "@/lib/utils"; import { Text } from "@/components/atoms/Text/Text"; import { useGetV1GetExecutionDetails } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecutionStatus"; +import { useGraphStore } from "@/app/(platform)/build/stores/graphStore"; +import { useShallow } from "zustand/react/shallow"; interface FloatingReviewsPanelProps { executionId?: string; @@ -34,6 +36,11 @@ export function FloatingReviewsPanel({ const executionStatus = executionDetails?.status === 200 ? executionDetails.data.status : undefined; + // Get graph execution status from the store (updated via WebSocket) + const graphExecutionStatus = useGraphStore( + useShallow((state) => state.graphExecutionStatus), + ); + const { pendingReviews, isLoading, refetch } = usePendingReviewsForExecution( executionId || "", ); @@ -44,6 +51,13 @@ export function FloatingReviewsPanel({ } }, [executionStatus, executionId, refetch]); + // Refetch when graph execution status changes to REVIEW + useEffect(() => { + if (graphExecutionStatus === AgentExecutionStatus.REVIEW && executionId) { + refetch(); + } + }, [graphExecutionStatus, executionId, refetch]); + if ( !executionId || (!isLoading && @@ -73,18 +87,17 @@ export function FloatingReviewsPanel({ )} {isOpen && ( -
-
-
- - Pending Reviews -
- -
+
+ -
+
{isLoading ? (
diff --git a/autogpt_platform/frontend/src/components/organisms/PendingReviewCard/PendingReviewCard.tsx b/autogpt_platform/frontend/src/components/organisms/PendingReviewCard/PendingReviewCard.tsx index b5094dd4cd..3ac636060c 100644 --- a/autogpt_platform/frontend/src/components/organisms/PendingReviewCard/PendingReviewCard.tsx +++ b/autogpt_platform/frontend/src/components/organisms/PendingReviewCard/PendingReviewCard.tsx @@ -40,18 +40,18 @@ function extractReviewData(payload: unknown): { interface PendingReviewCardProps { review: PendingHumanReviewModel; onReviewDataChange: (nodeExecId: string, data: string) => void; - reviewMessage: string; - onReviewMessageChange: (nodeExecId: string, message: string) => void; - isDisabled: boolean; - onToggleDisabled: (nodeExecId: string) => void; + reviewMessage?: string; + onReviewMessageChange?: (nodeExecId: string, message: string) => void; + isDisabled?: boolean; + onToggleDisabled?: (nodeExecId: string) => void; } export function PendingReviewCard({ review, onReviewDataChange, - reviewMessage, + reviewMessage = "", onReviewMessageChange, - isDisabled, + isDisabled = false, onToggleDisabled, }: PendingReviewCardProps) { const extractedData = extractReviewData(review.payload); @@ -65,9 +65,12 @@ export function PendingReviewCard({ }; const handleMessageChange = (newMessage: string) => { - onReviewMessageChange(review.node_exec_id, newMessage); + onReviewMessageChange?.(review.node_exec_id, newMessage); }; + // Show simplified view when no toggle functionality is provided (Screenshot 1 mode) + const showSimplified = !onToggleDisabled; + const renderDataInput = () => { const data = currentData; @@ -134,60 +137,80 @@ export function PendingReviewCard({ } }; - return ( -
-
-
- {isDisabled && ( - - This item will be rejected - - )} -
- -
+ // Helper function to get proper field label + const getFieldLabel = (instructions?: string) => { + if (instructions) + return instructions.charAt(0).toUpperCase() + instructions.slice(1); + return "Data to Review"; + }; - {instructions && ( -
- - Instructions: - - {instructions} + // Use the existing HITL review interface + return ( +
+ {!showSimplified && ( +
+
+ {isDisabled && ( + + This item will be rejected + + )} +
+
)} -
- - Data to Review: - {!isDataEditable && ( - - (Read-only) - + {/* Show instructions as field label */} + {instructions && ( +
+ + {getFieldLabel(instructions)} + + {isDataEditable && !isDisabled ? ( + renderDataInput() + ) : ( +
+ + {JSON.stringify(currentData, null, 2)} + +
)} - - {isDataEditable && !isDisabled ? ( - renderDataInput() - ) : ( -
- - {JSON.stringify(currentData, null, 2)} - -
- )} -
+
+ )} - {isDisabled && ( + {/* If no instructions, show data directly */} + {!instructions && ( +
+ + Data to Review + {!isDataEditable && ( + + (Read-only) + + )} + + {isDataEditable && !isDisabled ? ( + renderDataInput() + ) : ( +
+ + {JSON.stringify(currentData, null, 2)} + +
+ )} +
+ )} + + {!showSimplified && isDisabled && (
Rejection Reason (Optional): diff --git a/autogpt_platform/frontend/src/components/organisms/PendingReviewsList/PendingReviewsList.tsx b/autogpt_platform/frontend/src/components/organisms/PendingReviewsList/PendingReviewsList.tsx index 320d84e91f..ddc9bab972 100644 --- a/autogpt_platform/frontend/src/components/organisms/PendingReviewsList/PendingReviewsList.tsx +++ b/autogpt_platform/frontend/src/components/organisms/PendingReviewsList/PendingReviewsList.tsx @@ -4,7 +4,7 @@ import { PendingReviewCard } from "@/components/organisms/PendingReviewCard/Pend import { Text } from "@/components/atoms/Text/Text"; import { Button } from "@/components/atoms/Button/Button"; import { useToast } from "@/components/molecules/Toast/use-toast"; -import { ClockIcon, PlayIcon, XIcon, CheckIcon } from "@phosphor-icons/react"; +import { ClockIcon, WarningIcon } from "@phosphor-icons/react"; import { usePostV2ProcessReviewAction } from "@/app/api/__generated__/endpoints/executions/executions"; interface PendingReviewsListProps { @@ -35,9 +35,10 @@ export function PendingReviewsList({ const [reviewMessageMap, setReviewMessageMap] = useState< Record >({}); - const [disabledReviews, setDisabledReviews] = useState>( - new Set(), - ); + + const [pendingAction, setPendingAction] = useState< + "approve" | "reject" | null + >(null); const { toast } = useToast(); @@ -69,9 +70,11 @@ export function PendingReviewsList({ }); } + setPendingAction(null); onReviewComplete?.(); }, onError: (error: Error) => { + setPendingAction(null); toast({ title: "Failed to process reviews", description: error.message || "An error occurred", @@ -89,28 +92,7 @@ export function PendingReviewsList({ setReviewMessageMap((prev) => ({ ...prev, [nodeExecId]: message })); } - function handleToggleDisabled(nodeExecId: string) { - setDisabledReviews((prev) => { - const newSet = new Set(prev); - if (newSet.has(nodeExecId)) { - newSet.delete(nodeExecId); - } else { - newSet.add(nodeExecId); - } - return newSet; - }); - } - - function handleApproveAll() { - setDisabledReviews(new Set()); - } - - function handleRejectAll() { - const allReviewIds = reviews.map((review) => review.node_exec_id); - setDisabledReviews(new Set(allReviewIds)); - } - - function handleContinue() { + function processReviews(approved: boolean) { if (reviews.length === 0) { toast({ title: "No reviews to process", @@ -120,34 +102,34 @@ export function PendingReviewsList({ return; } + setPendingAction(approved ? "approve" : "reject"); const reviewItems = []; for (const review of reviews) { - const isApproved = !disabledReviews.has(review.node_exec_id); const reviewData = reviewDataMap[review.node_exec_id]; const reviewMessage = reviewMessageMap[review.node_exec_id]; - let parsedData; - if (isApproved && review.editable && reviewData) { + let parsedData: any = review.payload; // Default to original payload + + // Parse edited data if available and editable + if (review.editable && reviewData) { try { parsedData = JSON.parse(reviewData); - if (JSON.stringify(parsedData) === JSON.stringify(review.payload)) { - parsedData = undefined; - } } catch (error) { toast({ title: "Invalid JSON", description: `Please fix the JSON format in review for node ${review.node_exec_id}: ${error instanceof Error ? error.message : "Invalid syntax"}`, variant: "destructive", }); + setPendingAction(null); return; } } reviewItems.push({ node_exec_id: review.node_exec_id, - approved: isApproved, - reviewed_data: isApproved ? 
parsedData : undefined, + approved, + reviewed_data: parsedData, message: reviewMessage || undefined, }); } @@ -175,71 +157,67 @@ export function PendingReviewsList({ } return ( -
-
+
+ {/* Warning Box Header */} +
+
+ + + Your review is needed + +
+ + This task is paused until you approve the changes below. Please review + and edit if needed. + +
+ +
{reviews.map((review) => ( ))}
-
-
- - -
+
+ + Note: Changes you make here apply only to this task + -
-
- - {disabledReviews.size > 0 ? ( - <> - Approve {reviews.length - disabledReviews.size}, reject{" "} - {disabledReviews.size} of {reviews.length} items - - ) : ( - <>Approve all {reviews.length} items - )} - -
+
+
diff --git a/autogpt_platform/frontend/src/components/styles/colors.ts b/autogpt_platform/frontend/src/components/styles/colors.ts index 4e37b9bdd5..cfb25ac107 100644 --- a/autogpt_platform/frontend/src/components/styles/colors.ts +++ b/autogpt_platform/frontend/src/components/styles/colors.ts @@ -48,13 +48,15 @@ export const colors = { 900: "#6b3900", }, yellow: { + 25: "#FFFCF3", 50: "#fef9e6", 100: "#fcebb0", + 150: "#FDEFBF", 200: "#fae28a", 300: "#f8d554", 400: "#f7cd33", 500: "#f5c000", - 600: "#dfaf00", + 600: "#DFAF00", 700: "#ae8800", 800: "#876a00", 900: "#675100", diff --git a/autogpt_platform/frontend/src/components/molecules/FloatingSafeModeToggle/FloatingSafeModeToggle.tsx b/autogpt_platform/frontend/src/hooks/useAgentSafeMode.ts similarity index 65% rename from autogpt_platform/frontend/src/components/molecules/FloatingSafeModeToggle/FloatingSafeModeToggle.tsx rename to autogpt_platform/frontend/src/hooks/useAgentSafeMode.ts index a10367177f..654ef858b6 100644 --- a/autogpt_platform/frontend/src/components/molecules/FloatingSafeModeToggle/FloatingSafeModeToggle.tsx +++ b/autogpt_platform/frontend/src/hooks/useAgentSafeMode.ts @@ -1,22 +1,14 @@ +import { useCallback, useState, useEffect } from "react"; import { usePatchV1UpdateGraphSettings } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { getGetV2GetLibraryAgentQueryOptions, useGetV2GetLibraryAgentByGraphId, } from "@/app/api/__generated__/endpoints/library/library"; +import { useToast } from "@/components/molecules/Toast/use-toast"; import { GraphModel } from "@/app/api/__generated__/models/graphModel"; import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; -import { Button } from "@/components/atoms/Button/Button"; -import { - Tooltip, - TooltipContent, - TooltipTrigger, -} from "@/components/atoms/Tooltip/BaseTooltip"; -import { useToast } from "@/components/molecules/Toast/use-toast"; -import { Graph } from "@/lib/autogpt-server-api/types"; -import { cn } from "@/lib/utils"; -import { ShieldCheckIcon, ShieldIcon } from "@phosphor-icons/react"; import { useQueryClient } from "@tanstack/react-query"; -import { useCallback, useEffect, useState } from "react"; +import { Graph } from "@/lib/autogpt-server-api/types"; function getGraphId(graph: GraphModel | LibraryAgent | Graph): string { if ("graph_id" in graph) return graph.graph_id || ""; @@ -41,19 +33,7 @@ function isLibraryAgent( return "graph_id" in graph && "settings" in graph; } -interface FloatingSafeModeToggleProps { - graph: GraphModel | LibraryAgent | Graph; - className?: string; - fullWidth?: boolean; - variant?: "white" | "black"; -} - -export function FloatingSafeModeToggle({ - graph, - className, - fullWidth = false, - variant = "white", -}: FloatingSafeModeToggleProps) { +export function useAgentSafeMode(graph: GraphModel | LibraryAgent | Graph) { const { toast } = useToast(); const queryClient = useQueryClient(); @@ -120,6 +100,7 @@ export function FloatingSafeModeToggle({ description: newSafeMode ? "Human-in-the-loop blocks will require manual review" : "Human-in-the-loop blocks will proceed automatically", + duration: 2000, }); } catch (error) { const isNotFoundError = @@ -154,53 +135,12 @@ export function FloatingSafeModeToggle({ toast, ]); - if (!shouldShowToggle || isStateUndetermined) { - return null; - } - - return ( -
- - - - - -
-
- Safe Mode: {currentSafeMode! ? "ON" : "OFF"} -
-
- {currentSafeMode! - ? "HITL blocks require manual review" - : "HITL blocks proceed automatically"} -
-
-
-
-
- ); + return { + currentSafeMode, + isPending, + shouldShowToggle, + isStateUndetermined, + handleToggle, + hasHITLBlocks: shouldShowToggle, + }; } diff --git a/autogpt_platform/frontend/src/hooks/useExecutionEvents.ts b/autogpt_platform/frontend/src/hooks/useExecutionEvents.ts index 9af2b8aead..ac24396a20 100644 --- a/autogpt_platform/frontend/src/hooks/useExecutionEvents.ts +++ b/autogpt_platform/frontend/src/hooks/useExecutionEvents.ts @@ -63,21 +63,16 @@ export function useExecutionEvents({ if (subscribedIds.has(id)) return; subscribedIds.add(id); - api - .subscribeToGraphExecutions(id as GraphID) - .then(() => { - console.debug(`Subscribed to execution updates for graph ${id}`); - }) - .catch((error) => { - console.error( - `Failed to subscribe to execution updates for graph ${id}:`, - error, - ); - Sentry.captureException(error, { - tags: { graphId: id }, - }); - subscribedIds.delete(id); + api.subscribeToGraphExecutions(id as GraphID).catch((error) => { + console.error( + `Failed to subscribe to execution updates for graph ${id}:`, + error, + ); + Sentry.captureException(error, { + tags: { graphId: id }, }); + subscribedIds.delete(id); + }); }); }); From de78d062a9e938bea2efed7505bf6f586c86919a Mon Sep 17 00:00:00 2001 From: Reinier van der Leer Date: Sat, 20 Dec 2025 21:33:10 +0100 Subject: [PATCH 14/25] refactor(backend/api): Clean up API file structure (#11629) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit We'll soon be needing a more feature-complete external API. To make way for this, I'm moving some files around so: - We can more easily create new versions of our external API - The file structure of our internal API is more homogeneous These changes are quite opinionated, but IMO in any case they're better than the chaotic structure we have now. 
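For illustration, the import changes this restructuring implies look roughly like this (a sketch of the new layout, not an excerpt from the diff; the relative import is shown as it would appear inside a module such as `backend/api/features/admin/credit_admin_routes.py`):

```python
# Before: absolute imports rooted at backend.server
# import backend.server.v2.store.model as store_model
# from backend.server.v2.admin.model import AddUserCreditsResponse, UserHistoryResponse

# After: the same modules live under backend.api.features
import backend.api.features.store.model as store_model

# Sibling modules within a feature package now use relative imports,
# e.g. inside backend/api/features/admin/credit_admin_routes.py:
from .model import AddUserCreditsResponse, UserHistoryResponse
```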
### Changes 🏗️ - Move `backend/server` -> `backend/api` - Move `backend/server/routers` + `backend/server/v2` -> `backend/api/features` - Change absolute sibling imports to relative imports - Move `backend/server/v2/AutoMod` -> `backend/executor/automod` - Combine `backend/server/routers/analytics_*test.py` -> `backend/api/features/analytics_test.py` - Sort OpenAPI spec file ### Checklist 📋 #### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - CI tests - [x] Clicking around in the app -> no obvious breakage --- .../autogpt_libs/autogpt_libs/auth/helpers.py | 12 +- autogpt_platform/backend/TESTING.md | 4 +- .../backend/{server => api}/__init__.py | 0 .../backend/{server => api}/conftest.py | 0 .../backend/{server => api}/conn_manager.py | 2 +- .../{server => api}/conn_manager_test.py | 4 +- .../backend/api/external/fastapi_app.py | 25 + .../{server => api}/external/middleware.py | 0 .../routes => api/external/v1}/__init__.py | 0 .../external/v1}/integrations.py | 4 +- .../v1.py => api/external/v1/routes.py} | 12 +- .../routes => api/external/v1}/tools.py | 8 +- .../postmark => api/features}/__init__.py | 0 .../features/admin}/__init__.py | 0 .../features}/admin/credit_admin_routes.py | 3 +- .../admin/credit_admin_routes_test.py | 33 +- .../admin/execution_analytics_routes.py | 0 .../v2 => api/features}/admin/model.py | 0 .../features}/admin/store_admin_routes.py | 24 +- .../routers => api/features}/analytics.py | 3 +- .../backend/api/features/analytics_test.py | 340 + .../features/builder}/__init__.py | 0 .../{server/v2 => api/features}/builder/db.py | 15 +- .../v2 => api/features}/builder/model.py | 4 +- .../v2 => api/features}/builder/routes.py | 5 +- .../backend/api/features/chat/__init__.py | 0 .../v2 => api/features}/chat/config.py | 0 .../{server/v2 => api/features}/chat/model.py | 3 +- .../v2 => api/features}/chat/model_test.py | 2 +- .../features}/chat/prompts/chat_system.md | 0 .../features}/chat/response_model.py | 0 .../v2 => api/features}/chat/routes.py | 5 +- .../v2 => api/features}/chat/service.py | 13 +- .../v2 => api/features}/chat/service_test.py | 4 +- .../features}/chat/tools/__init__.py | 4 +- .../features}/chat/tools/_test_data.py | 4 +- .../v2 => api/features}/chat/tools/base.py | 4 +- .../features}/chat/tools/find_agent.py | 11 +- .../v2 => api/features}/chat/tools/models.py | 0 .../features}/chat/tools/run_agent.py | 23 +- .../features}/chat/tools/run_agent_test.py | 4 +- .../v2 => api/features}/chat/tools/utils.py | 6 +- .../api/features/executions/__init__.py | 0 .../features/executions/review/__init__.py | 0 .../features}/executions/review/model.py | 0 .../executions/review/review_routes_test.py | 49 +- .../features}/executions/review/routes.py | 7 +- .../api/features/integrations/__init__.py | 0 .../features}/integrations/models.py | 0 .../features}/integrations/router.py | 11 +- .../backend/api/features/library/__init__.py | 0 .../{server/v2 => api/features}/library/db.py | 10 +- .../v2 => api/features}/library/db_test.py | 17 +- .../v2 => api/features}/library/model.py | 0 .../v2 => api/features}/library/model_test.py | 2 +- .../features}/library/routes/__init__.py | 0 .../features}/library/routes/agents.py | 7 +- .../features}/library/routes/presets.py | 5 +- .../features}/library/routes_test.py | 19 +- .../{server/routers => api/features}/oauth.py | 4 +- .../routers => api/features}/oauth_test.py | 2 +- 
.../backend/api/features/otto/__init__.py | 0 .../v2 => api/features}/otto/models.py | 0 .../v2 => api/features}/otto/routes.py | 0 .../v2 => api/features}/otto/routes_test.py | 6 +- .../v2 => api/features}/otto/service.py | 0 .../backend/api/features/postmark/__init__.py | 0 .../features}/postmark/models.py | 0 .../features}/postmark/postmark.py | 7 +- .../v2 => api/features}/store/README.md | 0 .../backend/api/features/store/__init__.py | 0 .../v2 => api/features}/store/cache.py | 13 +- .../{server/v2 => api/features}/store/db.py | 173 +- .../v2 => api/features}/store/db_test.py | 4 +- .../v2 => api/features}/store/exceptions.py | 0 .../v2 => api/features}/store/image_gen.py | 0 .../v2 => api/features}/store/media.py | 45 +- .../v2 => api/features}/store/media_test.py | 50 +- .../v2 => api/features}/store/model.py | 0 .../v2 => api/features}/store/model_test.py | 34 +- .../v2 => api/features}/store/routes.py | 86 +- .../v2 => api/features}/store/routes_test.py | 154 +- .../features}/store/test_cache_delete.py | 11 +- .../{server/routers => api/features}/v1.py | 155 +- .../routers => api/features}/v1_test.py | 60 +- .../{server => api}/middleware/security.py | 0 .../middleware/security_test.py | 2 +- .../backend/backend/{server => api}/model.py | 0 .../backend/{server => api}/rest_api.py | 122 +- .../backend/{server => api}/test_helpers.py | 0 .../{server => api}/utils/api_key_auth.py | 0 .../utils/api_key_auth_test.py | 2 +- .../backend/{server => api}/utils/cors.py | 0 .../{server => api}/utils/cors_test.py | 2 +- .../backend/backend/api/utils/openapi.py | 41 + .../backend/backend/{server => api}/ws_api.py | 16 +- .../backend/{server => api}/ws_api_test.py | 26 +- autogpt_platform/backend/backend/app.py | 4 +- .../blocks/test/test_smart_decision_maker.py | 4 +- autogpt_platform/backend/backend/cli.py | 6 +- .../backend/cli/generate_openapi_json.py | 4 +- .../backend/backend/data/__init__.py | 2 +- .../backend/backend/data/credit.py | 2 +- autogpt_platform/backend/backend/data/db.py | 2 +- .../backend/backend/data/graph_test.py | 4 +- .../backend/backend/data/human_review.py | 2 +- .../backend/backend/data/integrations.py | 6 +- .../backend/backend/data/notification_bus.py | 2 +- .../backend/backend/data/onboarding.py | 4 +- .../AutoMod => executor/automod}/__init__.py | 0 .../AutoMod => executor/automod}/manager.py | 7 +- .../v2/AutoMod => executor/automod}/models.py | 0 .../backend/backend/executor/database.py | 7 +- .../backend/backend/executor/manager.py | 36 +- .../backend/backend/executor/manager_test.py | 18 +- .../backend/executor/scheduler_test.py | 2 +- .../backend/integrations/webhooks/utils.py | 4 +- autogpt_platform/backend/backend/rest.py | 2 +- .../backend/backend/server/external/api.py | 29 - .../server/routers/analytics_improved_test.py | 150 - .../routers/analytics_parametrized_test.py | 115 - .../backend/server/routers/analytics_test.py | 284 - autogpt_platform/backend/backend/util/test.py | 2 +- .../backend/backend/util/virus_scanner.py | 2 +- .../backend/util/virus_scanner_test.py | 2 +- autogpt_platform/backend/backend/ws.py | 2 +- .../backend/test/e2e_test_data.py | 12 +- .../app/(platform)/auth/authorize/page.tsx | 2 +- .../frontend/src/app/api/openapi.json | 8817 +++++++++-------- 129 files changed, 5527 insertions(+), 5731 deletions(-) rename autogpt_platform/backend/backend/{server => api}/__init__.py (100%) rename autogpt_platform/backend/backend/{server => api}/conftest.py (100%) rename autogpt_platform/backend/backend/{server => api}/conn_manager.py (98%) 
rename autogpt_platform/backend/backend/{server => api}/conn_manager_test.py (98%) create mode 100644 autogpt_platform/backend/backend/api/external/fastapi_app.py rename autogpt_platform/backend/backend/{server => api}/external/middleware.py (100%) rename autogpt_platform/backend/backend/{server/external/routes => api/external/v1}/__init__.py (100%) rename autogpt_platform/backend/backend/{server/external/routes => api/external/v1}/integrations.py (99%) rename autogpt_platform/backend/backend/{server/external/routes/v1.py => api/external/v1/routes.py} (96%) rename autogpt_platform/backend/backend/{server/external/routes => api/external/v1}/tools.py (94%) rename autogpt_platform/backend/backend/{server/routers/postmark => api/features}/__init__.py (100%) rename autogpt_platform/backend/backend/{server/v2/library => api/features/admin}/__init__.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/admin/credit_admin_routes.py (96%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/admin/credit_admin_routes_test.py (90%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/admin/execution_analytics_routes.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/admin/model.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/admin/store_admin_routes.py (84%) rename autogpt_platform/backend/backend/{server/routers => api/features}/analytics.py (94%) create mode 100644 autogpt_platform/backend/backend/api/features/analytics_test.py rename autogpt_platform/backend/backend/{server/v2/store => api/features/builder}/__init__.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/builder/db.py (98%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/builder/model.py (93%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/builder/routes.py (98%) create mode 100644 autogpt_platform/backend/backend/api/features/chat/__init__.py rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/config.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/model.py (99%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/model_test.py (97%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/prompts/chat_system.md (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/response_model.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/routes.py (98%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/service.py (98%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/service_test.py (95%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/tools/__init__.py (87%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/tools/_test_data.py (99%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/tools/base.py (96%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/tools/find_agent.py (95%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/tools/models.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/tools/run_agent.py (98%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/tools/run_agent_test.py (99%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/tools/utils.py (98%) create mode 100644 
autogpt_platform/backend/backend/api/features/executions/__init__.py create mode 100644 autogpt_platform/backend/backend/api/features/executions/review/__init__.py rename autogpt_platform/backend/backend/{server/v2 => api/features}/executions/review/model.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/executions/review/review_routes_test.py (87%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/executions/review/routes.py (98%) create mode 100644 autogpt_platform/backend/backend/api/features/integrations/__init__.py rename autogpt_platform/backend/backend/{server => api/features}/integrations/models.py (100%) rename autogpt_platform/backend/backend/{server => api/features}/integrations/router.py (99%) create mode 100644 autogpt_platform/backend/backend/api/features/library/__init__.py rename autogpt_platform/backend/backend/{server/v2 => api/features}/library/db.py (99%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/library/db_test.py (94%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/library/model.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/library/model_test.py (95%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/library/routes/__init__.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/library/routes/agents.py (98%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/library/routes/presets.py (99%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/library/routes_test.py (93%) rename autogpt_platform/backend/backend/{server/routers => api/features}/oauth.py (99%) rename autogpt_platform/backend/backend/{server/routers => api/features}/oauth_test.py (99%) create mode 100644 autogpt_platform/backend/backend/api/features/otto/__init__.py rename autogpt_platform/backend/backend/{server/v2 => api/features}/otto/models.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/otto/routes.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/otto/routes_test.py (97%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/otto/service.py (100%) create mode 100644 autogpt_platform/backend/backend/api/features/postmark/__init__.py rename autogpt_platform/backend/backend/{server/routers => api/features}/postmark/models.py (100%) rename autogpt_platform/backend/backend/{server/routers => api/features}/postmark/postmark.py (96%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/README.md (100%) create mode 100644 autogpt_platform/backend/backend/api/features/store/__init__.py rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/cache.py (85%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/db.py (92%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/db_test.py (99%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/exceptions.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/image_gen.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/media.py (81%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/media_test.py (75%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/model.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/model_test.py (83%) rename 
autogpt_platform/backend/backend/{server/v2 => api/features}/store/routes.py (87%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/routes_test.py (76%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/test_cache_delete.py (96%) rename autogpt_platform/backend/backend/{server/routers => api/features}/v1.py (98%) rename autogpt_platform/backend/backend/{server/routers => api/features}/v1_test.py (91%) rename autogpt_platform/backend/backend/{server => api}/middleware/security.py (100%) rename autogpt_platform/backend/backend/{server => api}/middleware/security_test.py (98%) rename autogpt_platform/backend/backend/{server => api}/model.py (100%) rename autogpt_platform/backend/backend/{server => api}/rest_api.py (78%) rename autogpt_platform/backend/backend/{server => api}/test_helpers.py (100%) rename autogpt_platform/backend/backend/{server => api}/utils/api_key_auth.py (100%) rename autogpt_platform/backend/backend/{server => api}/utils/api_key_auth_test.py (99%) rename autogpt_platform/backend/backend/{server => api}/utils/cors.py (100%) rename autogpt_platform/backend/backend/{server => api}/utils/cors_test.py (97%) create mode 100644 autogpt_platform/backend/backend/api/utils/openapi.py rename autogpt_platform/backend/backend/{server => api}/ws_api.py (98%) rename autogpt_platform/backend/backend/{server => api}/ws_api_test.py (92%) rename autogpt_platform/backend/backend/{server/v2/AutoMod => executor/automod}/__init__.py (100%) rename autogpt_platform/backend/backend/{server/v2/AutoMod => executor/automod}/manager.py (99%) rename autogpt_platform/backend/backend/{server/v2/AutoMod => executor/automod}/models.py (100%) delete mode 100644 autogpt_platform/backend/backend/server/external/api.py delete mode 100644 autogpt_platform/backend/backend/server/routers/analytics_improved_test.py delete mode 100644 autogpt_platform/backend/backend/server/routers/analytics_parametrized_test.py delete mode 100644 autogpt_platform/backend/backend/server/routers/analytics_test.py diff --git a/autogpt_platform/autogpt_libs/autogpt_libs/auth/helpers.py b/autogpt_platform/autogpt_libs/autogpt_libs/auth/helpers.py index d3d571d73c..10101778e7 100644 --- a/autogpt_platform/autogpt_libs/autogpt_libs/auth/helpers.py +++ b/autogpt_platform/autogpt_libs/autogpt_libs/auth/helpers.py @@ -1,29 +1,25 @@ from fastapi import FastAPI -from fastapi.openapi.utils import get_openapi from .jwt_utils import bearer_jwt_auth def add_auth_responses_to_openapi(app: FastAPI) -> None: """ - Set up custom OpenAPI schema generation that adds 401 responses + Patch a FastAPI instance's `openapi()` method to add 401 responses to all authenticated endpoints. This is needed when using HTTPBearer with auto_error=False to get proper 401 responses instead of 403, but FastAPI only automatically adds security responses when auto_error=True. 
""" + # Wrap current method to allow stacking OpenAPI schema modifiers like this + wrapped_openapi = app.openapi def custom_openapi(): if app.openapi_schema: return app.openapi_schema - openapi_schema = get_openapi( - title=app.title, - version=app.version, - description=app.description, - routes=app.routes, - ) + openapi_schema = wrapped_openapi() # Add 401 response to all endpoints that have security requirements for path, methods in openapi_schema["paths"].items(): diff --git a/autogpt_platform/backend/TESTING.md b/autogpt_platform/backend/TESTING.md index 39fe4611b4..a3a5db68ef 100644 --- a/autogpt_platform/backend/TESTING.md +++ b/autogpt_platform/backend/TESTING.md @@ -108,7 +108,7 @@ import fastapi.testclient import pytest from pytest_snapshot.plugin import Snapshot -from backend.server.v2.myroute import router +from backend.api.features.myroute import router app = fastapi.FastAPI() app.include_router(router) @@ -149,7 +149,7 @@ These provide the easiest way to set up authentication mocking in test modules: import fastapi import fastapi.testclient import pytest -from backend.server.v2.myroute import router +from backend.api.features.myroute import router app = fastapi.FastAPI() app.include_router(router) diff --git a/autogpt_platform/backend/backend/server/__init__.py b/autogpt_platform/backend/backend/api/__init__.py similarity index 100% rename from autogpt_platform/backend/backend/server/__init__.py rename to autogpt_platform/backend/backend/api/__init__.py diff --git a/autogpt_platform/backend/backend/server/conftest.py b/autogpt_platform/backend/backend/api/conftest.py similarity index 100% rename from autogpt_platform/backend/backend/server/conftest.py rename to autogpt_platform/backend/backend/api/conftest.py diff --git a/autogpt_platform/backend/backend/server/conn_manager.py b/autogpt_platform/backend/backend/api/conn_manager.py similarity index 98% rename from autogpt_platform/backend/backend/server/conn_manager.py rename to autogpt_platform/backend/backend/api/conn_manager.py index 8d65117564..52e0f50f69 100644 --- a/autogpt_platform/backend/backend/server/conn_manager.py +++ b/autogpt_platform/backend/backend/api/conn_manager.py @@ -3,12 +3,12 @@ from typing import Dict, Set from fastapi import WebSocket +from backend.api.model import NotificationPayload, WSMessage, WSMethod from backend.data.execution import ( ExecutionEventType, GraphExecutionEvent, NodeExecutionEvent, ) -from backend.server.model import NotificationPayload, WSMessage, WSMethod _EVENT_TYPE_TO_METHOD_MAP: dict[ExecutionEventType, WSMethod] = { ExecutionEventType.GRAPH_EXEC_UPDATE: WSMethod.GRAPH_EXECUTION_EVENT, diff --git a/autogpt_platform/backend/backend/server/conn_manager_test.py b/autogpt_platform/backend/backend/api/conn_manager_test.py similarity index 98% rename from autogpt_platform/backend/backend/server/conn_manager_test.py rename to autogpt_platform/backend/backend/api/conn_manager_test.py index 379928fae7..71dbc0ffee 100644 --- a/autogpt_platform/backend/backend/server/conn_manager_test.py +++ b/autogpt_platform/backend/backend/api/conn_manager_test.py @@ -4,13 +4,13 @@ from unittest.mock import AsyncMock import pytest from fastapi import WebSocket +from backend.api.conn_manager import ConnectionManager +from backend.api.model import NotificationPayload, WSMessage, WSMethod from backend.data.execution import ( ExecutionStatus, GraphExecutionEvent, NodeExecutionEvent, ) -from backend.server.conn_manager import ConnectionManager -from backend.server.model import NotificationPayload, 
WSMessage, WSMethod @pytest.fixture diff --git a/autogpt_platform/backend/backend/api/external/fastapi_app.py b/autogpt_platform/backend/backend/api/external/fastapi_app.py new file mode 100644 index 0000000000..b55c918a74 --- /dev/null +++ b/autogpt_platform/backend/backend/api/external/fastapi_app.py @@ -0,0 +1,25 @@ +from fastapi import FastAPI + +from backend.api.middleware.security import SecurityHeadersMiddleware +from backend.monitoring.instrumentation import instrument_fastapi + +from .v1.routes import v1_router + +external_api = FastAPI( + title="AutoGPT External API", + description="External API for AutoGPT integrations", + docs_url="/docs", + version="1.0", +) + +external_api.add_middleware(SecurityHeadersMiddleware) +external_api.include_router(v1_router, prefix="/v1") + +# Add Prometheus instrumentation +instrument_fastapi( + external_api, + service_name="external-api", + expose_endpoint=True, + endpoint="/metrics", + include_in_schema=True, +) diff --git a/autogpt_platform/backend/backend/server/external/middleware.py b/autogpt_platform/backend/backend/api/external/middleware.py similarity index 100% rename from autogpt_platform/backend/backend/server/external/middleware.py rename to autogpt_platform/backend/backend/api/external/middleware.py diff --git a/autogpt_platform/backend/backend/server/external/routes/__init__.py b/autogpt_platform/backend/backend/api/external/v1/__init__.py similarity index 100% rename from autogpt_platform/backend/backend/server/external/routes/__init__.py rename to autogpt_platform/backend/backend/api/external/v1/__init__.py diff --git a/autogpt_platform/backend/backend/server/external/routes/integrations.py b/autogpt_platform/backend/backend/api/external/v1/integrations.py similarity index 99% rename from autogpt_platform/backend/backend/server/external/routes/integrations.py rename to autogpt_platform/backend/backend/api/external/v1/integrations.py index f9a8875ada..a3df481a67 100644 --- a/autogpt_platform/backend/backend/server/external/routes/integrations.py +++ b/autogpt_platform/backend/backend/api/external/v1/integrations.py @@ -16,6 +16,8 @@ from fastapi import APIRouter, Body, HTTPException, Path, Security, status from prisma.enums import APIKeyPermission from pydantic import BaseModel, Field, SecretStr +from backend.api.external.middleware import require_permission +from backend.api.features.integrations.models import get_all_provider_names from backend.data.auth.base import APIAuthorizationInfo from backend.data.model import ( APIKeyCredentials, @@ -28,8 +30,6 @@ from backend.data.model import ( from backend.integrations.creds_manager import IntegrationCredentialsManager from backend.integrations.oauth import CREDENTIALS_BY_PROVIDER, HANDLERS_BY_NAME from backend.integrations.providers import ProviderName -from backend.server.external.middleware import require_permission -from backend.server.integrations.models import get_all_provider_names from backend.util.settings import Settings if TYPE_CHECKING: diff --git a/autogpt_platform/backend/backend/server/external/routes/v1.py b/autogpt_platform/backend/backend/api/external/v1/routes.py similarity index 96% rename from autogpt_platform/backend/backend/server/external/routes/v1.py rename to autogpt_platform/backend/backend/api/external/v1/routes.py index f83673465a..58e15dc6a3 100644 --- a/autogpt_platform/backend/backend/server/external/routes/v1.py +++ b/autogpt_platform/backend/backend/api/external/v1/routes.py @@ -8,23 +8,29 @@ from prisma.enums import AgentExecutionStatus, 
APIKeyPermission from pydantic import BaseModel, Field from typing_extensions import TypedDict +import backend.api.features.store.cache as store_cache +import backend.api.features.store.model as store_model import backend.data.block -import backend.server.v2.store.cache as store_cache -import backend.server.v2.store.model as store_model +from backend.api.external.middleware import require_permission from backend.data import execution as execution_db from backend.data import graph as graph_db from backend.data import user as user_db from backend.data.auth.base import APIAuthorizationInfo from backend.data.block import BlockInput, CompletedBlockOutput from backend.executor.utils import add_graph_execution -from backend.server.external.middleware import require_permission from backend.util.settings import Settings +from .integrations import integrations_router +from .tools import tools_router + settings = Settings() logger = logging.getLogger(__name__) v1_router = APIRouter() +v1_router.include_router(integrations_router) +v1_router.include_router(tools_router) + class UserInfoResponse(BaseModel): id: str diff --git a/autogpt_platform/backend/backend/server/external/routes/tools.py b/autogpt_platform/backend/backend/api/external/v1/tools.py similarity index 94% rename from autogpt_platform/backend/backend/server/external/routes/tools.py rename to autogpt_platform/backend/backend/api/external/v1/tools.py index 8e3f4cbfdb..9e362fb32c 100644 --- a/autogpt_platform/backend/backend/server/external/routes/tools.py +++ b/autogpt_platform/backend/backend/api/external/v1/tools.py @@ -14,11 +14,11 @@ from fastapi import APIRouter, Security from prisma.enums import APIKeyPermission from pydantic import BaseModel, Field +from backend.api.external.middleware import require_permission +from backend.api.features.chat.model import ChatSession +from backend.api.features.chat.tools import find_agent_tool, run_agent_tool +from backend.api.features.chat.tools.models import ToolResponseBase from backend.data.auth.base import APIAuthorizationInfo -from backend.server.external.middleware import require_permission -from backend.server.v2.chat.model import ChatSession -from backend.server.v2.chat.tools import find_agent_tool, run_agent_tool -from backend.server.v2.chat.tools.models import ToolResponseBase logger = logging.getLogger(__name__) diff --git a/autogpt_platform/backend/backend/server/routers/postmark/__init__.py b/autogpt_platform/backend/backend/api/features/__init__.py similarity index 100% rename from autogpt_platform/backend/backend/server/routers/postmark/__init__.py rename to autogpt_platform/backend/backend/api/features/__init__.py diff --git a/autogpt_platform/backend/backend/server/v2/library/__init__.py b/autogpt_platform/backend/backend/api/features/admin/__init__.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/library/__init__.py rename to autogpt_platform/backend/backend/api/features/admin/__init__.py diff --git a/autogpt_platform/backend/backend/server/v2/admin/credit_admin_routes.py b/autogpt_platform/backend/backend/api/features/admin/credit_admin_routes.py similarity index 96% rename from autogpt_platform/backend/backend/server/v2/admin/credit_admin_routes.py rename to autogpt_platform/backend/backend/api/features/admin/credit_admin_routes.py index e4ea2c7f32..8930172c7f 100644 --- a/autogpt_platform/backend/backend/server/v2/admin/credit_admin_routes.py +++ b/autogpt_platform/backend/backend/api/features/admin/credit_admin_routes.py @@ -6,9 +6,10 @@ from 
fastapi import APIRouter, Body, Security from prisma.enums import CreditTransactionType from backend.data.credit import admin_get_user_history, get_user_credit_model -from backend.server.v2.admin.model import AddUserCreditsResponse, UserHistoryResponse from backend.util.json import SafeJson +from .model import AddUserCreditsResponse, UserHistoryResponse + logger = logging.getLogger(__name__) diff --git a/autogpt_platform/backend/backend/server/v2/admin/credit_admin_routes_test.py b/autogpt_platform/backend/backend/api/features/admin/credit_admin_routes_test.py similarity index 90% rename from autogpt_platform/backend/backend/server/v2/admin/credit_admin_routes_test.py rename to autogpt_platform/backend/backend/api/features/admin/credit_admin_routes_test.py index 0248da352f..db2d3cb41a 100644 --- a/autogpt_platform/backend/backend/server/v2/admin/credit_admin_routes_test.py +++ b/autogpt_platform/backend/backend/api/features/admin/credit_admin_routes_test.py @@ -9,14 +9,15 @@ import pytest_mock from autogpt_libs.auth.jwt_utils import get_jwt_payload from pytest_snapshot.plugin import Snapshot -import backend.server.v2.admin.credit_admin_routes as credit_admin_routes -import backend.server.v2.admin.model as admin_model from backend.data.model import UserTransaction from backend.util.json import SafeJson from backend.util.models import Pagination +from .credit_admin_routes import router as credit_admin_router +from .model import UserHistoryResponse + app = fastapi.FastAPI() -app.include_router(credit_admin_routes.router) +app.include_router(credit_admin_router) client = fastapi.testclient.TestClient(app) @@ -30,7 +31,7 @@ def setup_app_admin_auth(mock_jwt_admin): def test_add_user_credits_success( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, configured_snapshot: Snapshot, admin_user_id: str, target_user_id: str, @@ -42,7 +43,7 @@ def test_add_user_credits_success( return_value=(1500, "transaction-123-uuid") ) mocker.patch( - "backend.server.v2.admin.credit_admin_routes.get_user_credit_model", + "backend.api.features.admin.credit_admin_routes.get_user_credit_model", return_value=mock_credit_model, ) @@ -84,7 +85,7 @@ def test_add_user_credits_success( def test_add_user_credits_negative_amount( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, snapshot: Snapshot, ) -> None: """Test credit deduction by admin (negative amount)""" @@ -94,7 +95,7 @@ def test_add_user_credits_negative_amount( return_value=(200, "transaction-456-uuid") ) mocker.patch( - "backend.server.v2.admin.credit_admin_routes.get_user_credit_model", + "backend.api.features.admin.credit_admin_routes.get_user_credit_model", return_value=mock_credit_model, ) @@ -119,12 +120,12 @@ def test_add_user_credits_negative_amount( def test_get_user_history_success( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, snapshot: Snapshot, ) -> None: """Test successful retrieval of user credit history""" # Mock the admin_get_user_history function - mock_history_response = admin_model.UserHistoryResponse( + mock_history_response = UserHistoryResponse( history=[ UserTransaction( user_id="user-1", @@ -150,7 +151,7 @@ def test_get_user_history_success( ) mocker.patch( - "backend.server.v2.admin.credit_admin_routes.admin_get_user_history", + "backend.api.features.admin.credit_admin_routes.admin_get_user_history", return_value=mock_history_response, ) @@ -170,12 +171,12 @@ def test_get_user_history_success( def test_get_user_history_with_filters( - mocker: pytest_mock.MockFixture, + 
mocker: pytest_mock.MockerFixture, snapshot: Snapshot, ) -> None: """Test user credit history with search and filter parameters""" # Mock the admin_get_user_history function - mock_history_response = admin_model.UserHistoryResponse( + mock_history_response = UserHistoryResponse( history=[ UserTransaction( user_id="user-3", @@ -194,7 +195,7 @@ def test_get_user_history_with_filters( ) mock_get_history = mocker.patch( - "backend.server.v2.admin.credit_admin_routes.admin_get_user_history", + "backend.api.features.admin.credit_admin_routes.admin_get_user_history", return_value=mock_history_response, ) @@ -230,12 +231,12 @@ def test_get_user_history_with_filters( def test_get_user_history_empty_results( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, snapshot: Snapshot, ) -> None: """Test user credit history with no results""" # Mock empty history response - mock_history_response = admin_model.UserHistoryResponse( + mock_history_response = UserHistoryResponse( history=[], pagination=Pagination( total_items=0, @@ -246,7 +247,7 @@ def test_get_user_history_empty_results( ) mocker.patch( - "backend.server.v2.admin.credit_admin_routes.admin_get_user_history", + "backend.api.features.admin.credit_admin_routes.admin_get_user_history", return_value=mock_history_response, ) diff --git a/autogpt_platform/backend/backend/server/v2/admin/execution_analytics_routes.py b/autogpt_platform/backend/backend/api/features/admin/execution_analytics_routes.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/admin/execution_analytics_routes.py rename to autogpt_platform/backend/backend/api/features/admin/execution_analytics_routes.py diff --git a/autogpt_platform/backend/backend/server/v2/admin/model.py b/autogpt_platform/backend/backend/api/features/admin/model.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/admin/model.py rename to autogpt_platform/backend/backend/api/features/admin/model.py diff --git a/autogpt_platform/backend/backend/server/v2/admin/store_admin_routes.py b/autogpt_platform/backend/backend/api/features/admin/store_admin_routes.py similarity index 84% rename from autogpt_platform/backend/backend/server/v2/admin/store_admin_routes.py rename to autogpt_platform/backend/backend/api/features/admin/store_admin_routes.py index c611c43f5a..9c4b89fee6 100644 --- a/autogpt_platform/backend/backend/server/v2/admin/store_admin_routes.py +++ b/autogpt_platform/backend/backend/api/features/admin/store_admin_routes.py @@ -7,9 +7,9 @@ import fastapi import fastapi.responses import prisma.enums -import backend.server.v2.store.cache as store_cache -import backend.server.v2.store.db -import backend.server.v2.store.model +import backend.api.features.store.cache as store_cache +import backend.api.features.store.db as store_db +import backend.api.features.store.model as store_model import backend.util.json logger = logging.getLogger(__name__) @@ -24,7 +24,7 @@ router = fastapi.APIRouter( @router.get( "/listings", summary="Get Admin Listings History", - response_model=backend.server.v2.store.model.StoreListingsWithVersionsResponse, + response_model=store_model.StoreListingsWithVersionsResponse, ) async def get_admin_listings_with_versions( status: typing.Optional[prisma.enums.SubmissionStatus] = None, @@ -48,7 +48,7 @@ async def get_admin_listings_with_versions( StoreListingsWithVersionsResponse with listings and their versions """ try: - listings = await backend.server.v2.store.db.get_admin_listings_with_versions( + listings = await 
store_db.get_admin_listings_with_versions( status=status, search_query=search, page=page, @@ -68,11 +68,11 @@ async def get_admin_listings_with_versions( @router.post( "/submissions/{store_listing_version_id}/review", summary="Review Store Submission", - response_model=backend.server.v2.store.model.StoreSubmission, + response_model=store_model.StoreSubmission, ) async def review_submission( store_listing_version_id: str, - request: backend.server.v2.store.model.ReviewSubmissionRequest, + request: store_model.ReviewSubmissionRequest, user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id), ): """ @@ -87,12 +87,10 @@ async def review_submission( StoreSubmission with updated review information """ try: - already_approved = ( - await backend.server.v2.store.db.check_submission_already_approved( - store_listing_version_id=store_listing_version_id, - ) + already_approved = await store_db.check_submission_already_approved( + store_listing_version_id=store_listing_version_id, ) - submission = await backend.server.v2.store.db.review_store_submission( + submission = await store_db.review_store_submission( store_listing_version_id=store_listing_version_id, is_approved=request.is_approved, external_comments=request.comments, @@ -136,7 +134,7 @@ async def admin_download_agent_file( Raises: HTTPException: If the agent is not found or an unexpected error occurs. """ - graph_data = await backend.server.v2.store.db.get_agent_as_admin( + graph_data = await store_db.get_agent_as_admin( user_id=user_id, store_listing_version_id=store_listing_version_id, ) diff --git a/autogpt_platform/backend/backend/server/routers/analytics.py b/autogpt_platform/backend/backend/api/features/analytics.py similarity index 94% rename from autogpt_platform/backend/backend/server/routers/analytics.py rename to autogpt_platform/backend/backend/api/features/analytics.py index 98c2dd8e96..73a4590dcb 100644 --- a/autogpt_platform/backend/backend/server/routers/analytics.py +++ b/autogpt_platform/backend/backend/api/features/analytics.py @@ -6,10 +6,11 @@ from typing import Annotated import fastapi import pydantic from autogpt_libs.auth import get_user_id +from autogpt_libs.auth.dependencies import requires_user import backend.data.analytics -router = fastapi.APIRouter() +router = fastapi.APIRouter(dependencies=[fastapi.Security(requires_user)]) logger = logging.getLogger(__name__) diff --git a/autogpt_platform/backend/backend/api/features/analytics_test.py b/autogpt_platform/backend/backend/api/features/analytics_test.py new file mode 100644 index 0000000000..2493bdb7e4 --- /dev/null +++ b/autogpt_platform/backend/backend/api/features/analytics_test.py @@ -0,0 +1,340 @@ +"""Tests for analytics API endpoints.""" + +import json +from unittest.mock import AsyncMock, Mock + +import fastapi +import fastapi.testclient +import pytest +import pytest_mock +from pytest_snapshot.plugin import Snapshot + +from .analytics import router as analytics_router + +app = fastapi.FastAPI() +app.include_router(analytics_router) + +client = fastapi.testclient.TestClient(app) + + +@pytest.fixture(autouse=True) +def setup_app_auth(mock_jwt_user): + """Setup auth overrides for all tests in this module.""" + from autogpt_libs.auth.jwt_utils import get_jwt_payload + + app.dependency_overrides[get_jwt_payload] = mock_jwt_user["get_jwt_payload"] + yield + app.dependency_overrides.clear() + + +# ============================================================================= +# /log_raw_metric endpoint tests +# 
============================================================================= + + +def test_log_raw_metric_success( + mocker: pytest_mock.MockFixture, + configured_snapshot: Snapshot, + test_user_id: str, +) -> None: + """Test successful raw metric logging.""" + mock_result = Mock(id="metric-123-uuid") + mock_log_metric = mocker.patch( + "backend.data.analytics.log_raw_metric", + new_callable=AsyncMock, + return_value=mock_result, + ) + + request_data = { + "metric_name": "page_load_time", + "metric_value": 2.5, + "data_string": "/dashboard", + } + + response = client.post("/log_raw_metric", json=request_data) + + assert response.status_code == 200, f"Unexpected response: {response.text}" + assert response.json() == "metric-123-uuid" + + mock_log_metric.assert_called_once_with( + user_id=test_user_id, + metric_name="page_load_time", + metric_value=2.5, + data_string="/dashboard", + ) + + configured_snapshot.assert_match( + json.dumps({"metric_id": response.json()}, indent=2, sort_keys=True), + "analytics_log_metric_success", + ) + + +@pytest.mark.parametrize( + "metric_value,metric_name,data_string,test_id", + [ + (100, "api_calls_count", "external_api", "integer_value"), + (0, "error_count", "no_errors", "zero_value"), + (-5.2, "temperature_delta", "cooling", "negative_value"), + (1.23456789, "precision_test", "float_precision", "float_precision"), + (999999999, "large_number", "max_value", "large_number"), + (0.0000001, "tiny_number", "min_value", "tiny_number"), + ], +) +def test_log_raw_metric_various_values( + mocker: pytest_mock.MockFixture, + configured_snapshot: Snapshot, + metric_value: float, + metric_name: str, + data_string: str, + test_id: str, +) -> None: + """Test raw metric logging with various metric values.""" + mock_result = Mock(id=f"metric-{test_id}-uuid") + mocker.patch( + "backend.data.analytics.log_raw_metric", + new_callable=AsyncMock, + return_value=mock_result, + ) + + request_data = { + "metric_name": metric_name, + "metric_value": metric_value, + "data_string": data_string, + } + + response = client.post("/log_raw_metric", json=request_data) + + assert response.status_code == 200, f"Failed for {test_id}: {response.text}" + + configured_snapshot.assert_match( + json.dumps( + {"metric_id": response.json(), "test_case": test_id}, + indent=2, + sort_keys=True, + ), + f"analytics_metric_{test_id}", + ) + + +@pytest.mark.parametrize( + "invalid_data,expected_error", + [ + ({}, "Field required"), + ({"metric_name": "test"}, "Field required"), + ( + {"metric_name": "test", "metric_value": "not_a_number", "data_string": "x"}, + "Input should be a valid number", + ), + ( + {"metric_name": "", "metric_value": 1.0, "data_string": "test"}, + "String should have at least 1 character", + ), + ( + {"metric_name": "test", "metric_value": 1.0, "data_string": ""}, + "String should have at least 1 character", + ), + ], + ids=[ + "empty_request", + "missing_metric_value_and_data_string", + "invalid_metric_value_type", + "empty_metric_name", + "empty_data_string", + ], +) +def test_log_raw_metric_validation_errors( + invalid_data: dict, + expected_error: str, +) -> None: + """Test validation errors for invalid metric requests.""" + response = client.post("/log_raw_metric", json=invalid_data) + + assert response.status_code == 422 + error_detail = response.json() + assert "detail" in error_detail, f"Missing 'detail' in error: {error_detail}" + + error_text = json.dumps(error_detail) + assert ( + expected_error in error_text + ), f"Expected '{expected_error}' in error response: 
{error_text}" + + +def test_log_raw_metric_service_error( + mocker: pytest_mock.MockFixture, + test_user_id: str, +) -> None: + """Test error handling when analytics service fails.""" + mocker.patch( + "backend.data.analytics.log_raw_metric", + new_callable=AsyncMock, + side_effect=Exception("Database connection failed"), + ) + + request_data = { + "metric_name": "test_metric", + "metric_value": 1.0, + "data_string": "test", + } + + response = client.post("/log_raw_metric", json=request_data) + + assert response.status_code == 500 + error_detail = response.json()["detail"] + assert "Database connection failed" in error_detail["message"] + assert "hint" in error_detail + + +# ============================================================================= +# /log_raw_analytics endpoint tests +# ============================================================================= + + +def test_log_raw_analytics_success( + mocker: pytest_mock.MockFixture, + configured_snapshot: Snapshot, + test_user_id: str, +) -> None: + """Test successful raw analytics logging.""" + mock_result = Mock(id="analytics-789-uuid") + mock_log_analytics = mocker.patch( + "backend.data.analytics.log_raw_analytics", + new_callable=AsyncMock, + return_value=mock_result, + ) + + request_data = { + "type": "user_action", + "data": { + "action": "button_click", + "button_id": "submit_form", + "timestamp": "2023-01-01T00:00:00Z", + "metadata": {"form_type": "registration", "fields_filled": 5}, + }, + "data_index": "button_click_submit_form", + } + + response = client.post("/log_raw_analytics", json=request_data) + + assert response.status_code == 200, f"Unexpected response: {response.text}" + assert response.json() == "analytics-789-uuid" + + mock_log_analytics.assert_called_once_with( + test_user_id, + "user_action", + request_data["data"], + "button_click_submit_form", + ) + + configured_snapshot.assert_match( + json.dumps({"analytics_id": response.json()}, indent=2, sort_keys=True), + "analytics_log_analytics_success", + ) + + +def test_log_raw_analytics_complex_data( + mocker: pytest_mock.MockFixture, + configured_snapshot: Snapshot, +) -> None: + """Test raw analytics logging with complex nested data structures.""" + mock_result = Mock(id="analytics-complex-uuid") + mocker.patch( + "backend.data.analytics.log_raw_analytics", + new_callable=AsyncMock, + return_value=mock_result, + ) + + request_data = { + "type": "agent_execution", + "data": { + "agent_id": "agent_123", + "execution_id": "exec_456", + "status": "completed", + "duration_ms": 3500, + "nodes_executed": 15, + "blocks_used": [ + {"block_id": "llm_block", "count": 3}, + {"block_id": "http_block", "count": 5}, + {"block_id": "code_block", "count": 2}, + ], + "errors": [], + "metadata": { + "trigger": "manual", + "user_tier": "premium", + "environment": "production", + }, + }, + "data_index": "agent_123_exec_456", + } + + response = client.post("/log_raw_analytics", json=request_data) + + assert response.status_code == 200 + + configured_snapshot.assert_match( + json.dumps( + {"analytics_id": response.json(), "logged_data": request_data["data"]}, + indent=2, + sort_keys=True, + ), + "analytics_log_analytics_complex_data", + ) + + +@pytest.mark.parametrize( + "invalid_data,expected_error", + [ + ({}, "Field required"), + ({"type": "test"}, "Field required"), + ( + {"type": "test", "data": "not_a_dict", "data_index": "test"}, + "Input should be a valid dictionary", + ), + ({"type": "test", "data": {"key": "value"}}, "Field required"), + ], + ids=[ + "empty_request", + 
"missing_data_and_data_index", + "invalid_data_type", + "missing_data_index", + ], +) +def test_log_raw_analytics_validation_errors( + invalid_data: dict, + expected_error: str, +) -> None: + """Test validation errors for invalid analytics requests.""" + response = client.post("/log_raw_analytics", json=invalid_data) + + assert response.status_code == 422 + error_detail = response.json() + assert "detail" in error_detail, f"Missing 'detail' in error: {error_detail}" + + error_text = json.dumps(error_detail) + assert ( + expected_error in error_text + ), f"Expected '{expected_error}' in error response: {error_text}" + + +def test_log_raw_analytics_service_error( + mocker: pytest_mock.MockFixture, + test_user_id: str, +) -> None: + """Test error handling when analytics service fails.""" + mocker.patch( + "backend.data.analytics.log_raw_analytics", + new_callable=AsyncMock, + side_effect=Exception("Analytics DB unreachable"), + ) + + request_data = { + "type": "test_event", + "data": {"key": "value"}, + "data_index": "test_index", + } + + response = client.post("/log_raw_analytics", json=request_data) + + assert response.status_code == 500 + error_detail = response.json()["detail"] + assert "Analytics DB unreachable" in error_detail["message"] + assert "hint" in error_detail diff --git a/autogpt_platform/backend/backend/server/v2/store/__init__.py b/autogpt_platform/backend/backend/api/features/builder/__init__.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/store/__init__.py rename to autogpt_platform/backend/backend/api/features/builder/__init__.py diff --git a/autogpt_platform/backend/backend/server/v2/builder/db.py b/autogpt_platform/backend/backend/api/features/builder/db.py similarity index 98% rename from autogpt_platform/backend/backend/server/v2/builder/db.py rename to autogpt_platform/backend/backend/api/features/builder/db.py index 9856d53c0e..7177fa4dc6 100644 --- a/autogpt_platform/backend/backend/server/v2/builder/db.py +++ b/autogpt_platform/backend/backend/api/features/builder/db.py @@ -6,17 +6,20 @@ from typing import Sequence import prisma +import backend.api.features.library.db as library_db +import backend.api.features.library.model as library_model +import backend.api.features.store.db as store_db +import backend.api.features.store.model as store_model import backend.data.block -import backend.server.v2.library.db as library_db -import backend.server.v2.library.model as library_model -import backend.server.v2.store.db as store_db -import backend.server.v2.store.model as store_model from backend.blocks import load_all_blocks from backend.blocks.llm import LlmModel from backend.data.block import AnyBlockSchema, BlockCategory, BlockInfo, BlockSchema from backend.data.db import query_raw_with_schema from backend.integrations.providers import ProviderName -from backend.server.v2.builder.model import ( +from backend.util.cache import cached +from backend.util.models import Pagination + +from .model import ( BlockCategoryResponse, BlockResponse, BlockType, @@ -26,8 +29,6 @@ from backend.server.v2.builder.model import ( ProviderResponse, SearchEntry, ) -from backend.util.cache import cached -from backend.util.models import Pagination logger = logging.getLogger(__name__) llm_models = [name.name.lower().replace("_", " ") for name in LlmModel] diff --git a/autogpt_platform/backend/backend/server/v2/builder/model.py b/autogpt_platform/backend/backend/api/features/builder/model.py similarity index 93% rename from 
autogpt_platform/backend/backend/server/v2/builder/model.py rename to autogpt_platform/backend/backend/api/features/builder/model.py index 4a1de595d1..fcd19dba94 100644 --- a/autogpt_platform/backend/backend/server/v2/builder/model.py +++ b/autogpt_platform/backend/backend/api/features/builder/model.py @@ -2,8 +2,8 @@ from typing import Literal from pydantic import BaseModel -import backend.server.v2.library.model as library_model -import backend.server.v2.store.model as store_model +import backend.api.features.library.model as library_model +import backend.api.features.store.model as store_model from backend.data.block import BlockInfo from backend.integrations.providers import ProviderName from backend.util.models import Pagination diff --git a/autogpt_platform/backend/backend/server/v2/builder/routes.py b/autogpt_platform/backend/backend/api/features/builder/routes.py similarity index 98% rename from autogpt_platform/backend/backend/server/v2/builder/routes.py rename to autogpt_platform/backend/backend/api/features/builder/routes.py index b87bf8ca1a..7fe9cab189 100644 --- a/autogpt_platform/backend/backend/server/v2/builder/routes.py +++ b/autogpt_platform/backend/backend/api/features/builder/routes.py @@ -4,11 +4,12 @@ from typing import Annotated, Sequence import fastapi from autogpt_libs.auth.dependencies import get_user_id, requires_user -import backend.server.v2.builder.db as builder_db -import backend.server.v2.builder.model as builder_model from backend.integrations.providers import ProviderName from backend.util.models import Pagination +from . import db as builder_db +from . import model as builder_model + logger = logging.getLogger(__name__) router = fastapi.APIRouter( diff --git a/autogpt_platform/backend/backend/api/features/chat/__init__.py b/autogpt_platform/backend/backend/api/features/chat/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/autogpt_platform/backend/backend/server/v2/chat/config.py b/autogpt_platform/backend/backend/api/features/chat/config.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/chat/config.py rename to autogpt_platform/backend/backend/api/features/chat/config.py diff --git a/autogpt_platform/backend/backend/server/v2/chat/model.py b/autogpt_platform/backend/backend/api/features/chat/model.py similarity index 99% rename from autogpt_platform/backend/backend/server/v2/chat/model.py rename to autogpt_platform/backend/backend/api/features/chat/model.py index 0f06ddab00..b8aea5a334 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/model.py +++ b/autogpt_platform/backend/backend/api/features/chat/model.py @@ -19,9 +19,10 @@ from openai.types.chat.chat_completion_message_tool_call_param import ( from pydantic import BaseModel from backend.data.redis_client import get_redis_async -from backend.server.v2.chat.config import ChatConfig from backend.util.exceptions import RedisError +from .config import ChatConfig + logger = logging.getLogger(__name__) config = ChatConfig() diff --git a/autogpt_platform/backend/backend/server/v2/chat/model_test.py b/autogpt_platform/backend/backend/api/features/chat/model_test.py similarity index 97% rename from autogpt_platform/backend/backend/server/v2/chat/model_test.py rename to autogpt_platform/backend/backend/api/features/chat/model_test.py index f9c79b331b..b7f4c8a493 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/model_test.py +++ b/autogpt_platform/backend/backend/api/features/chat/model_test.py @@ -1,6 +1,6 @@ import pytest -from 
backend.server.v2.chat.model import ( +from .model import ( ChatMessage, ChatSession, Usage, diff --git a/autogpt_platform/backend/backend/server/v2/chat/prompts/chat_system.md b/autogpt_platform/backend/backend/api/features/chat/prompts/chat_system.md similarity index 100% rename from autogpt_platform/backend/backend/server/v2/chat/prompts/chat_system.md rename to autogpt_platform/backend/backend/api/features/chat/prompts/chat_system.md diff --git a/autogpt_platform/backend/backend/server/v2/chat/response_model.py b/autogpt_platform/backend/backend/api/features/chat/response_model.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/chat/response_model.py rename to autogpt_platform/backend/backend/api/features/chat/response_model.py diff --git a/autogpt_platform/backend/backend/server/v2/chat/routes.py b/autogpt_platform/backend/backend/api/features/chat/routes.py similarity index 98% rename from autogpt_platform/backend/backend/server/v2/chat/routes.py rename to autogpt_platform/backend/backend/api/features/chat/routes.py index 86bcf861c0..667335d048 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/routes.py +++ b/autogpt_platform/backend/backend/api/features/chat/routes.py @@ -9,10 +9,11 @@ from fastapi import APIRouter, Depends, Query, Security from fastapi.responses import StreamingResponse from pydantic import BaseModel -import backend.server.v2.chat.service as chat_service -from backend.server.v2.chat.config import ChatConfig from backend.util.exceptions import NotFoundError +from . import service as chat_service +from .config import ChatConfig + config = ChatConfig() diff --git a/autogpt_platform/backend/backend/server/v2/chat/service.py b/autogpt_platform/backend/backend/api/features/chat/service.py similarity index 98% rename from autogpt_platform/backend/backend/server/v2/chat/service.py rename to autogpt_platform/backend/backend/api/features/chat/service.py index 4328deb016..2d96d4abcd 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/service.py +++ b/autogpt_platform/backend/backend/api/features/chat/service.py @@ -7,15 +7,17 @@ import orjson from openai import AsyncOpenAI from openai.types.chat import ChatCompletionChunk, ChatCompletionToolParam -import backend.server.v2.chat.config -from backend.server.v2.chat.model import ( +from backend.util.exceptions import NotFoundError + +from .config import ChatConfig +from .model import ( ChatMessage, ChatSession, Usage, get_chat_session, upsert_chat_session, ) -from backend.server.v2.chat.response_model import ( +from .response_model import ( StreamBaseResponse, StreamEnd, StreamError, @@ -26,12 +28,11 @@ from backend.server.v2.chat.response_model import ( StreamToolExecutionResult, StreamUsage, ) -from backend.server.v2.chat.tools import execute_tool, tools -from backend.util.exceptions import NotFoundError +from .tools import execute_tool, tools logger = logging.getLogger(__name__) -config = backend.server.v2.chat.config.ChatConfig() +config = ChatConfig() client = AsyncOpenAI(api_key=config.api_key, base_url=config.base_url) diff --git a/autogpt_platform/backend/backend/server/v2/chat/service_test.py b/autogpt_platform/backend/backend/api/features/chat/service_test.py similarity index 95% rename from autogpt_platform/backend/backend/server/v2/chat/service_test.py rename to autogpt_platform/backend/backend/api/features/chat/service_test.py index df3d64976e..d1af22a71a 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/service_test.py +++ 
b/autogpt_platform/backend/backend/api/features/chat/service_test.py @@ -3,8 +3,8 @@ from os import getenv import pytest -import backend.server.v2.chat.service as chat_service -from backend.server.v2.chat.response_model import ( +from . import service as chat_service +from .response_model import ( StreamEnd, StreamError, StreamTextChunk, diff --git a/autogpt_platform/backend/backend/server/v2/chat/tools/__init__.py b/autogpt_platform/backend/backend/api/features/chat/tools/__init__.py similarity index 87% rename from autogpt_platform/backend/backend/server/v2/chat/tools/__init__.py rename to autogpt_platform/backend/backend/api/features/chat/tools/__init__.py index 50f0d9892b..5b9b8649a8 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/tools/__init__.py +++ b/autogpt_platform/backend/backend/api/features/chat/tools/__init__.py @@ -2,14 +2,14 @@ from typing import TYPE_CHECKING, Any from openai.types.chat import ChatCompletionToolParam -from backend.server.v2.chat.model import ChatSession +from backend.api.features.chat.model import ChatSession from .base import BaseTool from .find_agent import FindAgentTool from .run_agent import RunAgentTool if TYPE_CHECKING: - from backend.server.v2.chat.response_model import StreamToolExecutionResult + from backend.api.features.chat.response_model import StreamToolExecutionResult # Initialize tool instances find_agent_tool = FindAgentTool() diff --git a/autogpt_platform/backend/backend/server/v2/chat/tools/_test_data.py b/autogpt_platform/backend/backend/api/features/chat/tools/_test_data.py similarity index 99% rename from autogpt_platform/backend/backend/server/v2/chat/tools/_test_data.py rename to autogpt_platform/backend/backend/api/features/chat/tools/_test_data.py index 162894f5f1..f75b7bb0d0 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/tools/_test_data.py +++ b/autogpt_platform/backend/backend/api/features/chat/tools/_test_data.py @@ -5,6 +5,8 @@ from os import getenv import pytest from pydantic import SecretStr +from backend.api.features.chat.model import ChatSession +from backend.api.features.store import db as store_db from backend.blocks.firecrawl.scrape import FirecrawlScrapeBlock from backend.blocks.io import AgentInputBlock, AgentOutputBlock from backend.blocks.llm import AITextGeneratorBlock @@ -13,8 +15,6 @@ from backend.data.graph import Graph, Link, Node, create_graph from backend.data.model import APIKeyCredentials from backend.data.user import get_or_create_user from backend.integrations.credentials_store import IntegrationCredentialsStore -from backend.server.v2.chat.model import ChatSession -from backend.server.v2.store import db as store_db def make_session(user_id: str | None = None): diff --git a/autogpt_platform/backend/backend/server/v2/chat/tools/base.py b/autogpt_platform/backend/backend/api/features/chat/tools/base.py similarity index 96% rename from autogpt_platform/backend/backend/server/v2/chat/tools/base.py rename to autogpt_platform/backend/backend/api/features/chat/tools/base.py index f6c4c06284..b4c9d8d731 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/tools/base.py +++ b/autogpt_platform/backend/backend/api/features/chat/tools/base.py @@ -5,8 +5,8 @@ from typing import Any from openai.types.chat import ChatCompletionToolParam -from backend.server.v2.chat.model import ChatSession -from backend.server.v2.chat.response_model import StreamToolExecutionResult +from backend.api.features.chat.model import ChatSession +from backend.api.features.chat.response_model import 
StreamToolExecutionResult from .models import ErrorResponse, NeedLoginResponse, ToolResponseBase diff --git a/autogpt_platform/backend/backend/server/v2/chat/tools/find_agent.py b/autogpt_platform/backend/backend/api/features/chat/tools/find_agent.py similarity index 95% rename from autogpt_platform/backend/backend/server/v2/chat/tools/find_agent.py rename to autogpt_platform/backend/backend/api/features/chat/tools/find_agent.py index 111041a8f4..3ad071f412 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/tools/find_agent.py +++ b/autogpt_platform/backend/backend/api/features/chat/tools/find_agent.py @@ -3,17 +3,18 @@ import logging from typing import Any -from backend.server.v2.chat.model import ChatSession -from backend.server.v2.chat.tools.base import BaseTool -from backend.server.v2.chat.tools.models import ( +from backend.api.features.chat.model import ChatSession +from backend.api.features.store import db as store_db +from backend.util.exceptions import DatabaseError, NotFoundError + +from .base import BaseTool +from .models import ( AgentCarouselResponse, AgentInfo, ErrorResponse, NoResultsResponse, ToolResponseBase, ) -from backend.server.v2.store import db as store_db -from backend.util.exceptions import DatabaseError, NotFoundError logger = logging.getLogger(__name__) diff --git a/autogpt_platform/backend/backend/server/v2/chat/tools/models.py b/autogpt_platform/backend/backend/api/features/chat/tools/models.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/chat/tools/models.py rename to autogpt_platform/backend/backend/api/features/chat/tools/models.py diff --git a/autogpt_platform/backend/backend/server/v2/chat/tools/run_agent.py b/autogpt_platform/backend/backend/api/features/chat/tools/run_agent.py similarity index 98% rename from autogpt_platform/backend/backend/server/v2/chat/tools/run_agent.py rename to autogpt_platform/backend/backend/api/features/chat/tools/run_agent.py index 9ba2eab893..931e075021 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/tools/run_agent.py +++ b/autogpt_platform/backend/backend/api/features/chat/tools/run_agent.py @@ -5,14 +5,21 @@ from typing import Any from pydantic import BaseModel, Field, field_validator +from backend.api.features.chat.config import ChatConfig +from backend.api.features.chat.model import ChatSession from backend.data.graph import GraphModel from backend.data.model import CredentialsMetaInput from backend.data.user import get_user_by_id from backend.executor import utils as execution_utils -from backend.server.v2.chat.config import ChatConfig -from backend.server.v2.chat.model import ChatSession -from backend.server.v2.chat.tools.base import BaseTool -from backend.server.v2.chat.tools.models import ( +from backend.util.clients import get_scheduler_client +from backend.util.exceptions import DatabaseError, NotFoundError +from backend.util.timezone_utils import ( + convert_utc_time_to_user_timezone, + get_user_timezone_or_utc, +) + +from .base import BaseTool +from .models import ( AgentDetails, AgentDetailsResponse, ErrorResponse, @@ -23,19 +30,13 @@ from backend.server.v2.chat.tools.models import ( ToolResponseBase, UserReadiness, ) -from backend.server.v2.chat.tools.utils import ( +from .utils import ( check_user_has_required_credentials, extract_credentials_from_schema, fetch_graph_from_store_slug, get_or_create_library_agent, match_user_credentials_to_graph, ) -from backend.util.clients import get_scheduler_client -from backend.util.exceptions import DatabaseError, 
NotFoundError -from backend.util.timezone_utils import ( - convert_utc_time_to_user_timezone, - get_user_timezone_or_utc, -) logger = logging.getLogger(__name__) config = ChatConfig() diff --git a/autogpt_platform/backend/backend/server/v2/chat/tools/run_agent_test.py b/autogpt_platform/backend/backend/api/features/chat/tools/run_agent_test.py similarity index 99% rename from autogpt_platform/backend/backend/server/v2/chat/tools/run_agent_test.py rename to autogpt_platform/backend/backend/api/features/chat/tools/run_agent_test.py index 3ffd4a883e..ebad1a0050 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/tools/run_agent_test.py +++ b/autogpt_platform/backend/backend/api/features/chat/tools/run_agent_test.py @@ -3,13 +3,13 @@ import uuid import orjson import pytest -from backend.server.v2.chat.tools._test_data import ( +from ._test_data import ( make_session, setup_firecrawl_test_data, setup_llm_test_data, setup_test_data, ) -from backend.server.v2.chat.tools.run_agent import RunAgentTool +from .run_agent import RunAgentTool # This is so the formatter doesn't remove the fixture imports setup_llm_test_data = setup_llm_test_data diff --git a/autogpt_platform/backend/backend/server/v2/chat/tools/utils.py b/autogpt_platform/backend/backend/api/features/chat/tools/utils.py similarity index 98% rename from autogpt_platform/backend/backend/server/v2/chat/tools/utils.py rename to autogpt_platform/backend/backend/api/features/chat/tools/utils.py index ef4bc6f272..19e092c312 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/tools/utils.py +++ b/autogpt_platform/backend/backend/api/features/chat/tools/utils.py @@ -3,13 +3,13 @@ import logging from typing import Any +from backend.api.features.library import db as library_db +from backend.api.features.library import model as library_model +from backend.api.features.store import db as store_db from backend.data import graph as graph_db from backend.data.graph import GraphModel from backend.data.model import CredentialsMetaInput from backend.integrations.creds_manager import IntegrationCredentialsManager -from backend.server.v2.library import db as library_db -from backend.server.v2.library import model as library_model -from backend.server.v2.store import db as store_db from backend.util.exceptions import NotFoundError logger = logging.getLogger(__name__) diff --git a/autogpt_platform/backend/backend/api/features/executions/__init__.py b/autogpt_platform/backend/backend/api/features/executions/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/autogpt_platform/backend/backend/api/features/executions/review/__init__.py b/autogpt_platform/backend/backend/api/features/executions/review/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/autogpt_platform/backend/backend/server/v2/executions/review/model.py b/autogpt_platform/backend/backend/api/features/executions/review/model.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/executions/review/model.py rename to autogpt_platform/backend/backend/api/features/executions/review/model.py diff --git a/autogpt_platform/backend/backend/server/v2/executions/review/review_routes_test.py b/autogpt_platform/backend/backend/api/features/executions/review/review_routes_test.py similarity index 87% rename from autogpt_platform/backend/backend/server/v2/executions/review/review_routes_test.py rename to autogpt_platform/backend/backend/api/features/executions/review/review_routes_test.py index 2e62641ad3..9d1df5f999 100644 
--- a/autogpt_platform/backend/backend/server/v2/executions/review/review_routes_test.py +++ b/autogpt_platform/backend/backend/api/features/executions/review/review_routes_test.py @@ -7,9 +7,10 @@ import pytest_mock from prisma.enums import ReviewStatus from pytest_snapshot.plugin import Snapshot -from backend.server.rest_api import handle_internal_http_error -from backend.server.v2.executions.review.model import PendingHumanReviewModel -from backend.server.v2.executions.review.routes import router +from backend.api.rest_api import handle_internal_http_error + +from .model import PendingHumanReviewModel +from .routes import router # Using a fixed timestamp for reproducible tests FIXED_NOW = datetime.datetime(2023, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc) @@ -60,7 +61,7 @@ def test_get_pending_reviews_empty( ) -> None: """Test getting pending reviews when none exist""" mock_get_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_user" + "backend.api.features.executions.review.routes.get_pending_reviews_for_user" ) mock_get_reviews.return_value = [] @@ -79,7 +80,7 @@ def test_get_pending_reviews_with_data( ) -> None: """Test getting pending reviews with data""" mock_get_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_user" + "backend.api.features.executions.review.routes.get_pending_reviews_for_user" ) mock_get_reviews.return_value = [sample_pending_review] @@ -101,7 +102,7 @@ def test_get_pending_reviews_for_execution_success( ) -> None: """Test getting pending reviews for specific execution""" mock_get_graph_execution = mocker.patch( - "backend.server.v2.executions.review.routes.get_graph_execution_meta" + "backend.api.features.executions.review.routes.get_graph_execution_meta" ) mock_get_graph_execution.return_value = { "id": "test_graph_exec_456", @@ -109,7 +110,7 @@ def test_get_pending_reviews_for_execution_success( } mock_get_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution" + "backend.api.features.executions.review.routes.get_pending_reviews_for_execution" ) mock_get_reviews.return_value = [sample_pending_review] @@ -127,7 +128,7 @@ def test_get_pending_reviews_for_execution_access_denied( ) -> None: """Test access denied when user doesn't own the execution""" mock_get_graph_execution = mocker.patch( - "backend.server.v2.executions.review.routes.get_graph_execution_meta" + "backend.api.features.executions.review.routes.get_graph_execution_meta" ) mock_get_graph_execution.return_value = None @@ -146,12 +147,12 @@ def test_process_review_action_approve_success( # Mock the route functions mock_get_reviews_for_execution = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution" + "backend.api.features.executions.review.routes.get_pending_reviews_for_execution" ) mock_get_reviews_for_execution.return_value = [sample_pending_review] mock_process_all_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.process_all_reviews_for_execution" + "backend.api.features.executions.review.routes.process_all_reviews_for_execution" ) # Create approved review for return approved_review = PendingHumanReviewModel( @@ -174,11 +175,11 @@ def test_process_review_action_approve_success( mock_process_all_reviews.return_value = {"test_node_123": approved_review} mock_has_pending = mocker.patch( - "backend.server.v2.executions.review.routes.has_pending_reviews_for_graph_exec" + 
"backend.api.features.executions.review.routes.has_pending_reviews_for_graph_exec" ) mock_has_pending.return_value = False - mocker.patch("backend.server.v2.executions.review.routes.add_graph_execution") + mocker.patch("backend.api.features.executions.review.routes.add_graph_execution") request_data = { "reviews": [ @@ -210,12 +211,12 @@ def test_process_review_action_reject_success( # Mock the route functions mock_get_reviews_for_execution = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution" + "backend.api.features.executions.review.routes.get_pending_reviews_for_execution" ) mock_get_reviews_for_execution.return_value = [sample_pending_review] mock_process_all_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.process_all_reviews_for_execution" + "backend.api.features.executions.review.routes.process_all_reviews_for_execution" ) rejected_review = PendingHumanReviewModel( node_exec_id="test_node_123", @@ -237,7 +238,7 @@ def test_process_review_action_reject_success( mock_process_all_reviews.return_value = {"test_node_123": rejected_review} mock_has_pending = mocker.patch( - "backend.server.v2.executions.review.routes.has_pending_reviews_for_graph_exec" + "backend.api.features.executions.review.routes.has_pending_reviews_for_graph_exec" ) mock_has_pending.return_value = False @@ -289,12 +290,12 @@ def test_process_review_action_mixed_success( # Mock the route functions mock_get_reviews_for_execution = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution" + "backend.api.features.executions.review.routes.get_pending_reviews_for_execution" ) mock_get_reviews_for_execution.return_value = [sample_pending_review, second_review] mock_process_all_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.process_all_reviews_for_execution" + "backend.api.features.executions.review.routes.process_all_reviews_for_execution" ) # Create approved version of first review approved_review = PendingHumanReviewModel( @@ -338,7 +339,7 @@ def test_process_review_action_mixed_success( } mock_has_pending = mocker.patch( - "backend.server.v2.executions.review.routes.has_pending_reviews_for_graph_exec" + "backend.api.features.executions.review.routes.has_pending_reviews_for_graph_exec" ) mock_has_pending.return_value = False @@ -392,13 +393,13 @@ def test_process_review_action_review_not_found( """Test error when review is not found""" # Mock the functions that extract graph execution ID from the request mock_get_reviews_for_execution = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution" + "backend.api.features.executions.review.routes.get_pending_reviews_for_execution" ) mock_get_reviews_for_execution.return_value = [] # No reviews found # Mock process_all_reviews to simulate not finding reviews mock_process_all_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.process_all_reviews_for_execution" + "backend.api.features.executions.review.routes.process_all_reviews_for_execution" ) # This should raise a ValueError with "Reviews not found" message based on the data/human_review.py logic mock_process_all_reviews.side_effect = ValueError( @@ -429,13 +430,13 @@ def test_process_review_action_partial_failure( """Test handling of partial failures in review processing""" # Mock the route functions mock_get_reviews_for_execution = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution" + 
"backend.api.features.executions.review.routes.get_pending_reviews_for_execution" ) mock_get_reviews_for_execution.return_value = [sample_pending_review] # Mock partial failure in processing mock_process_all_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.process_all_reviews_for_execution" + "backend.api.features.executions.review.routes.process_all_reviews_for_execution" ) mock_process_all_reviews.side_effect = ValueError("Some reviews failed validation") @@ -463,13 +464,13 @@ def test_process_review_action_invalid_node_exec_id( """Test failure when trying to process review with invalid node execution ID""" # Mock the route functions mock_get_reviews_for_execution = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution" + "backend.api.features.executions.review.routes.get_pending_reviews_for_execution" ) mock_get_reviews_for_execution.return_value = [sample_pending_review] # Mock validation failure - this should return 400, not 500 mock_process_all_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.process_all_reviews_for_execution" + "backend.api.features.executions.review.routes.process_all_reviews_for_execution" ) mock_process_all_reviews.side_effect = ValueError( "Invalid node execution ID format" diff --git a/autogpt_platform/backend/backend/server/v2/executions/review/routes.py b/autogpt_platform/backend/backend/api/features/executions/review/routes.py similarity index 98% rename from autogpt_platform/backend/backend/server/v2/executions/review/routes.py rename to autogpt_platform/backend/backend/api/features/executions/review/routes.py index 14fb435457..4aa4fac49b 100644 --- a/autogpt_platform/backend/backend/server/v2/executions/review/routes.py +++ b/autogpt_platform/backend/backend/api/features/executions/review/routes.py @@ -13,11 +13,8 @@ from backend.data.human_review import ( process_all_reviews_for_execution, ) from backend.executor.utils import add_graph_execution -from backend.server.v2.executions.review.model import ( - PendingHumanReviewModel, - ReviewRequest, - ReviewResponse, -) + +from .model import PendingHumanReviewModel, ReviewRequest, ReviewResponse logger = logging.getLogger(__name__) diff --git a/autogpt_platform/backend/backend/api/features/integrations/__init__.py b/autogpt_platform/backend/backend/api/features/integrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/autogpt_platform/backend/backend/server/integrations/models.py b/autogpt_platform/backend/backend/api/features/integrations/models.py similarity index 100% rename from autogpt_platform/backend/backend/server/integrations/models.py rename to autogpt_platform/backend/backend/api/features/integrations/models.py diff --git a/autogpt_platform/backend/backend/server/integrations/router.py b/autogpt_platform/backend/backend/api/features/integrations/router.py similarity index 99% rename from autogpt_platform/backend/backend/server/integrations/router.py rename to autogpt_platform/backend/backend/api/features/integrations/router.py index b4227ad02a..f5dd8c092b 100644 --- a/autogpt_platform/backend/backend/server/integrations/router.py +++ b/autogpt_platform/backend/backend/api/features/integrations/router.py @@ -17,6 +17,8 @@ from fastapi import ( from pydantic import BaseModel, Field, SecretStr from starlette.status import HTTP_500_INTERNAL_SERVER_ERROR, HTTP_502_BAD_GATEWAY +from backend.api.features.library.db import set_preset_webhook, update_preset +from backend.api.features.library.model 
import LibraryAgentPreset from backend.data.graph import NodeModel, get_graph, set_node_webhook from backend.data.integrations import ( WebhookEvent, @@ -45,13 +47,6 @@ from backend.integrations.creds_manager import IntegrationCredentialsManager from backend.integrations.oauth import CREDENTIALS_BY_PROVIDER, HANDLERS_BY_NAME from backend.integrations.providers import ProviderName from backend.integrations.webhooks import get_webhook_manager -from backend.server.integrations.models import ( - ProviderConstants, - ProviderNamesResponse, - get_all_provider_names, -) -from backend.server.v2.library.db import set_preset_webhook, update_preset -from backend.server.v2.library.model import LibraryAgentPreset from backend.util.exceptions import ( GraphNotInLibraryError, MissingConfigError, @@ -60,6 +55,8 @@ from backend.util.exceptions import ( ) from backend.util.settings import Settings +from .models import ProviderConstants, ProviderNamesResponse, get_all_provider_names + if TYPE_CHECKING: from backend.integrations.oauth import BaseOAuthHandler diff --git a/autogpt_platform/backend/backend/api/features/library/__init__.py b/autogpt_platform/backend/backend/api/features/library/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/autogpt_platform/backend/backend/server/v2/library/db.py b/autogpt_platform/backend/backend/api/features/library/db.py similarity index 99% rename from autogpt_platform/backend/backend/server/v2/library/db.py rename to autogpt_platform/backend/backend/api/features/library/db.py index 17a0efa7be..ad34326700 100644 --- a/autogpt_platform/backend/backend/server/v2/library/db.py +++ b/autogpt_platform/backend/backend/api/features/library/db.py @@ -4,16 +4,14 @@ from typing import Literal, Optional import fastapi import prisma.errors -import prisma.fields import prisma.models import prisma.types +import backend.api.features.store.exceptions as store_exceptions +import backend.api.features.store.image_gen as store_image_gen +import backend.api.features.store.media as store_media import backend.data.graph as graph_db import backend.data.integrations as integrations_db -import backend.server.v2.library.model as library_model -import backend.server.v2.store.exceptions as store_exceptions -import backend.server.v2.store.image_gen as store_image_gen -import backend.server.v2.store.media as store_media from backend.data.block import BlockInput from backend.data.db import transaction from backend.data.execution import get_graph_execution @@ -28,6 +26,8 @@ from backend.util.json import SafeJson from backend.util.models import Pagination from backend.util.settings import Config +from . 
import model as library_model + logger = logging.getLogger(__name__) config = Config() integration_creds_manager = IntegrationCredentialsManager() diff --git a/autogpt_platform/backend/backend/server/v2/library/db_test.py b/autogpt_platform/backend/backend/api/features/library/db_test.py similarity index 94% rename from autogpt_platform/backend/backend/server/v2/library/db_test.py rename to autogpt_platform/backend/backend/api/features/library/db_test.py index cb0095fb39..6023177070 100644 --- a/autogpt_platform/backend/backend/server/v2/library/db_test.py +++ b/autogpt_platform/backend/backend/api/features/library/db_test.py @@ -1,16 +1,15 @@ from datetime import datetime import prisma.enums -import prisma.errors import prisma.models -import prisma.types import pytest -import backend.server.v2.library.db as db -import backend.server.v2.store.exceptions +import backend.api.features.store.exceptions from backend.data.db import connect from backend.data.includes import library_agent_include +from . import db + @pytest.mark.asyncio async def test_get_library_agents(mocker): @@ -88,7 +87,7 @@ async def test_add_agent_to_library(mocker): await connect() # Mock the transaction context - mock_transaction = mocker.patch("backend.server.v2.library.db.transaction") + mock_transaction = mocker.patch("backend.api.features.library.db.transaction") mock_transaction.return_value.__aenter__ = mocker.AsyncMock(return_value=None) mock_transaction.return_value.__aexit__ = mocker.AsyncMock(return_value=None) # Mock data @@ -151,7 +150,7 @@ async def test_add_agent_to_library(mocker): ) # Mock graph_db.get_graph function that's called to check for HITL blocks - mock_graph_db = mocker.patch("backend.server.v2.library.db.graph_db") + mock_graph_db = mocker.patch("backend.api.features.library.db.graph_db") mock_graph_model = mocker.Mock() mock_graph_model.nodes = ( [] @@ -159,7 +158,9 @@ async def test_add_agent_to_library(mocker): mock_graph_db.get_graph = mocker.AsyncMock(return_value=mock_graph_model) # Mock the model conversion - mock_from_db = mocker.patch("backend.server.v2.library.model.LibraryAgent.from_db") + mock_from_db = mocker.patch( + "backend.api.features.library.model.LibraryAgent.from_db" + ) mock_from_db.return_value = mocker.Mock() # Call function @@ -217,7 +218,7 @@ async def test_add_agent_to_library_not_found(mocker): ) # Call function and verify exception - with pytest.raises(backend.server.v2.store.exceptions.AgentNotFoundError): + with pytest.raises(backend.api.features.store.exceptions.AgentNotFoundError): await db.add_store_agent_to_library("version123", "test-user") # Verify mock called correctly diff --git a/autogpt_platform/backend/backend/server/v2/library/model.py b/autogpt_platform/backend/backend/api/features/library/model.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/library/model.py rename to autogpt_platform/backend/backend/api/features/library/model.py diff --git a/autogpt_platform/backend/backend/server/v2/library/model_test.py b/autogpt_platform/backend/backend/api/features/library/model_test.py similarity index 95% rename from autogpt_platform/backend/backend/server/v2/library/model_test.py rename to autogpt_platform/backend/backend/api/features/library/model_test.py index d90ecf6f7a..a32b19322d 100644 --- a/autogpt_platform/backend/backend/server/v2/library/model_test.py +++ b/autogpt_platform/backend/backend/api/features/library/model_test.py @@ -3,7 +3,7 @@ import datetime import prisma.models import pytest -import 
backend.server.v2.library.model as library_model +from . import model as library_model @pytest.mark.asyncio diff --git a/autogpt_platform/backend/backend/server/v2/library/routes/__init__.py b/autogpt_platform/backend/backend/api/features/library/routes/__init__.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/library/routes/__init__.py rename to autogpt_platform/backend/backend/api/features/library/routes/__init__.py diff --git a/autogpt_platform/backend/backend/server/v2/library/routes/agents.py b/autogpt_platform/backend/backend/api/features/library/routes/agents.py similarity index 98% rename from autogpt_platform/backend/backend/server/v2/library/routes/agents.py rename to autogpt_platform/backend/backend/api/features/library/routes/agents.py index 173b6ab2b4..5a043009fc 100644 --- a/autogpt_platform/backend/backend/server/v2/library/routes/agents.py +++ b/autogpt_platform/backend/backend/api/features/library/routes/agents.py @@ -6,12 +6,13 @@ from fastapi import APIRouter, Body, HTTPException, Query, Security, status from fastapi.responses import Response from prisma.enums import OnboardingStep -import backend.server.v2.library.db as library_db -import backend.server.v2.library.model as library_model -import backend.server.v2.store.exceptions as store_exceptions +import backend.api.features.store.exceptions as store_exceptions from backend.data.onboarding import complete_onboarding_step from backend.util.exceptions import DatabaseError, NotFoundError +from .. import db as library_db +from .. import model as library_model + logger = logging.getLogger(__name__) router = APIRouter( diff --git a/autogpt_platform/backend/backend/server/v2/library/routes/presets.py b/autogpt_platform/backend/backend/api/features/library/routes/presets.py similarity index 99% rename from autogpt_platform/backend/backend/server/v2/library/routes/presets.py rename to autogpt_platform/backend/backend/api/features/library/routes/presets.py index b1810395f0..cd4c04e0f2 100644 --- a/autogpt_platform/backend/backend/server/v2/library/routes/presets.py +++ b/autogpt_platform/backend/backend/api/features/library/routes/presets.py @@ -4,8 +4,6 @@ from typing import Any, Optional import autogpt_libs.auth as autogpt_auth_lib from fastapi import APIRouter, Body, HTTPException, Query, Security, status -import backend.server.v2.library.db as db -import backend.server.v2.library.model as models from backend.data.execution import GraphExecutionMeta from backend.data.graph import get_graph from backend.data.integrations import get_webhook @@ -17,6 +15,9 @@ from backend.integrations.webhooks import get_webhook_manager from backend.integrations.webhooks.utils import setup_webhook_for_block from backend.util.exceptions import NotFoundError +from .. import db +from .. 
import model as models + logger = logging.getLogger(__name__) credentials_manager = IntegrationCredentialsManager() diff --git a/autogpt_platform/backend/backend/server/v2/library/routes_test.py b/autogpt_platform/backend/backend/api/features/library/routes_test.py similarity index 93% rename from autogpt_platform/backend/backend/server/v2/library/routes_test.py rename to autogpt_platform/backend/backend/api/features/library/routes_test.py index bd5d9827de..ad28b5b6bd 100644 --- a/autogpt_platform/backend/backend/server/v2/library/routes_test.py +++ b/autogpt_platform/backend/backend/api/features/library/routes_test.py @@ -7,10 +7,11 @@ import pytest import pytest_mock from pytest_snapshot.plugin import Snapshot -import backend.server.v2.library.model as library_model -from backend.server.v2.library.routes import router as library_router from backend.util.models import Pagination +from . import model as library_model +from .routes import router as library_router + app = fastapi.FastAPI() app.include_router(library_router) @@ -86,7 +87,7 @@ async def test_get_library_agents_success( total_items=2, total_pages=1, current_page=1, page_size=50 ), ) - mock_db_call = mocker.patch("backend.server.v2.library.db.list_library_agents") + mock_db_call = mocker.patch("backend.api.features.library.db.list_library_agents") mock_db_call.return_value = mocked_value response = client.get("/agents?search_term=test") @@ -112,7 +113,7 @@ async def test_get_library_agents_success( def test_get_library_agents_error(mocker: pytest_mock.MockFixture, test_user_id: str): - mock_db_call = mocker.patch("backend.server.v2.library.db.list_library_agents") + mock_db_call = mocker.patch("backend.api.features.library.db.list_library_agents") mock_db_call.side_effect = Exception("Test error") response = client.get("/agents?search_term=test") @@ -161,7 +162,7 @@ async def test_get_favorite_library_agents_success( ), ) mock_db_call = mocker.patch( - "backend.server.v2.library.db.list_favorite_library_agents" + "backend.api.features.library.db.list_favorite_library_agents" ) mock_db_call.return_value = mocked_value @@ -184,7 +185,7 @@ def test_get_favorite_library_agents_error( mocker: pytest_mock.MockFixture, test_user_id: str ): mock_db_call = mocker.patch( - "backend.server.v2.library.db.list_favorite_library_agents" + "backend.api.features.library.db.list_favorite_library_agents" ) mock_db_call.side_effect = Exception("Test error") @@ -223,11 +224,11 @@ def test_add_agent_to_library_success( ) mock_db_call = mocker.patch( - "backend.server.v2.library.db.add_store_agent_to_library" + "backend.api.features.library.db.add_store_agent_to_library" ) mock_db_call.return_value = mock_library_agent mock_complete_onboarding = mocker.patch( - "backend.server.v2.library.routes.agents.complete_onboarding_step", + "backend.api.features.library.routes.agents.complete_onboarding_step", new_callable=AsyncMock, ) @@ -249,7 +250,7 @@ def test_add_agent_to_library_success( def test_add_agent_to_library_error(mocker: pytest_mock.MockFixture, test_user_id: str): mock_db_call = mocker.patch( - "backend.server.v2.library.db.add_store_agent_to_library" + "backend.api.features.library.db.add_store_agent_to_library" ) mock_db_call.side_effect = Exception("Test error") diff --git a/autogpt_platform/backend/backend/server/routers/oauth.py b/autogpt_platform/backend/backend/api/features/oauth.py similarity index 99% rename from autogpt_platform/backend/backend/server/routers/oauth.py rename to autogpt_platform/backend/backend/api/features/oauth.py 
index 55f591427a..023a433951 100644 --- a/autogpt_platform/backend/backend/server/routers/oauth.py +++ b/autogpt_platform/backend/backend/api/features/oauth.py @@ -5,11 +5,11 @@ Implements OAuth 2.0 Authorization Code flow with PKCE support. Flow: 1. User clicks "Login with AutoGPT" in 3rd party app -2. App redirects user to /oauth/authorize with client_id, redirect_uri, scope, state +2. App redirects user to /auth/authorize with client_id, redirect_uri, scope, state 3. User sees consent screen (if not already logged in, redirects to login first) 4. User approves → backend creates authorization code 5. User redirected back to app with code -6. App exchanges code for access/refresh tokens at /oauth/token +6. App exchanges code for access/refresh tokens at /api/oauth/token 7. App uses access token to call external API endpoints """ diff --git a/autogpt_platform/backend/backend/server/routers/oauth_test.py b/autogpt_platform/backend/backend/api/features/oauth_test.py similarity index 99% rename from autogpt_platform/backend/backend/server/routers/oauth_test.py rename to autogpt_platform/backend/backend/api/features/oauth_test.py index 8ec6911152..5f6b85a88a 100644 --- a/autogpt_platform/backend/backend/server/routers/oauth_test.py +++ b/autogpt_platform/backend/backend/api/features/oauth_test.py @@ -28,7 +28,7 @@ from prisma.models import OAuthAuthorizationCode as PrismaOAuthAuthorizationCode from prisma.models import OAuthRefreshToken as PrismaOAuthRefreshToken from prisma.models import User as PrismaUser -from backend.server.rest_api import app +from backend.api.rest_api import app keysmith = APIKeySmith() diff --git a/autogpt_platform/backend/backend/api/features/otto/__init__.py b/autogpt_platform/backend/backend/api/features/otto/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/autogpt_platform/backend/backend/server/v2/otto/models.py b/autogpt_platform/backend/backend/api/features/otto/models.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/otto/models.py rename to autogpt_platform/backend/backend/api/features/otto/models.py diff --git a/autogpt_platform/backend/backend/server/v2/otto/routes.py b/autogpt_platform/backend/backend/api/features/otto/routes.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/otto/routes.py rename to autogpt_platform/backend/backend/api/features/otto/routes.py diff --git a/autogpt_platform/backend/backend/server/v2/otto/routes_test.py b/autogpt_platform/backend/backend/api/features/otto/routes_test.py similarity index 97% rename from autogpt_platform/backend/backend/server/v2/otto/routes_test.py rename to autogpt_platform/backend/backend/api/features/otto/routes_test.py index 2641babe2b..416bcdee76 100644 --- a/autogpt_platform/backend/backend/server/v2/otto/routes_test.py +++ b/autogpt_platform/backend/backend/api/features/otto/routes_test.py @@ -6,9 +6,9 @@ import pytest import pytest_mock from pytest_snapshot.plugin import Snapshot -import backend.server.v2.otto.models as otto_models -import backend.server.v2.otto.routes as otto_routes -from backend.server.v2.otto.service import OttoService +from . import models as otto_models +from . 
import routes as otto_routes +from .service import OttoService app = fastapi.FastAPI() app.include_router(otto_routes.router) diff --git a/autogpt_platform/backend/backend/server/v2/otto/service.py b/autogpt_platform/backend/backend/api/features/otto/service.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/otto/service.py rename to autogpt_platform/backend/backend/api/features/otto/service.py diff --git a/autogpt_platform/backend/backend/api/features/postmark/__init__.py b/autogpt_platform/backend/backend/api/features/postmark/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/autogpt_platform/backend/backend/server/routers/postmark/models.py b/autogpt_platform/backend/backend/api/features/postmark/models.py similarity index 100% rename from autogpt_platform/backend/backend/server/routers/postmark/models.py rename to autogpt_platform/backend/backend/api/features/postmark/models.py diff --git a/autogpt_platform/backend/backend/server/routers/postmark/postmark.py b/autogpt_platform/backend/backend/api/features/postmark/postmark.py similarity index 96% rename from autogpt_platform/backend/backend/server/routers/postmark/postmark.py rename to autogpt_platform/backend/backend/api/features/postmark/postmark.py index 2190aa5fce..224e30fa9d 100644 --- a/autogpt_platform/backend/backend/server/routers/postmark/postmark.py +++ b/autogpt_platform/backend/backend/api/features/postmark/postmark.py @@ -4,12 +4,15 @@ from typing import Annotated from fastapi import APIRouter, Body, HTTPException, Query, Security from fastapi.responses import JSONResponse +from backend.api.utils.api_key_auth import APIKeyAuthenticator from backend.data.user import ( get_user_by_email, set_user_email_verification, unsubscribe_user_by_token, ) -from backend.server.routers.postmark.models import ( +from backend.util.settings import Settings + +from .models import ( PostmarkBounceEnum, PostmarkBounceWebhook, PostmarkClickWebhook, @@ -19,8 +22,6 @@ from backend.server.routers.postmark.models import ( PostmarkSubscriptionChangeWebhook, PostmarkWebhook, ) -from backend.server.utils.api_key_auth import APIKeyAuthenticator -from backend.util.settings import Settings logger = logging.getLogger(__name__) settings = Settings() diff --git a/autogpt_platform/backend/backend/server/v2/store/README.md b/autogpt_platform/backend/backend/api/features/store/README.md similarity index 100% rename from autogpt_platform/backend/backend/server/v2/store/README.md rename to autogpt_platform/backend/backend/api/features/store/README.md diff --git a/autogpt_platform/backend/backend/api/features/store/__init__.py b/autogpt_platform/backend/backend/api/features/store/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/autogpt_platform/backend/backend/server/v2/store/cache.py b/autogpt_platform/backend/backend/api/features/store/cache.py similarity index 85% rename from autogpt_platform/backend/backend/server/v2/store/cache.py rename to autogpt_platform/backend/backend/api/features/store/cache.py index 574403342e..7832069d49 100644 --- a/autogpt_platform/backend/backend/server/v2/store/cache.py +++ b/autogpt_platform/backend/backend/api/features/store/cache.py @@ -1,8 +1,9 @@ from typing import Literal -import backend.server.v2.store.db from backend.util.cache import cached +from . 
import db as store_db + ############################################## ############### Caches ####################### ############################################## @@ -29,7 +30,7 @@ async def _get_cached_store_agents( page_size: int, ): """Cached helper to get store agents.""" - return await backend.server.v2.store.db.get_store_agents( + return await store_db.get_store_agents( featured=featured, creators=[creator] if creator else None, sorted_by=sorted_by, @@ -44,7 +45,7 @@ async def _get_cached_store_agents( @cached(maxsize=200, ttl_seconds=300, shared_cache=True) async def _get_cached_agent_details(username: str, agent_name: str): """Cached helper to get agent details.""" - return await backend.server.v2.store.db.get_store_agent_details( + return await store_db.get_store_agent_details( username=username, agent_name=agent_name ) @@ -59,7 +60,7 @@ async def _get_cached_store_creators( page_size: int, ): """Cached helper to get store creators.""" - return await backend.server.v2.store.db.get_store_creators( + return await store_db.get_store_creators( featured=featured, search_query=search_query, sorted_by=sorted_by, @@ -72,6 +73,4 @@ async def _get_cached_store_creators( @cached(maxsize=100, ttl_seconds=300, shared_cache=True) async def _get_cached_creator_details(username: str): """Cached helper to get creator details.""" - return await backend.server.v2.store.db.get_store_creator_details( - username=username.lower() - ) + return await store_db.get_store_creator_details(username=username.lower()) diff --git a/autogpt_platform/backend/backend/server/v2/store/db.py b/autogpt_platform/backend/backend/api/features/store/db.py similarity index 92% rename from autogpt_platform/backend/backend/server/v2/store/db.py rename to autogpt_platform/backend/backend/api/features/store/db.py index 33554a9c2a..12f1783468 100644 --- a/autogpt_platform/backend/backend/server/v2/store/db.py +++ b/autogpt_platform/backend/backend/api/features/store/db.py @@ -10,8 +10,6 @@ import prisma.errors import prisma.models import prisma.types -import backend.server.v2.store.exceptions -import backend.server.v2.store.model from backend.data.db import query_raw_with_schema, transaction from backend.data.graph import ( GraphMeta, @@ -30,6 +28,9 @@ from backend.notifications.notifications import queue_notification_async from backend.util.exceptions import DatabaseError from backend.util.settings import Settings +from . import exceptions as store_exceptions +from . 
import model as store_model + logger = logging.getLogger(__name__) settings = Settings() @@ -47,7 +48,7 @@ async def get_store_agents( category: str | None = None, page: int = 1, page_size: int = 20, -) -> backend.server.v2.store.model.StoreAgentsResponse: +) -> store_model.StoreAgentsResponse: """ Get PUBLIC store agents from the StoreAgent view """ @@ -148,10 +149,10 @@ async def get_store_agents( total_pages = (total + page_size - 1) // page_size # Convert raw results to StoreAgent models - store_agents: list[backend.server.v2.store.model.StoreAgent] = [] + store_agents: list[store_model.StoreAgent] = [] for agent in agents: try: - store_agent = backend.server.v2.store.model.StoreAgent( + store_agent = store_model.StoreAgent( slug=agent["slug"], agent_name=agent["agent_name"], agent_image=( @@ -197,11 +198,11 @@ async def get_store_agents( total = await prisma.models.StoreAgent.prisma().count(where=where_clause) total_pages = (total + page_size - 1) // page_size - store_agents: list[backend.server.v2.store.model.StoreAgent] = [] + store_agents: list[store_model.StoreAgent] = [] for agent in agents: try: # Create the StoreAgent object safely - store_agent = backend.server.v2.store.model.StoreAgent( + store_agent = store_model.StoreAgent( slug=agent.slug, agent_name=agent.agent_name, agent_image=agent.agent_image[0] if agent.agent_image else "", @@ -223,9 +224,9 @@ async def get_store_agents( continue logger.debug(f"Found {len(store_agents)} agents") - return backend.server.v2.store.model.StoreAgentsResponse( + return store_model.StoreAgentsResponse( agents=store_agents, - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=page, total_items=total, total_pages=total_pages, @@ -257,7 +258,7 @@ async def log_search_term(search_query: str): async def get_store_agent_details( username: str, agent_name: str -) -> backend.server.v2.store.model.StoreAgentDetails: +) -> store_model.StoreAgentDetails: """Get PUBLIC store agent details from the StoreAgent view""" logger.debug(f"Getting store agent details for {username}/{agent_name}") @@ -268,7 +269,7 @@ async def get_store_agent_details( if not agent: logger.warning(f"Agent not found: {username}/{agent_name}") - raise backend.server.v2.store.exceptions.AgentNotFoundError( + raise store_exceptions.AgentNotFoundError( f"Agent {username}/{agent_name} not found" ) @@ -322,7 +323,7 @@ async def get_store_agent_details( recommended_schedule_cron = None logger.debug(f"Found agent details for {username}/{agent_name}") - return backend.server.v2.store.model.StoreAgentDetails( + return store_model.StoreAgentDetails( store_listing_version_id=agent.storeListingVersionId, slug=agent.slug, agent_name=agent.agent_name, @@ -342,7 +343,7 @@ async def get_store_agent_details( has_approved_version=has_approved_version, recommended_schedule_cron=recommended_schedule_cron, ) - except backend.server.v2.store.exceptions.AgentNotFoundError: + except store_exceptions.AgentNotFoundError: raise except Exception as e: logger.error(f"Error getting store agent details: {e}") @@ -378,7 +379,7 @@ async def get_available_graph(store_listing_version_id: str) -> GraphMeta: async def get_store_agent_by_version_id( store_listing_version_id: str, -) -> backend.server.v2.store.model.StoreAgentDetails: +) -> store_model.StoreAgentDetails: logger.debug(f"Getting store agent details for {store_listing_version_id}") try: @@ -388,12 +389,12 @@ async def get_store_agent_by_version_id( if not agent: logger.warning(f"Agent not found: 
{store_listing_version_id}") - raise backend.server.v2.store.exceptions.AgentNotFoundError( + raise store_exceptions.AgentNotFoundError( f"Agent {store_listing_version_id} not found" ) logger.debug(f"Found agent details for {store_listing_version_id}") - return backend.server.v2.store.model.StoreAgentDetails( + return store_model.StoreAgentDetails( store_listing_version_id=agent.storeListingVersionId, slug=agent.slug, agent_name=agent.agent_name, @@ -410,7 +411,7 @@ async def get_store_agent_by_version_id( versions=agent.versions, last_updated=agent.updated_at, ) - except backend.server.v2.store.exceptions.AgentNotFoundError: + except store_exceptions.AgentNotFoundError: raise except Exception as e: logger.error(f"Error getting store agent details: {e}") @@ -423,7 +424,7 @@ async def get_store_creators( sorted_by: Literal["agent_rating", "agent_runs", "num_agents"] | None = None, page: int = 1, page_size: int = 20, -) -> backend.server.v2.store.model.CreatorsResponse: +) -> store_model.CreatorsResponse: """Get PUBLIC store creators from the Creator view""" logger.debug( f"Getting store creators. featured={featured}, search={search_query}, sorted_by={sorted_by}, page={page}" @@ -498,7 +499,7 @@ async def get_store_creators( # Convert to response model creator_models = [ - backend.server.v2.store.model.Creator( + store_model.Creator( username=creator.username, name=creator.name, description=creator.description, @@ -512,9 +513,9 @@ async def get_store_creators( ] logger.debug(f"Found {len(creator_models)} creators") - return backend.server.v2.store.model.CreatorsResponse( + return store_model.CreatorsResponse( creators=creator_models, - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=page, total_items=total, total_pages=total_pages, @@ -528,7 +529,7 @@ async def get_store_creators( async def get_store_creator_details( username: str, -) -> backend.server.v2.store.model.CreatorDetails: +) -> store_model.CreatorDetails: logger.debug(f"Getting store creator details for {username}") try: @@ -539,12 +540,10 @@ async def get_store_creator_details( if not creator: logger.warning(f"Creator not found: {username}") - raise backend.server.v2.store.exceptions.CreatorNotFoundError( - f"Creator {username} not found" - ) + raise store_exceptions.CreatorNotFoundError(f"Creator {username} not found") logger.debug(f"Found creator details for {username}") - return backend.server.v2.store.model.CreatorDetails( + return store_model.CreatorDetails( name=creator.name, username=creator.username, description=creator.description, @@ -554,7 +553,7 @@ async def get_store_creator_details( agent_runs=creator.agent_runs, top_categories=creator.top_categories, ) - except backend.server.v2.store.exceptions.CreatorNotFoundError: + except store_exceptions.CreatorNotFoundError: raise except Exception as e: logger.error(f"Error getting store creator details: {e}") @@ -563,7 +562,7 @@ async def get_store_creator_details( async def get_store_submissions( user_id: str, page: int = 1, page_size: int = 20 -) -> backend.server.v2.store.model.StoreSubmissionsResponse: +) -> store_model.StoreSubmissionsResponse: """Get store submissions for the authenticated user -- not an admin""" logger.debug(f"Getting store submissions for user {user_id}, page={page}") @@ -588,7 +587,7 @@ async def get_store_submissions( # Convert to response models submission_models = [] for sub in submissions: - submission_model = backend.server.v2.store.model.StoreSubmission( + submission_model = 
store_model.StoreSubmission( agent_id=sub.agent_id, agent_version=sub.agent_version, name=sub.name, @@ -613,9 +612,9 @@ async def get_store_submissions( submission_models.append(submission_model) logger.debug(f"Found {len(submission_models)} submissions") - return backend.server.v2.store.model.StoreSubmissionsResponse( + return store_model.StoreSubmissionsResponse( submissions=submission_models, - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=page, total_items=total, total_pages=total_pages, @@ -626,9 +625,9 @@ async def get_store_submissions( except Exception as e: logger.error(f"Error fetching store submissions: {e}") # Return empty response rather than exposing internal errors - return backend.server.v2.store.model.StoreSubmissionsResponse( + return store_model.StoreSubmissionsResponse( submissions=[], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=page, total_items=0, total_pages=0, @@ -661,7 +660,7 @@ async def delete_store_submission( if not submission: logger.warning(f"Submission not found for user {user_id}: {submission_id}") - raise backend.server.v2.store.exceptions.SubmissionNotFoundError( + raise store_exceptions.SubmissionNotFoundError( f"Submission not found for this user. User ID: {user_id}, Submission ID: {submission_id}" ) @@ -693,7 +692,7 @@ async def create_store_submission( categories: list[str] = [], changes_summary: str | None = "Initial Submission", recommended_schedule_cron: str | None = None, -) -> backend.server.v2.store.model.StoreSubmission: +) -> store_model.StoreSubmission: """ Create the first (and only) store listing and thus submission as a normal user @@ -734,7 +733,7 @@ async def create_store_submission( logger.warning( f"Agent not found for user {user_id}: {agent_id} v{agent_version}" ) - raise backend.server.v2.store.exceptions.AgentNotFoundError( + raise store_exceptions.AgentNotFoundError( f"Agent not found for this user. User ID: {user_id}, Agent ID: {agent_id}, Version: {agent_version}" ) @@ -807,7 +806,7 @@ async def create_store_submission( logger.debug(f"Created store listing for agent {agent_id}") # Return submission details - return backend.server.v2.store.model.StoreSubmission( + return store_model.StoreSubmission( agent_id=agent_id, agent_version=agent_version, name=name, @@ -830,7 +829,7 @@ async def create_store_submission( logger.debug( f"Slug '{slug}' is already in use by another agent (agent_id: {agent_id}) for user {user_id}" ) - raise backend.server.v2.store.exceptions.SlugAlreadyInUseError( + raise store_exceptions.SlugAlreadyInUseError( f"The URL slug '{slug}' is already in use by another one of your agents. Please choose a different slug." ) from exc else: @@ -839,8 +838,8 @@ async def create_store_submission( f"Unique constraint violated (not slug): {error_str}" ) from exc except ( - backend.server.v2.store.exceptions.AgentNotFoundError, - backend.server.v2.store.exceptions.ListingExistsError, + store_exceptions.AgentNotFoundError, + store_exceptions.ListingExistsError, ): raise except prisma.errors.PrismaError as e: @@ -861,7 +860,7 @@ async def edit_store_submission( changes_summary: str | None = "Update submission", recommended_schedule_cron: str | None = None, instructions: str | None = None, -) -> backend.server.v2.store.model.StoreSubmission: +) -> store_model.StoreSubmission: """ Edit an existing store listing submission. 
@@ -903,7 +902,7 @@ async def edit_store_submission( ) if not current_version: - raise backend.server.v2.store.exceptions.SubmissionNotFoundError( + raise store_exceptions.SubmissionNotFoundError( f"Store listing version not found: {store_listing_version_id}" ) @@ -912,7 +911,7 @@ async def edit_store_submission( not current_version.StoreListing or current_version.StoreListing.owningUserId != user_id ): - raise backend.server.v2.store.exceptions.UnauthorizedError( + raise store_exceptions.UnauthorizedError( f"User {user_id} does not own submission {store_listing_version_id}" ) @@ -921,7 +920,7 @@ async def edit_store_submission( # Check if we can edit this submission if current_version.submissionStatus == prisma.enums.SubmissionStatus.REJECTED: - raise backend.server.v2.store.exceptions.InvalidOperationError( + raise store_exceptions.InvalidOperationError( "Cannot edit a rejected submission" ) @@ -970,7 +969,7 @@ async def edit_store_submission( if not updated_version: raise DatabaseError("Failed to update store listing version") - return backend.server.v2.store.model.StoreSubmission( + return store_model.StoreSubmission( agent_id=current_version.agentGraphId, agent_version=current_version.agentGraphVersion, name=name, @@ -991,16 +990,16 @@ async def edit_store_submission( ) else: - raise backend.server.v2.store.exceptions.InvalidOperationError( + raise store_exceptions.InvalidOperationError( f"Cannot edit submission with status: {current_version.submissionStatus}" ) except ( - backend.server.v2.store.exceptions.SubmissionNotFoundError, - backend.server.v2.store.exceptions.UnauthorizedError, - backend.server.v2.store.exceptions.AgentNotFoundError, - backend.server.v2.store.exceptions.ListingExistsError, - backend.server.v2.store.exceptions.InvalidOperationError, + store_exceptions.SubmissionNotFoundError, + store_exceptions.UnauthorizedError, + store_exceptions.AgentNotFoundError, + store_exceptions.ListingExistsError, + store_exceptions.InvalidOperationError, ): raise except prisma.errors.PrismaError as e: @@ -1023,7 +1022,7 @@ async def create_store_version( categories: list[str] = [], changes_summary: str | None = "Initial submission", recommended_schedule_cron: str | None = None, -) -> backend.server.v2.store.model.StoreSubmission: +) -> store_model.StoreSubmission: """ Create a new version for an existing store listing @@ -1056,7 +1055,7 @@ async def create_store_version( ) if not listing: - raise backend.server.v2.store.exceptions.ListingNotFoundError( + raise store_exceptions.ListingNotFoundError( f"Store listing not found. User ID: {user_id}, Listing ID: {store_listing_id}" ) @@ -1068,7 +1067,7 @@ async def create_store_version( ) if not agent: - raise backend.server.v2.store.exceptions.AgentNotFoundError( + raise store_exceptions.AgentNotFoundError( f"Agent not found for this user. 
User ID: {user_id}, Agent ID: {agent_id}, Version: {agent_version}" ) @@ -1103,7 +1102,7 @@ async def create_store_version( f"Created new version for listing {store_listing_id} of agent {agent_id}" ) # Return submission details - return backend.server.v2.store.model.StoreSubmission( + return store_model.StoreSubmission( agent_id=agent_id, agent_version=agent_version, name=name, @@ -1130,7 +1129,7 @@ async def create_store_review( store_listing_version_id: str, score: int, comments: str | None = None, -) -> backend.server.v2.store.model.StoreReview: +) -> store_model.StoreReview: """Create a review for a store listing as a user to detail their experience""" try: data = prisma.types.StoreListingReviewUpsertInput( @@ -1155,7 +1154,7 @@ async def create_store_review( data=data, ) - return backend.server.v2.store.model.StoreReview( + return store_model.StoreReview( score=review.score, comments=review.comments, ) @@ -1167,7 +1166,7 @@ async def create_store_review( async def get_user_profile( user_id: str, -) -> backend.server.v2.store.model.ProfileDetails | None: +) -> store_model.ProfileDetails | None: logger.debug(f"Getting user profile for {user_id}") try: @@ -1177,7 +1176,7 @@ async def get_user_profile( if not profile: return None - return backend.server.v2.store.model.ProfileDetails( + return store_model.ProfileDetails( name=profile.name, username=profile.username, description=profile.description, @@ -1190,8 +1189,8 @@ async def get_user_profile( async def update_profile( - user_id: str, profile: backend.server.v2.store.model.Profile -) -> backend.server.v2.store.model.CreatorDetails: + user_id: str, profile: store_model.Profile +) -> store_model.CreatorDetails: """ Update the store profile for a user or create a new one if it doesn't exist. Args: @@ -1214,7 +1213,7 @@ async def update_profile( where={"userId": user_id} ) if not existing_profile: - raise backend.server.v2.store.exceptions.ProfileNotFoundError( + raise store_exceptions.ProfileNotFoundError( f"Profile not found for user {user_id}. This should not be possible." 
) @@ -1250,7 +1249,7 @@ async def update_profile( logger.error(f"Failed to update profile for user {user_id}") raise DatabaseError("Failed to update profile") - return backend.server.v2.store.model.CreatorDetails( + return store_model.CreatorDetails( name=updated_profile.name, username=updated_profile.username, description=updated_profile.description, @@ -1270,7 +1269,7 @@ async def get_my_agents( user_id: str, page: int = 1, page_size: int = 20, -) -> backend.server.v2.store.model.MyAgentsResponse: +) -> store_model.MyAgentsResponse: """Get the agents for the authenticated user""" logger.debug(f"Getting my agents for user {user_id}, page={page}") @@ -1307,7 +1306,7 @@ async def get_my_agents( total_pages = (total + page_size - 1) // page_size my_agents = [ - backend.server.v2.store.model.MyAgent( + store_model.MyAgent( agent_id=graph.id, agent_version=graph.version, agent_name=graph.name or "", @@ -1320,9 +1319,9 @@ async def get_my_agents( if (graph := library_agent.AgentGraph) ] - return backend.server.v2.store.model.MyAgentsResponse( + return store_model.MyAgentsResponse( agents=my_agents, - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=page, total_items=total, total_pages=total_pages, @@ -1469,7 +1468,7 @@ async def review_store_submission( external_comments: str, internal_comments: str, reviewer_id: str, -) -> backend.server.v2.store.model.StoreSubmission: +) -> store_model.StoreSubmission: """Review a store listing submission as an admin.""" try: store_listing_version = ( @@ -1682,7 +1681,7 @@ async def review_store_submission( pass # Convert to Pydantic model for consistency - return backend.server.v2.store.model.StoreSubmission( + return store_model.StoreSubmission( agent_id=submission.agentGraphId, agent_version=submission.agentGraphVersion, name=submission.name, @@ -1717,7 +1716,7 @@ async def get_admin_listings_with_versions( search_query: str | None = None, page: int = 1, page_size: int = 20, -) -> backend.server.v2.store.model.StoreListingsWithVersionsResponse: +) -> store_model.StoreListingsWithVersionsResponse: """ Get store listings for admins with all their versions. 
@@ -1816,10 +1815,10 @@ async def get_admin_listings_with_versions( # Convert to response models listings_with_versions = [] for listing in listings: - versions: list[backend.server.v2.store.model.StoreSubmission] = [] + versions: list[store_model.StoreSubmission] = [] # If we have versions, turn them into StoreSubmission models for version in listing.Versions or []: - version_model = backend.server.v2.store.model.StoreSubmission( + version_model = store_model.StoreSubmission( agent_id=version.agentGraphId, agent_version=version.agentGraphVersion, name=version.name, @@ -1847,26 +1846,24 @@ async def get_admin_listings_with_versions( creator_email = listing.OwningUser.email if listing.OwningUser else None - listing_with_versions = ( - backend.server.v2.store.model.StoreListingWithVersions( - listing_id=listing.id, - slug=listing.slug, - agent_id=listing.agentGraphId, - agent_version=listing.agentGraphVersion, - active_version_id=listing.activeVersionId, - has_approved_version=listing.hasApprovedVersion, - creator_email=creator_email, - latest_version=latest_version, - versions=versions, - ) + listing_with_versions = store_model.StoreListingWithVersions( + listing_id=listing.id, + slug=listing.slug, + agent_id=listing.agentGraphId, + agent_version=listing.agentGraphVersion, + active_version_id=listing.activeVersionId, + has_approved_version=listing.hasApprovedVersion, + creator_email=creator_email, + latest_version=latest_version, + versions=versions, ) listings_with_versions.append(listing_with_versions) logger.debug(f"Found {len(listings_with_versions)} listings for admin") - return backend.server.v2.store.model.StoreListingsWithVersionsResponse( + return store_model.StoreListingsWithVersionsResponse( listings=listings_with_versions, - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=page, total_items=total, total_pages=total_pages, @@ -1876,9 +1873,9 @@ async def get_admin_listings_with_versions( except Exception as e: logger.error(f"Error fetching admin store listings: {e}") # Return empty response rather than exposing internal errors - return backend.server.v2.store.model.StoreListingsWithVersionsResponse( + return store_model.StoreListingsWithVersionsResponse( listings=[], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=page, total_items=0, total_pages=0, diff --git a/autogpt_platform/backend/backend/server/v2/store/db_test.py b/autogpt_platform/backend/backend/api/features/store/db_test.py similarity index 99% rename from autogpt_platform/backend/backend/server/v2/store/db_test.py rename to autogpt_platform/backend/backend/api/features/store/db_test.py index bf7cf39d6a..641f392d86 100644 --- a/autogpt_platform/backend/backend/server/v2/store/db_test.py +++ b/autogpt_platform/backend/backend/api/features/store/db_test.py @@ -6,8 +6,8 @@ import prisma.models import pytest from prisma import Prisma -import backend.server.v2.store.db as db -from backend.server.v2.store.model import Profile +from . 
import db +from .model import Profile @pytest.fixture(autouse=True) diff --git a/autogpt_platform/backend/backend/server/v2/store/exceptions.py b/autogpt_platform/backend/backend/api/features/store/exceptions.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/store/exceptions.py rename to autogpt_platform/backend/backend/api/features/store/exceptions.py diff --git a/autogpt_platform/backend/backend/server/v2/store/image_gen.py b/autogpt_platform/backend/backend/api/features/store/image_gen.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/store/image_gen.py rename to autogpt_platform/backend/backend/api/features/store/image_gen.py diff --git a/autogpt_platform/backend/backend/server/v2/store/media.py b/autogpt_platform/backend/backend/api/features/store/media.py similarity index 81% rename from autogpt_platform/backend/backend/server/v2/store/media.py rename to autogpt_platform/backend/backend/api/features/store/media.py index 88542dd2c8..cfdc71567a 100644 --- a/autogpt_platform/backend/backend/server/v2/store/media.py +++ b/autogpt_platform/backend/backend/api/features/store/media.py @@ -5,11 +5,12 @@ import uuid import fastapi from gcloud.aio import storage as async_storage -import backend.server.v2.store.exceptions from backend.util.exceptions import MissingConfigError from backend.util.settings import Settings from backend.util.virus_scanner import scan_content_safe +from . import exceptions as store_exceptions + logger = logging.getLogger(__name__) ALLOWED_IMAGE_TYPES = {"image/jpeg", "image/png", "image/gif", "image/webp"} @@ -68,61 +69,55 @@ async def upload_media( await file.seek(0) # Reset file pointer except Exception as e: logger.error(f"Error reading file content: {str(e)}") - raise backend.server.v2.store.exceptions.FileReadError( - "Failed to read file content" - ) from e + raise store_exceptions.FileReadError("Failed to read file content") from e # Validate file signature/magic bytes if file.content_type in ALLOWED_IMAGE_TYPES: # Check image file signatures if content.startswith(b"\xff\xd8\xff"): # JPEG if file.content_type != "image/jpeg": - raise backend.server.v2.store.exceptions.InvalidFileTypeError( + raise store_exceptions.InvalidFileTypeError( "File signature does not match content type" ) elif content.startswith(b"\x89PNG\r\n\x1a\n"): # PNG if file.content_type != "image/png": - raise backend.server.v2.store.exceptions.InvalidFileTypeError( + raise store_exceptions.InvalidFileTypeError( "File signature does not match content type" ) elif content.startswith(b"GIF87a") or content.startswith(b"GIF89a"): # GIF if file.content_type != "image/gif": - raise backend.server.v2.store.exceptions.InvalidFileTypeError( + raise store_exceptions.InvalidFileTypeError( "File signature does not match content type" ) elif content.startswith(b"RIFF") and content[8:12] == b"WEBP": # WebP if file.content_type != "image/webp": - raise backend.server.v2.store.exceptions.InvalidFileTypeError( + raise store_exceptions.InvalidFileTypeError( "File signature does not match content type" ) else: - raise backend.server.v2.store.exceptions.InvalidFileTypeError( - "Invalid image file signature" - ) + raise store_exceptions.InvalidFileTypeError("Invalid image file signature") elif file.content_type in ALLOWED_VIDEO_TYPES: # Check video file signatures if content.startswith(b"\x00\x00\x00") and (content[4:8] == b"ftyp"): # MP4 if file.content_type != "video/mp4": - raise backend.server.v2.store.exceptions.InvalidFileTypeError( + raise 
store_exceptions.InvalidFileTypeError( "File signature does not match content type" ) elif content.startswith(b"\x1a\x45\xdf\xa3"): # WebM if file.content_type != "video/webm": - raise backend.server.v2.store.exceptions.InvalidFileTypeError( + raise store_exceptions.InvalidFileTypeError( "File signature does not match content type" ) else: - raise backend.server.v2.store.exceptions.InvalidFileTypeError( - "Invalid video file signature" - ) + raise store_exceptions.InvalidFileTypeError("Invalid video file signature") settings = Settings() # Check required settings first before doing any file processing if not settings.config.media_gcs_bucket_name: logger.error("Missing GCS bucket name setting") - raise backend.server.v2.store.exceptions.StorageConfigError( + raise store_exceptions.StorageConfigError( "Missing storage bucket configuration" ) @@ -137,7 +132,7 @@ async def upload_media( and content_type not in ALLOWED_VIDEO_TYPES ): logger.warning(f"Invalid file type attempted: {content_type}") - raise backend.server.v2.store.exceptions.InvalidFileTypeError( + raise store_exceptions.InvalidFileTypeError( f"File type not supported. Must be jpeg, png, gif, webp, mp4 or webm. Content type: {content_type}" ) @@ -150,16 +145,14 @@ async def upload_media( file_size += len(chunk) if file_size > MAX_FILE_SIZE: logger.warning(f"File size too large: {file_size} bytes") - raise backend.server.v2.store.exceptions.FileSizeTooLargeError( + raise store_exceptions.FileSizeTooLargeError( "File too large. Maximum size is 50MB" ) - except backend.server.v2.store.exceptions.FileSizeTooLargeError: + except store_exceptions.FileSizeTooLargeError: raise except Exception as e: logger.error(f"Error reading file chunks: {str(e)}") - raise backend.server.v2.store.exceptions.FileReadError( - "Failed to read uploaded file" - ) from e + raise store_exceptions.FileReadError("Failed to read uploaded file") from e # Reset file pointer await file.seek(0) @@ -198,14 +191,14 @@ async def upload_media( except Exception as e: logger.error(f"GCS storage error: {str(e)}") - raise backend.server.v2.store.exceptions.StorageUploadError( + raise store_exceptions.StorageUploadError( "Failed to upload file to storage" ) from e - except backend.server.v2.store.exceptions.MediaUploadError: + except store_exceptions.MediaUploadError: raise except Exception as e: logger.exception("Unexpected error in upload_media") - raise backend.server.v2.store.exceptions.MediaUploadError( + raise store_exceptions.MediaUploadError( "Unexpected error during media upload" ) from e diff --git a/autogpt_platform/backend/backend/server/v2/store/media_test.py b/autogpt_platform/backend/backend/api/features/store/media_test.py similarity index 75% rename from autogpt_platform/backend/backend/server/v2/store/media_test.py rename to autogpt_platform/backend/backend/api/features/store/media_test.py index 3722d2fdc3..7f3899c8a5 100644 --- a/autogpt_platform/backend/backend/server/v2/store/media_test.py +++ b/autogpt_platform/backend/backend/api/features/store/media_test.py @@ -6,17 +6,18 @@ import fastapi import pytest import starlette.datastructures -import backend.server.v2.store.exceptions -import backend.server.v2.store.media from backend.util.settings import Settings +from . import exceptions as store_exceptions +from . 
import media as store_media + @pytest.fixture def mock_settings(monkeypatch): settings = Settings() settings.config.media_gcs_bucket_name = "test-bucket" settings.config.google_application_credentials = "test-credentials" - monkeypatch.setattr("backend.server.v2.store.media.Settings", lambda: settings) + monkeypatch.setattr("backend.api.features.store.media.Settings", lambda: settings) return settings @@ -32,12 +33,13 @@ def mock_storage_client(mocker): # Mock the constructor to return our mock client mocker.patch( - "backend.server.v2.store.media.async_storage.Storage", return_value=mock_client + "backend.api.features.store.media.async_storage.Storage", + return_value=mock_client, ) # Mock virus scanner to avoid actual scanning mocker.patch( - "backend.server.v2.store.media.scan_content_safe", new_callable=AsyncMock + "backend.api.features.store.media.scan_content_safe", new_callable=AsyncMock ) return mock_client @@ -53,7 +55,7 @@ async def test_upload_media_success(mock_settings, mock_storage_client): headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}), ) - result = await backend.server.v2.store.media.upload_media("test-user", test_file) + result = await store_media.upload_media("test-user", test_file) assert result.startswith( "https://storage.googleapis.com/test-bucket/users/test-user/images/" @@ -69,8 +71,8 @@ async def test_upload_media_invalid_type(mock_settings, mock_storage_client): headers=starlette.datastructures.Headers({"content-type": "text/plain"}), ) - with pytest.raises(backend.server.v2.store.exceptions.InvalidFileTypeError): - await backend.server.v2.store.media.upload_media("test-user", test_file) + with pytest.raises(store_exceptions.InvalidFileTypeError): + await store_media.upload_media("test-user", test_file) mock_storage_client.upload.assert_not_called() @@ -79,7 +81,7 @@ async def test_upload_media_missing_credentials(monkeypatch): settings = Settings() settings.config.media_gcs_bucket_name = "" settings.config.google_application_credentials = "" - monkeypatch.setattr("backend.server.v2.store.media.Settings", lambda: settings) + monkeypatch.setattr("backend.api.features.store.media.Settings", lambda: settings) test_file = fastapi.UploadFile( filename="laptop.jpeg", @@ -87,8 +89,8 @@ async def test_upload_media_missing_credentials(monkeypatch): headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}), ) - with pytest.raises(backend.server.v2.store.exceptions.StorageConfigError): - await backend.server.v2.store.media.upload_media("test-user", test_file) + with pytest.raises(store_exceptions.StorageConfigError): + await store_media.upload_media("test-user", test_file) async def test_upload_media_video_type(mock_settings, mock_storage_client): @@ -98,7 +100,7 @@ async def test_upload_media_video_type(mock_settings, mock_storage_client): headers=starlette.datastructures.Headers({"content-type": "video/mp4"}), ) - result = await backend.server.v2.store.media.upload_media("test-user", test_file) + result = await store_media.upload_media("test-user", test_file) assert result.startswith( "https://storage.googleapis.com/test-bucket/users/test-user/videos/" @@ -117,8 +119,8 @@ async def test_upload_media_file_too_large(mock_settings, mock_storage_client): headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}), ) - with pytest.raises(backend.server.v2.store.exceptions.FileSizeTooLargeError): - await backend.server.v2.store.media.upload_media("test-user", test_file) + with 
pytest.raises(store_exceptions.FileSizeTooLargeError): + await store_media.upload_media("test-user", test_file) async def test_upload_media_file_read_error(mock_settings, mock_storage_client): @@ -129,8 +131,8 @@ async def test_upload_media_file_read_error(mock_settings, mock_storage_client): ) test_file.read = unittest.mock.AsyncMock(side_effect=Exception("Read error")) - with pytest.raises(backend.server.v2.store.exceptions.FileReadError): - await backend.server.v2.store.media.upload_media("test-user", test_file) + with pytest.raises(store_exceptions.FileReadError): + await store_media.upload_media("test-user", test_file) async def test_upload_media_png_success(mock_settings, mock_storage_client): @@ -140,7 +142,7 @@ async def test_upload_media_png_success(mock_settings, mock_storage_client): headers=starlette.datastructures.Headers({"content-type": "image/png"}), ) - result = await backend.server.v2.store.media.upload_media("test-user", test_file) + result = await store_media.upload_media("test-user", test_file) assert result.startswith( "https://storage.googleapis.com/test-bucket/users/test-user/images/" ) @@ -154,7 +156,7 @@ async def test_upload_media_gif_success(mock_settings, mock_storage_client): headers=starlette.datastructures.Headers({"content-type": "image/gif"}), ) - result = await backend.server.v2.store.media.upload_media("test-user", test_file) + result = await store_media.upload_media("test-user", test_file) assert result.startswith( "https://storage.googleapis.com/test-bucket/users/test-user/images/" ) @@ -168,7 +170,7 @@ async def test_upload_media_webp_success(mock_settings, mock_storage_client): headers=starlette.datastructures.Headers({"content-type": "image/webp"}), ) - result = await backend.server.v2.store.media.upload_media("test-user", test_file) + result = await store_media.upload_media("test-user", test_file) assert result.startswith( "https://storage.googleapis.com/test-bucket/users/test-user/images/" ) @@ -182,7 +184,7 @@ async def test_upload_media_webm_success(mock_settings, mock_storage_client): headers=starlette.datastructures.Headers({"content-type": "video/webm"}), ) - result = await backend.server.v2.store.media.upload_media("test-user", test_file) + result = await store_media.upload_media("test-user", test_file) assert result.startswith( "https://storage.googleapis.com/test-bucket/users/test-user/videos/" ) @@ -196,8 +198,8 @@ async def test_upload_media_mismatched_signature(mock_settings, mock_storage_cli headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}), ) - with pytest.raises(backend.server.v2.store.exceptions.InvalidFileTypeError): - await backend.server.v2.store.media.upload_media("test-user", test_file) + with pytest.raises(store_exceptions.InvalidFileTypeError): + await store_media.upload_media("test-user", test_file) async def test_upload_media_invalid_signature(mock_settings, mock_storage_client): @@ -207,5 +209,5 @@ async def test_upload_media_invalid_signature(mock_settings, mock_storage_client headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}), ) - with pytest.raises(backend.server.v2.store.exceptions.InvalidFileTypeError): - await backend.server.v2.store.media.upload_media("test-user", test_file) + with pytest.raises(store_exceptions.InvalidFileTypeError): + await store_media.upload_media("test-user", test_file) diff --git a/autogpt_platform/backend/backend/server/v2/store/model.py b/autogpt_platform/backend/backend/api/features/store/model.py similarity index 100% rename from 
autogpt_platform/backend/backend/server/v2/store/model.py rename to autogpt_platform/backend/backend/api/features/store/model.py diff --git a/autogpt_platform/backend/backend/server/v2/store/model_test.py b/autogpt_platform/backend/backend/api/features/store/model_test.py similarity index 83% rename from autogpt_platform/backend/backend/server/v2/store/model_test.py rename to autogpt_platform/backend/backend/api/features/store/model_test.py index c387dfdecb..3633e6549e 100644 --- a/autogpt_platform/backend/backend/server/v2/store/model_test.py +++ b/autogpt_platform/backend/backend/api/features/store/model_test.py @@ -2,11 +2,11 @@ import datetime import prisma.enums -import backend.server.v2.store.model +from . import model as store_model def test_pagination(): - pagination = backend.server.v2.store.model.Pagination( + pagination = store_model.Pagination( total_items=100, total_pages=5, current_page=2, page_size=20 ) assert pagination.total_items == 100 @@ -16,7 +16,7 @@ def test_pagination(): def test_store_agent(): - agent = backend.server.v2.store.model.StoreAgent( + agent = store_model.StoreAgent( slug="test-agent", agent_name="Test Agent", agent_image="test.jpg", @@ -34,9 +34,9 @@ def test_store_agent(): def test_store_agents_response(): - response = backend.server.v2.store.model.StoreAgentsResponse( + response = store_model.StoreAgentsResponse( agents=[ - backend.server.v2.store.model.StoreAgent( + store_model.StoreAgent( slug="test-agent", agent_name="Test Agent", agent_image="test.jpg", @@ -48,7 +48,7 @@ def test_store_agents_response(): rating=4.5, ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( total_items=1, total_pages=1, current_page=1, page_size=20 ), ) @@ -57,7 +57,7 @@ def test_store_agents_response(): def test_store_agent_details(): - details = backend.server.v2.store.model.StoreAgentDetails( + details = store_model.StoreAgentDetails( store_listing_version_id="version123", slug="test-agent", agent_name="Test Agent", @@ -81,7 +81,7 @@ def test_store_agent_details(): def test_creator(): - creator = backend.server.v2.store.model.Creator( + creator = store_model.Creator( agent_rating=4.8, agent_runs=1000, name="Test Creator", @@ -96,9 +96,9 @@ def test_creator(): def test_creators_response(): - response = backend.server.v2.store.model.CreatorsResponse( + response = store_model.CreatorsResponse( creators=[ - backend.server.v2.store.model.Creator( + store_model.Creator( agent_rating=4.8, agent_runs=1000, name="Test Creator", @@ -109,7 +109,7 @@ def test_creators_response(): is_featured=False, ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( total_items=1, total_pages=1, current_page=1, page_size=20 ), ) @@ -118,7 +118,7 @@ def test_creators_response(): def test_creator_details(): - details = backend.server.v2.store.model.CreatorDetails( + details = store_model.CreatorDetails( name="Test Creator", username="creator1", description="Test description", @@ -135,7 +135,7 @@ def test_creator_details(): def test_store_submission(): - submission = backend.server.v2.store.model.StoreSubmission( + submission = store_model.StoreSubmission( agent_id="agent123", agent_version=1, sub_heading="Test subheading", @@ -154,9 +154,9 @@ def test_store_submission(): def test_store_submissions_response(): - response = backend.server.v2.store.model.StoreSubmissionsResponse( + response = store_model.StoreSubmissionsResponse( submissions=[ - backend.server.v2.store.model.StoreSubmission( + 
store_model.StoreSubmission( agent_id="agent123", agent_version=1, sub_heading="Test subheading", @@ -170,7 +170,7 @@ def test_store_submissions_response(): rating=4.5, ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( total_items=1, total_pages=1, current_page=1, page_size=20 ), ) @@ -179,7 +179,7 @@ def test_store_submissions_response(): def test_store_submission_request(): - request = backend.server.v2.store.model.StoreSubmissionRequest( + request = store_model.StoreSubmissionRequest( agent_id="agent123", agent_version=1, slug="test-agent", diff --git a/autogpt_platform/backend/backend/server/v2/store/routes.py b/autogpt_platform/backend/backend/api/features/store/routes.py similarity index 87% rename from autogpt_platform/backend/backend/server/v2/store/routes.py rename to autogpt_platform/backend/backend/api/features/store/routes.py index b0c1df6e22..6a9bb05291 100644 --- a/autogpt_platform/backend/backend/server/v2/store/routes.py +++ b/autogpt_platform/backend/backend/api/features/store/routes.py @@ -9,14 +9,14 @@ import fastapi import fastapi.responses import backend.data.graph -import backend.server.v2.store.cache as store_cache -import backend.server.v2.store.db -import backend.server.v2.store.exceptions -import backend.server.v2.store.image_gen -import backend.server.v2.store.media -import backend.server.v2.store.model import backend.util.json +from . import cache as store_cache +from . import db as store_db +from . import image_gen as store_image_gen +from . import media as store_media +from . import model as store_model + logger = logging.getLogger(__name__) router = fastapi.APIRouter() @@ -32,7 +32,7 @@ router = fastapi.APIRouter() summary="Get user profile", tags=["store", "private"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], - response_model=backend.server.v2.store.model.ProfileDetails, + response_model=store_model.ProfileDetails, ) async def get_profile( user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id), @@ -41,7 +41,7 @@ async def get_profile( Get the profile details for the authenticated user. Cached for 1 hour per user. 
""" - profile = await backend.server.v2.store.db.get_user_profile(user_id) + profile = await store_db.get_user_profile(user_id) if profile is None: return fastapi.responses.JSONResponse( status_code=404, @@ -55,10 +55,10 @@ async def get_profile( summary="Update user profile", tags=["store", "private"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], - response_model=backend.server.v2.store.model.CreatorDetails, + response_model=store_model.CreatorDetails, ) async def update_or_create_profile( - profile: backend.server.v2.store.model.Profile, + profile: store_model.Profile, user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id), ): """ @@ -74,9 +74,7 @@ async def update_or_create_profile( Raises: HTTPException: If there is an error updating the profile """ - updated_profile = await backend.server.v2.store.db.update_profile( - user_id=user_id, profile=profile - ) + updated_profile = await store_db.update_profile(user_id=user_id, profile=profile) return updated_profile @@ -89,7 +87,7 @@ async def update_or_create_profile( "/agents", summary="List store agents", tags=["store", "public"], - response_model=backend.server.v2.store.model.StoreAgentsResponse, + response_model=store_model.StoreAgentsResponse, ) async def get_agents( featured: bool = False, @@ -152,7 +150,7 @@ async def get_agents( "/agents/{username}/{agent_name}", summary="Get specific agent", tags=["store", "public"], - response_model=backend.server.v2.store.model.StoreAgentDetails, + response_model=store_model.StoreAgentDetails, ) async def get_agent(username: str, agent_name: str): """ @@ -179,9 +177,7 @@ async def get_graph_meta_by_store_listing_version_id(store_listing_version_id: s """ Get Agent Graph from Store Listing Version ID. """ - graph = await backend.server.v2.store.db.get_available_graph( - store_listing_version_id - ) + graph = await store_db.get_available_graph(store_listing_version_id) return graph @@ -190,15 +186,13 @@ async def get_graph_meta_by_store_listing_version_id(store_listing_version_id: s summary="Get agent by version", tags=["store"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], - response_model=backend.server.v2.store.model.StoreAgentDetails, + response_model=store_model.StoreAgentDetails, ) async def get_store_agent(store_listing_version_id: str): """ Get Store Agent Details from Store Listing Version ID. 
""" - agent = await backend.server.v2.store.db.get_store_agent_by_version_id( - store_listing_version_id - ) + agent = await store_db.get_store_agent_by_version_id(store_listing_version_id) return agent @@ -208,12 +202,12 @@ async def get_store_agent(store_listing_version_id: str): summary="Create agent review", tags=["store"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], - response_model=backend.server.v2.store.model.StoreReview, + response_model=store_model.StoreReview, ) async def create_review( username: str, agent_name: str, - review: backend.server.v2.store.model.StoreReviewCreate, + review: store_model.StoreReviewCreate, user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id), ): """ @@ -231,7 +225,7 @@ async def create_review( username = urllib.parse.unquote(username).lower() agent_name = urllib.parse.unquote(agent_name).lower() # Create the review - created_review = await backend.server.v2.store.db.create_store_review( + created_review = await store_db.create_store_review( user_id=user_id, store_listing_version_id=review.store_listing_version_id, score=review.score, @@ -250,7 +244,7 @@ async def create_review( "/creators", summary="List store creators", tags=["store", "public"], - response_model=backend.server.v2.store.model.CreatorsResponse, + response_model=store_model.CreatorsResponse, ) async def get_creators( featured: bool = False, @@ -295,7 +289,7 @@ async def get_creators( "/creator/{username}", summary="Get creator details", tags=["store", "public"], - response_model=backend.server.v2.store.model.CreatorDetails, + response_model=store_model.CreatorDetails, ) async def get_creator( username: str, @@ -319,7 +313,7 @@ async def get_creator( summary="Get my agents", tags=["store", "private"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], - response_model=backend.server.v2.store.model.MyAgentsResponse, + response_model=store_model.MyAgentsResponse, ) async def get_my_agents( user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id), @@ -329,9 +323,7 @@ async def get_my_agents( """ Get user's own agents. 
""" - agents = await backend.server.v2.store.db.get_my_agents( - user_id, page=page, page_size=page_size - ) + agents = await store_db.get_my_agents(user_id, page=page, page_size=page_size) return agents @@ -356,7 +348,7 @@ async def delete_submission( Returns: bool: True if the submission was successfully deleted, False otherwise """ - result = await backend.server.v2.store.db.delete_store_submission( + result = await store_db.delete_store_submission( user_id=user_id, submission_id=submission_id, ) @@ -369,7 +361,7 @@ async def delete_submission( summary="List my submissions", tags=["store", "private"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], - response_model=backend.server.v2.store.model.StoreSubmissionsResponse, + response_model=store_model.StoreSubmissionsResponse, ) async def get_submissions( user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id), @@ -399,7 +391,7 @@ async def get_submissions( raise fastapi.HTTPException( status_code=422, detail="Page size must be greater than 0" ) - listings = await backend.server.v2.store.db.get_store_submissions( + listings = await store_db.get_store_submissions( user_id=user_id, page=page, page_size=page_size, @@ -412,10 +404,10 @@ async def get_submissions( summary="Create store submission", tags=["store", "private"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], - response_model=backend.server.v2.store.model.StoreSubmission, + response_model=store_model.StoreSubmission, ) async def create_submission( - submission_request: backend.server.v2.store.model.StoreSubmissionRequest, + submission_request: store_model.StoreSubmissionRequest, user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id), ): """ @@ -431,7 +423,7 @@ async def create_submission( Raises: HTTPException: If there is an error creating the submission """ - result = await backend.server.v2.store.db.create_store_submission( + result = await store_db.create_store_submission( user_id=user_id, agent_id=submission_request.agent_id, agent_version=submission_request.agent_version, @@ -456,11 +448,11 @@ async def create_submission( summary="Edit store submission", tags=["store", "private"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], - response_model=backend.server.v2.store.model.StoreSubmission, + response_model=store_model.StoreSubmission, ) async def edit_submission( store_listing_version_id: str, - submission_request: backend.server.v2.store.model.StoreSubmissionEditRequest, + submission_request: store_model.StoreSubmissionEditRequest, user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id), ): """ @@ -477,7 +469,7 @@ async def edit_submission( Raises: HTTPException: If there is an error editing the submission """ - result = await backend.server.v2.store.db.edit_store_submission( + result = await store_db.edit_store_submission( user_id=user_id, store_listing_version_id=store_listing_version_id, name=submission_request.name, @@ -518,9 +510,7 @@ async def upload_submission_media( Raises: HTTPException: If there is an error uploading the media """ - media_url = await backend.server.v2.store.media.upload_media( - user_id=user_id, file=file - ) + media_url = await store_media.upload_media(user_id=user_id, file=file) return media_url @@ -555,14 +545,12 @@ async def generate_image( # Use .jpeg here since we are generating JPEG images filename = f"agent_{agent_id}.jpeg" - existing_url = await backend.server.v2.store.media.check_media_exists( - user_id, filename - ) + existing_url = await 
store_media.check_media_exists(user_id, filename) if existing_url: logger.info(f"Using existing image for agent {agent_id}") return fastapi.responses.JSONResponse(content={"image_url": existing_url}) # Generate agent image as JPEG - image = await backend.server.v2.store.image_gen.generate_agent_image(agent=agent) + image = await store_image_gen.generate_agent_image(agent=agent) # Create UploadFile with the correct filename and content_type image_file = fastapi.UploadFile( @@ -570,7 +558,7 @@ async def generate_image( filename=filename, ) - image_url = await backend.server.v2.store.media.upload_media( + image_url = await store_media.upload_media( user_id=user_id, file=image_file, use_file_name=True ) @@ -599,7 +587,7 @@ async def download_agent_file( Raises: HTTPException: If the agent is not found or an unexpected error occurs. """ - graph_data = await backend.server.v2.store.db.get_agent(store_listing_version_id) + graph_data = await store_db.get_agent(store_listing_version_id) file_name = f"agent_{graph_data.id}_v{graph_data.version or 'latest'}.json" # Sending graph as a stream (similar to marketplace v1) diff --git a/autogpt_platform/backend/backend/server/v2/store/routes_test.py b/autogpt_platform/backend/backend/api/features/store/routes_test.py similarity index 76% rename from autogpt_platform/backend/backend/server/v2/store/routes_test.py rename to autogpt_platform/backend/backend/api/features/store/routes_test.py index 03322ee988..b9c040c149 100644 --- a/autogpt_platform/backend/backend/server/v2/store/routes_test.py +++ b/autogpt_platform/backend/backend/api/features/store/routes_test.py @@ -8,15 +8,15 @@ import pytest import pytest_mock from pytest_snapshot.plugin import Snapshot -import backend.server.v2.store.model -import backend.server.v2.store.routes +from . import model as store_model +from . 
import routes as store_routes # Using a fixed timestamp for reproducible tests # 2023 date is intentionally used to ensure tests work regardless of current year FIXED_NOW = datetime.datetime(2023, 1, 1, 0, 0, 0) app = fastapi.FastAPI() -app.include_router(backend.server.v2.store.routes.router) +app.include_router(store_routes.router) client = fastapi.testclient.TestClient(app) @@ -35,23 +35,21 @@ def test_get_agents_defaults( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.StoreAgentsResponse( + mocked_value = store_model.StoreAgentsResponse( agents=[], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=0, total_items=0, total_pages=0, page_size=10, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents") mock_db_call.return_value = mocked_value response = client.get("/agents") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreAgentsResponse.model_validate( - response.json() - ) + data = store_model.StoreAgentsResponse.model_validate(response.json()) assert data.pagination.total_pages == 0 assert data.agents == [] @@ -72,9 +70,9 @@ def test_get_agents_featured( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.StoreAgentsResponse( + mocked_value = store_model.StoreAgentsResponse( agents=[ - backend.server.v2.store.model.StoreAgent( + store_model.StoreAgent( slug="featured-agent", agent_name="Featured Agent", agent_image="featured.jpg", @@ -86,20 +84,18 @@ def test_get_agents_featured( rating=4.5, ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=1, total_items=1, total_pages=1, page_size=20, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents") mock_db_call.return_value = mocked_value response = client.get("/agents?featured=true") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreAgentsResponse.model_validate( - response.json() - ) + data = store_model.StoreAgentsResponse.model_validate(response.json()) assert len(data.agents) == 1 assert data.agents[0].slug == "featured-agent" snapshot.snapshot_dir = "snapshots" @@ -119,9 +115,9 @@ def test_get_agents_by_creator( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.StoreAgentsResponse( + mocked_value = store_model.StoreAgentsResponse( agents=[ - backend.server.v2.store.model.StoreAgent( + store_model.StoreAgent( slug="creator-agent", agent_name="Creator Agent", agent_image="agent.jpg", @@ -133,20 +129,18 @@ def test_get_agents_by_creator( rating=4.0, ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=1, total_items=1, total_pages=1, page_size=20, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents") mock_db_call.return_value = mocked_value response = client.get("/agents?creator=specific-creator") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreAgentsResponse.model_validate( - response.json() - ) + data = store_model.StoreAgentsResponse.model_validate(response.json()) assert 
len(data.agents) == 1 assert data.agents[0].creator == "specific-creator" snapshot.snapshot_dir = "snapshots" @@ -166,9 +160,9 @@ def test_get_agents_sorted( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.StoreAgentsResponse( + mocked_value = store_model.StoreAgentsResponse( agents=[ - backend.server.v2.store.model.StoreAgent( + store_model.StoreAgent( slug="top-agent", agent_name="Top Agent", agent_image="top.jpg", @@ -180,20 +174,18 @@ def test_get_agents_sorted( rating=5.0, ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=1, total_items=1, total_pages=1, page_size=20, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents") mock_db_call.return_value = mocked_value response = client.get("/agents?sorted_by=runs") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreAgentsResponse.model_validate( - response.json() - ) + data = store_model.StoreAgentsResponse.model_validate(response.json()) assert len(data.agents) == 1 assert data.agents[0].runs == 1000 snapshot.snapshot_dir = "snapshots" @@ -213,9 +205,9 @@ def test_get_agents_search( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.StoreAgentsResponse( + mocked_value = store_model.StoreAgentsResponse( agents=[ - backend.server.v2.store.model.StoreAgent( + store_model.StoreAgent( slug="search-agent", agent_name="Search Agent", agent_image="search.jpg", @@ -227,20 +219,18 @@ def test_get_agents_search( rating=4.2, ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=1, total_items=1, total_pages=1, page_size=20, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents") mock_db_call.return_value = mocked_value response = client.get("/agents?search_query=specific") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreAgentsResponse.model_validate( - response.json() - ) + data = store_model.StoreAgentsResponse.model_validate(response.json()) assert len(data.agents) == 1 assert "specific" in data.agents[0].description.lower() snapshot.snapshot_dir = "snapshots" @@ -260,9 +250,9 @@ def test_get_agents_category( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.StoreAgentsResponse( + mocked_value = store_model.StoreAgentsResponse( agents=[ - backend.server.v2.store.model.StoreAgent( + store_model.StoreAgent( slug="category-agent", agent_name="Category Agent", agent_image="category.jpg", @@ -274,20 +264,18 @@ def test_get_agents_category( rating=4.1, ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=1, total_items=1, total_pages=1, page_size=20, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents") mock_db_call.return_value = mocked_value response = client.get("/agents?category=test-category") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreAgentsResponse.model_validate( - response.json() - ) + data = store_model.StoreAgentsResponse.model_validate(response.json()) assert len(data.agents) == 1 
snapshot.snapshot_dir = "snapshots" snapshot.assert_match(json.dumps(response.json(), indent=2), "agts_category") @@ -306,9 +294,9 @@ def test_get_agents_pagination( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.StoreAgentsResponse( + mocked_value = store_model.StoreAgentsResponse( agents=[ - backend.server.v2.store.model.StoreAgent( + store_model.StoreAgent( slug=f"agent-{i}", agent_name=f"Agent {i}", agent_image=f"agent{i}.jpg", @@ -321,20 +309,18 @@ def test_get_agents_pagination( ) for i in range(5) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=2, total_items=15, total_pages=3, page_size=5, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents") mock_db_call.return_value = mocked_value response = client.get("/agents?page=2&page_size=5") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreAgentsResponse.model_validate( - response.json() - ) + data = store_model.StoreAgentsResponse.model_validate(response.json()) assert len(data.agents) == 5 assert data.pagination.current_page == 2 assert data.pagination.page_size == 5 @@ -365,7 +351,7 @@ def test_get_agents_malformed_request(mocker: pytest_mock.MockFixture): assert response.status_code == 422 # Verify no DB calls were made - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents") mock_db_call.assert_not_called() @@ -373,7 +359,7 @@ def test_get_agent_details( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.StoreAgentDetails( + mocked_value = store_model.StoreAgentDetails( store_listing_version_id="test-version-id", slug="test-agent", agent_name="Test Agent", @@ -390,15 +376,13 @@ def test_get_agent_details( versions=["1.0.0", "1.1.0"], last_updated=FIXED_NOW, ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agent_details") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agent_details") mock_db_call.return_value = mocked_value response = client.get("/agents/creator1/test-agent") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreAgentDetails.model_validate( - response.json() - ) + data = store_model.StoreAgentDetails.model_validate(response.json()) assert data.agent_name == "Test Agent" assert data.creator == "creator1" snapshot.snapshot_dir = "snapshots" @@ -410,24 +394,22 @@ def test_get_creators_defaults( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.CreatorsResponse( + mocked_value = store_model.CreatorsResponse( creators=[], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=0, total_items=0, total_pages=0, page_size=10, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_creators") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_creators") mock_db_call.return_value = mocked_value response = client.get("/creators") assert response.status_code == 200 - data = backend.server.v2.store.model.CreatorsResponse.model_validate( - response.json() - ) + data = store_model.CreatorsResponse.model_validate(response.json()) assert data.pagination.total_pages == 0 assert data.creators == [] 
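
All of the store route tests in these hunks follow the same shape after the move: build the expected payload from the relatively imported store model, patch the DB call at its new `backend.api.features.store.db` path, and round-trip the JSON body through the Pydantic model. A minimal sketch of that pattern, assuming the relocated package layout and the `pytest-mock` fixture these tests already use:

```python
import fastapi
import fastapi.testclient
import pytest_mock

from backend.api.features.store import model as store_model
from backend.api.features.store import routes as store_routes

app = fastapi.FastAPI()
app.include_router(store_routes.router)
client = fastapi.testclient.TestClient(app)


def test_get_agents_empty(mocker: pytest_mock.MockFixture) -> None:
    # Build the expected payload from the store models that now live in
    # backend/api/features/store/model.py.
    mocked_value = store_model.StoreAgentsResponse(
        agents=[],
        pagination=store_model.Pagination(
            current_page=0, total_items=0, total_pages=0, page_size=10
        ),
    )
    # Patch the DB call at its new dotted path; the router resolves it from
    # the relocated backend.api.features.store.db module at request time.
    mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents")
    mock_db_call.return_value = mocked_value

    response = client.get("/agents")
    assert response.status_code == 200

    # Round-trip the JSON body through the Pydantic model to check the schema.
    data = store_model.StoreAgentsResponse.model_validate(response.json())
    assert data.agents == []
    mock_db_call.assert_called_once()
```

The patch target is the important part: the routes module looks the function up on its sibling `db` module, so the string has to point at the new `backend.api.features.store.db` location rather than the old `backend.server.v2.store.db` one.
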
snapshot.snapshot_dir = "snapshots" @@ -441,9 +423,9 @@ def test_get_creators_pagination( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.CreatorsResponse( + mocked_value = store_model.CreatorsResponse( creators=[ - backend.server.v2.store.model.Creator( + store_model.Creator( name=f"Creator {i}", username=f"creator{i}", description=f"Creator {i} description", @@ -455,22 +437,20 @@ def test_get_creators_pagination( ) for i in range(5) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=2, total_items=15, total_pages=3, page_size=5, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_creators") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_creators") mock_db_call.return_value = mocked_value response = client.get("/creators?page=2&page_size=5") assert response.status_code == 200 - data = backend.server.v2.store.model.CreatorsResponse.model_validate( - response.json() - ) + data = store_model.CreatorsResponse.model_validate(response.json()) assert len(data.creators) == 5 assert data.pagination.current_page == 2 assert data.pagination.page_size == 5 @@ -495,7 +475,7 @@ def test_get_creators_malformed_request(mocker: pytest_mock.MockFixture): assert response.status_code == 422 # Verify no DB calls were made - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_creators") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_creators") mock_db_call.assert_not_called() @@ -503,7 +483,7 @@ def test_get_creator_details( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.CreatorDetails( + mocked_value = store_model.CreatorDetails( name="Test User", username="creator1", description="Test creator description", @@ -513,13 +493,15 @@ def test_get_creator_details( agent_runs=1000, top_categories=["category1", "category2"], ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_creator_details") + mock_db_call = mocker.patch( + "backend.api.features.store.db.get_store_creator_details" + ) mock_db_call.return_value = mocked_value response = client.get("/creator/creator1") assert response.status_code == 200 - data = backend.server.v2.store.model.CreatorDetails.model_validate(response.json()) + data = store_model.CreatorDetails.model_validate(response.json()) assert data.username == "creator1" assert data.name == "Test User" snapshot.snapshot_dir = "snapshots" @@ -532,9 +514,9 @@ def test_get_submissions_success( snapshot: Snapshot, test_user_id: str, ) -> None: - mocked_value = backend.server.v2.store.model.StoreSubmissionsResponse( + mocked_value = store_model.StoreSubmissionsResponse( submissions=[ - backend.server.v2.store.model.StoreSubmission( + store_model.StoreSubmission( name="Test Agent", description="Test agent description", image_urls=["test.jpg"], @@ -550,22 +532,20 @@ def test_get_submissions_success( categories=["test-category"], ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=1, total_items=1, total_pages=1, page_size=20, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_submissions") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_submissions") mock_db_call.return_value = mocked_value response = client.get("/submissions") assert response.status_code == 200 - data = 
backend.server.v2.store.model.StoreSubmissionsResponse.model_validate( - response.json() - ) + data = store_model.StoreSubmissionsResponse.model_validate(response.json()) assert len(data.submissions) == 1 assert data.submissions[0].name == "Test Agent" assert data.pagination.current_page == 1 @@ -579,24 +559,22 @@ def test_get_submissions_pagination( snapshot: Snapshot, test_user_id: str, ) -> None: - mocked_value = backend.server.v2.store.model.StoreSubmissionsResponse( + mocked_value = store_model.StoreSubmissionsResponse( submissions=[], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=2, total_items=10, total_pages=2, page_size=5, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_submissions") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_submissions") mock_db_call.return_value = mocked_value response = client.get("/submissions?page=2&page_size=5") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreSubmissionsResponse.model_validate( - response.json() - ) + data = store_model.StoreSubmissionsResponse.model_validate(response.json()) assert data.pagination.current_page == 2 assert data.pagination.page_size == 5 snapshot.snapshot_dir = "snapshots" @@ -618,5 +596,5 @@ def test_get_submissions_malformed_request(mocker: pytest_mock.MockFixture): assert response.status_code == 422 # Verify no DB calls were made - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_submissions") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_submissions") mock_db_call.assert_not_called() diff --git a/autogpt_platform/backend/backend/server/v2/store/test_cache_delete.py b/autogpt_platform/backend/backend/api/features/store/test_cache_delete.py similarity index 96% rename from autogpt_platform/backend/backend/server/v2/store/test_cache_delete.py rename to autogpt_platform/backend/backend/api/features/store/test_cache_delete.py index 4111de0ee8..dd9be1f4ab 100644 --- a/autogpt_platform/backend/backend/server/v2/store/test_cache_delete.py +++ b/autogpt_platform/backend/backend/api/features/store/test_cache_delete.py @@ -8,10 +8,11 @@ from unittest.mock import AsyncMock, patch import pytest -from backend.server.v2.store import cache as store_cache -from backend.server.v2.store.model import StoreAgent, StoreAgentsResponse from backend.util.models import Pagination +from . 
import cache as store_cache +from .model import StoreAgent, StoreAgentsResponse + class TestCacheDeletion: """Test cache deletion functionality for store routes.""" @@ -43,7 +44,7 @@ class TestCacheDeletion: ) with patch( - "backend.server.v2.store.db.get_store_agents", + "backend.api.features.store.db.get_store_agents", new_callable=AsyncMock, return_value=mock_response, ) as mock_db: @@ -152,7 +153,7 @@ class TestCacheDeletion: ) with patch( - "backend.server.v2.store.db.get_store_agents", + "backend.api.features.store.db.get_store_agents", new_callable=AsyncMock, return_value=mock_response, ): @@ -203,7 +204,7 @@ class TestCacheDeletion: ) with patch( - "backend.server.v2.store.db.get_store_agents", + "backend.api.features.store.db.get_store_agents", new_callable=AsyncMock, return_value=mock_response, ) as mock_db: diff --git a/autogpt_platform/backend/backend/server/routers/v1.py b/autogpt_platform/backend/backend/api/features/v1.py similarity index 98% rename from autogpt_platform/backend/backend/server/routers/v1.py rename to autogpt_platform/backend/backend/api/features/v1.py index e5e74690f8..9b05b4755f 100644 --- a/autogpt_platform/backend/backend/server/routers/v1.py +++ b/autogpt_platform/backend/backend/api/features/v1.py @@ -28,9 +28,18 @@ from pydantic import BaseModel from starlette.status import HTTP_204_NO_CONTENT, HTTP_404_NOT_FOUND from typing_extensions import Optional, TypedDict -import backend.server.integrations.router -import backend.server.routers.analytics -import backend.server.v2.library.db as library_db +from backend.api.model import ( + CreateAPIKeyRequest, + CreateAPIKeyResponse, + CreateGraph, + GraphExecutionSource, + RequestTopUp, + SetGraphActiveVersion, + TimezoneResponse, + UpdatePermissionsRequest, + UpdateTimezoneRequest, + UploadFileResponse, +) from backend.data import execution as execution_db from backend.data import graph as graph_db from backend.data.auth import api_key as api_key_db @@ -79,19 +88,6 @@ from backend.monitoring.instrumentation import ( record_graph_execution, record_graph_operation, ) -from backend.server.model import ( - CreateAPIKeyRequest, - CreateAPIKeyResponse, - CreateGraph, - GraphExecutionSource, - RequestTopUp, - SetGraphActiveVersion, - TimezoneResponse, - UpdatePermissionsRequest, - UpdateTimezoneRequest, - UploadFileResponse, -) -from backend.server.v2.store.model import StoreAgentDetails from backend.util.cache import cached from backend.util.clients import get_scheduler_client from backend.util.cloud_storage import get_cloud_storage_handler @@ -105,6 +101,10 @@ from backend.util.timezone_utils import ( ) from backend.util.virus_scanner import scan_content_safe +from .library import db as library_db +from .library import model as library_model +from .store.model import StoreAgentDetails + def _create_file_size_error(size_bytes: int, max_size_mb: int) -> HTTPException: """Create standardized file size error response.""" @@ -118,76 +118,9 @@ settings = Settings() logger = logging.getLogger(__name__) -async def hide_activity_summaries_if_disabled( - executions: list[execution_db.GraphExecutionMeta], user_id: str -) -> list[execution_db.GraphExecutionMeta]: - """Hide activity summaries and scores if AI_ACTIVITY_STATUS feature is disabled.""" - if await is_feature_enabled(Flag.AI_ACTIVITY_STATUS, user_id): - return executions # Return as-is if feature is enabled - - # Filter out activity features if disabled - filtered_executions = [] - for execution in executions: - if execution.stats: - filtered_stats = 
execution.stats.without_activity_features() - execution = execution.model_copy(update={"stats": filtered_stats}) - filtered_executions.append(execution) - return filtered_executions - - -async def hide_activity_summary_if_disabled( - execution: execution_db.GraphExecution | execution_db.GraphExecutionWithNodes, - user_id: str, -) -> execution_db.GraphExecution | execution_db.GraphExecutionWithNodes: - """Hide activity summary and score for a single execution if AI_ACTIVITY_STATUS feature is disabled.""" - if await is_feature_enabled(Flag.AI_ACTIVITY_STATUS, user_id): - return execution # Return as-is if feature is enabled - - # Filter out activity features if disabled - if execution.stats: - filtered_stats = execution.stats.without_activity_features() - return execution.model_copy(update={"stats": filtered_stats}) - return execution - - -async def _update_library_agent_version_and_settings( - user_id: str, agent_graph: graph_db.GraphModel -) -> library_db.library_model.LibraryAgent: - # Keep the library agent up to date with the new active version - library = await library_db.update_agent_version_in_library( - user_id, agent_graph.id, agent_graph.version - ) - # If the graph has HITL node, initialize the setting if it's not already set. - if ( - agent_graph.has_human_in_the_loop - and library.settings.human_in_the_loop_safe_mode is None - ): - await library_db.update_library_agent_settings( - user_id=user_id, - agent_id=library.id, - settings=library.settings.model_copy( - update={"human_in_the_loop_safe_mode": True} - ), - ) - return library - - # Define the API routes v1_router = APIRouter() -v1_router.include_router( - backend.server.integrations.router.router, - prefix="/integrations", - tags=["integrations"], -) - -v1_router.include_router( - backend.server.routers.analytics.router, - prefix="/analytics", - tags=["analytics"], - dependencies=[Security(requires_user)], -) - ######################################################## ##################### Auth ############################# @@ -953,6 +886,28 @@ async def set_graph_active_version( await on_graph_deactivate(current_active_graph, user_id=user_id) +async def _update_library_agent_version_and_settings( + user_id: str, agent_graph: graph_db.GraphModel +) -> library_model.LibraryAgent: + # Keep the library agent up to date with the new active version + library = await library_db.update_agent_version_in_library( + user_id, agent_graph.id, agent_graph.version + ) + # If the graph has HITL node, initialize the setting if it's not already set. 
+ if ( + agent_graph.has_human_in_the_loop + and library.settings.human_in_the_loop_safe_mode is None + ): + await library_db.update_library_agent_settings( + user_id=user_id, + agent_id=library.id, + settings=library.settings.model_copy( + update={"human_in_the_loop_safe_mode": True} + ), + ) + return library + + @v1_router.patch( path="/graphs/{graph_id}/settings", summary="Update graph settings", @@ -1155,6 +1110,23 @@ async def list_graph_executions( ) +async def hide_activity_summaries_if_disabled( + executions: list[execution_db.GraphExecutionMeta], user_id: str +) -> list[execution_db.GraphExecutionMeta]: + """Hide activity summaries and scores if AI_ACTIVITY_STATUS feature is disabled.""" + if await is_feature_enabled(Flag.AI_ACTIVITY_STATUS, user_id): + return executions # Return as-is if feature is enabled + + # Filter out activity features if disabled + filtered_executions = [] + for execution in executions: + if execution.stats: + filtered_stats = execution.stats.without_activity_features() + execution = execution.model_copy(update={"stats": filtered_stats}) + filtered_executions.append(execution) + return filtered_executions + + @v1_router.get( path="/graphs/{graph_id}/executions/{graph_exec_id}", summary="Get execution details", @@ -1197,6 +1169,21 @@ async def get_graph_execution( return result +async def hide_activity_summary_if_disabled( + execution: execution_db.GraphExecution | execution_db.GraphExecutionWithNodes, + user_id: str, +) -> execution_db.GraphExecution | execution_db.GraphExecutionWithNodes: + """Hide activity summary and score for a single execution if AI_ACTIVITY_STATUS feature is disabled.""" + if await is_feature_enabled(Flag.AI_ACTIVITY_STATUS, user_id): + return execution # Return as-is if feature is enabled + + # Filter out activity features if disabled + if execution.stats: + filtered_stats = execution.stats.without_activity_features() + return execution.model_copy(update={"stats": filtered_stats}) + return execution + + @v1_router.delete( path="/executions/{graph_exec_id}", summary="Delete graph execution", @@ -1257,7 +1244,7 @@ async def enable_execution_sharing( ) # Return the share URL - frontend_url = Settings().config.frontend_base_url or "http://localhost:3000" + frontend_url = settings.config.frontend_base_url or "http://localhost:3000" share_url = f"{frontend_url}/share/{share_token}" return ShareResponse(share_url=share_url, share_token=share_token) diff --git a/autogpt_platform/backend/backend/server/routers/v1_test.py b/autogpt_platform/backend/backend/api/features/v1_test.py similarity index 91% rename from autogpt_platform/backend/backend/server/routers/v1_test.py rename to autogpt_platform/backend/backend/api/features/v1_test.py index 69e1b5f2ae..a186d38810 100644 --- a/autogpt_platform/backend/backend/server/routers/v1_test.py +++ b/autogpt_platform/backend/backend/api/features/v1_test.py @@ -11,13 +11,13 @@ import starlette.datastructures from fastapi import HTTPException, UploadFile from pytest_snapshot.plugin import Snapshot -import backend.server.routers.v1 as v1_routes from backend.data.credit import AutoTopUpConfig from backend.data.graph import GraphModel -from backend.server.routers.v1 import upload_file + +from .v1 import upload_file, v1_router app = fastapi.FastAPI() -app.include_router(v1_routes.v1_router) +app.include_router(v1_router) client = fastapi.testclient.TestClient(app) @@ -50,7 +50,7 @@ def test_get_or_create_user_route( } mocker.patch( - "backend.server.routers.v1.get_or_create_user", + 
"backend.api.features.v1.get_or_create_user", return_value=mock_user, ) @@ -71,7 +71,7 @@ def test_update_user_email_route( ) -> None: """Test update user email endpoint""" mocker.patch( - "backend.server.routers.v1.update_user_email", + "backend.api.features.v1.update_user_email", return_value=None, ) @@ -107,7 +107,7 @@ def test_get_graph_blocks( # Mock get_blocks mocker.patch( - "backend.server.routers.v1.get_blocks", + "backend.api.features.v1.get_blocks", return_value={"test-block": lambda: mock_block}, ) @@ -146,7 +146,7 @@ def test_execute_graph_block( mock_block.execute = mock_execute mocker.patch( - "backend.server.routers.v1.get_block", + "backend.api.features.v1.get_block", return_value=mock_block, ) @@ -155,7 +155,7 @@ def test_execute_graph_block( mock_user.timezone = "UTC" mocker.patch( - "backend.server.routers.v1.get_user_by_id", + "backend.api.features.v1.get_user_by_id", return_value=mock_user, ) @@ -181,7 +181,7 @@ def test_execute_graph_block_not_found( ) -> None: """Test execute block with non-existent block""" mocker.patch( - "backend.server.routers.v1.get_block", + "backend.api.features.v1.get_block", return_value=None, ) @@ -200,7 +200,7 @@ def test_get_user_credits( mock_credit_model = Mock() mock_credit_model.get_credits = AsyncMock(return_value=1000) mocker.patch( - "backend.server.routers.v1.get_user_credit_model", + "backend.api.features.v1.get_user_credit_model", return_value=mock_credit_model, ) @@ -227,7 +227,7 @@ def test_request_top_up( return_value="https://checkout.example.com/session123" ) mocker.patch( - "backend.server.routers.v1.get_user_credit_model", + "backend.api.features.v1.get_user_credit_model", return_value=mock_credit_model, ) @@ -254,7 +254,7 @@ def test_get_auto_top_up( mock_config = AutoTopUpConfig(threshold=100, amount=500) mocker.patch( - "backend.server.routers.v1.get_auto_top_up", + "backend.api.features.v1.get_auto_top_up", return_value=mock_config, ) @@ -279,7 +279,7 @@ def test_configure_auto_top_up( """Test configure auto top-up endpoint - this test would have caught the enum casting bug""" # Mock the set_auto_top_up function to avoid database operations mocker.patch( - "backend.server.routers.v1.set_auto_top_up", + "backend.api.features.v1.set_auto_top_up", return_value=None, ) @@ -289,7 +289,7 @@ def test_configure_auto_top_up( mock_credit_model.top_up_credits.return_value = None mocker.patch( - "backend.server.routers.v1.get_user_credit_model", + "backend.api.features.v1.get_user_credit_model", return_value=mock_credit_model, ) @@ -311,7 +311,7 @@ def test_configure_auto_top_up_validation_errors( ) -> None: """Test configure auto top-up endpoint validation""" # Mock set_auto_top_up to avoid database operations for successful case - mocker.patch("backend.server.routers.v1.set_auto_top_up") + mocker.patch("backend.api.features.v1.set_auto_top_up") # Mock credit model to avoid Stripe API calls for the successful case mock_credit_model = mocker.AsyncMock() @@ -319,7 +319,7 @@ def test_configure_auto_top_up_validation_errors( mock_credit_model.top_up_credits.return_value = None mocker.patch( - "backend.server.routers.v1.get_user_credit_model", + "backend.api.features.v1.get_user_credit_model", return_value=mock_credit_model, ) @@ -393,7 +393,7 @@ def test_get_graph( ) mocker.patch( - "backend.server.routers.v1.graph_db.get_graph", + "backend.api.features.v1.graph_db.get_graph", return_value=mock_graph, ) @@ -415,7 +415,7 @@ def test_get_graph_not_found( ) -> None: """Test get graph with non-existent ID""" mocker.patch( - 
"backend.server.routers.v1.graph_db.get_graph", + "backend.api.features.v1.graph_db.get_graph", return_value=None, ) @@ -443,15 +443,15 @@ def test_delete_graph( ) mocker.patch( - "backend.server.routers.v1.graph_db.get_graph", + "backend.api.features.v1.graph_db.get_graph", return_value=mock_graph, ) mocker.patch( - "backend.server.routers.v1.on_graph_deactivate", + "backend.api.features.v1.on_graph_deactivate", return_value=None, ) mocker.patch( - "backend.server.routers.v1.graph_db.delete_graph", + "backend.api.features.v1.graph_db.delete_graph", return_value=3, # Number of versions deleted ) @@ -498,8 +498,8 @@ async def test_upload_file_success(test_user_id: str): ) # Mock dependencies - with patch("backend.server.routers.v1.scan_content_safe") as mock_scan, patch( - "backend.server.routers.v1.get_cloud_storage_handler" + with patch("backend.api.features.v1.scan_content_safe") as mock_scan, patch( + "backend.api.features.v1.get_cloud_storage_handler" ) as mock_handler_getter: mock_scan.return_value = None @@ -550,8 +550,8 @@ async def test_upload_file_no_filename(test_user_id: str): ), ) - with patch("backend.server.routers.v1.scan_content_safe") as mock_scan, patch( - "backend.server.routers.v1.get_cloud_storage_handler" + with patch("backend.api.features.v1.scan_content_safe") as mock_scan, patch( + "backend.api.features.v1.get_cloud_storage_handler" ) as mock_handler_getter: mock_scan.return_value = None @@ -610,7 +610,7 @@ async def test_upload_file_virus_scan_failure(test_user_id: str): headers=starlette.datastructures.Headers({"content-type": "text/plain"}), ) - with patch("backend.server.routers.v1.scan_content_safe") as mock_scan: + with patch("backend.api.features.v1.scan_content_safe") as mock_scan: # Mock virus scan to raise exception mock_scan.side_effect = RuntimeError("Virus detected!") @@ -631,8 +631,8 @@ async def test_upload_file_cloud_storage_failure(test_user_id: str): headers=starlette.datastructures.Headers({"content-type": "text/plain"}), ) - with patch("backend.server.routers.v1.scan_content_safe") as mock_scan, patch( - "backend.server.routers.v1.get_cloud_storage_handler" + with patch("backend.api.features.v1.scan_content_safe") as mock_scan, patch( + "backend.api.features.v1.get_cloud_storage_handler" ) as mock_handler_getter: mock_scan.return_value = None @@ -678,8 +678,8 @@ async def test_upload_file_gcs_not_configured_fallback(test_user_id: str): headers=starlette.datastructures.Headers({"content-type": "text/plain"}), ) - with patch("backend.server.routers.v1.scan_content_safe") as mock_scan, patch( - "backend.server.routers.v1.get_cloud_storage_handler" + with patch("backend.api.features.v1.scan_content_safe") as mock_scan, patch( + "backend.api.features.v1.get_cloud_storage_handler" ) as mock_handler_getter: mock_scan.return_value = None diff --git a/autogpt_platform/backend/backend/server/middleware/security.py b/autogpt_platform/backend/backend/api/middleware/security.py similarity index 100% rename from autogpt_platform/backend/backend/server/middleware/security.py rename to autogpt_platform/backend/backend/api/middleware/security.py diff --git a/autogpt_platform/backend/backend/server/middleware/security_test.py b/autogpt_platform/backend/backend/api/middleware/security_test.py similarity index 98% rename from autogpt_platform/backend/backend/server/middleware/security_test.py rename to autogpt_platform/backend/backend/api/middleware/security_test.py index 462e5b27ed..57137afc9a 100644 --- 
a/autogpt_platform/backend/backend/server/middleware/security_test.py +++ b/autogpt_platform/backend/backend/api/middleware/security_test.py @@ -3,7 +3,7 @@ from fastapi import FastAPI from fastapi.testclient import TestClient from starlette.applications import Starlette -from backend.server.middleware.security import SecurityHeadersMiddleware +from backend.api.middleware.security import SecurityHeadersMiddleware @pytest.fixture diff --git a/autogpt_platform/backend/backend/server/model.py b/autogpt_platform/backend/backend/api/model.py similarity index 100% rename from autogpt_platform/backend/backend/server/model.py rename to autogpt_platform/backend/backend/api/model.py diff --git a/autogpt_platform/backend/backend/server/rest_api.py b/autogpt_platform/backend/backend/api/rest_api.py similarity index 78% rename from autogpt_platform/backend/backend/server/rest_api.py rename to autogpt_platform/backend/backend/api/rest_api.py index 5db2b18c27..147f62e781 100644 --- a/autogpt_platform/backend/backend/server/rest_api.py +++ b/autogpt_platform/backend/backend/api/rest_api.py @@ -16,36 +16,33 @@ from fastapi.middleware.gzip import GZipMiddleware from fastapi.routing import APIRoute from prisma.errors import PrismaError +import backend.api.features.admin.credit_admin_routes +import backend.api.features.admin.execution_analytics_routes +import backend.api.features.admin.store_admin_routes +import backend.api.features.builder +import backend.api.features.builder.routes +import backend.api.features.chat.routes as chat_routes +import backend.api.features.executions.review.routes +import backend.api.features.library.db +import backend.api.features.library.model +import backend.api.features.library.routes +import backend.api.features.oauth +import backend.api.features.otto.routes +import backend.api.features.postmark.postmark +import backend.api.features.store.model +import backend.api.features.store.routes +import backend.api.features.v1 import backend.data.block import backend.data.db import backend.data.graph import backend.data.user import backend.integrations.webhooks.utils -import backend.server.routers.oauth -import backend.server.routers.postmark.postmark -import backend.server.routers.v1 -import backend.server.v2.admin.credit_admin_routes -import backend.server.v2.admin.execution_analytics_routes -import backend.server.v2.admin.store_admin_routes -import backend.server.v2.builder -import backend.server.v2.builder.routes -import backend.server.v2.chat.routes as chat_routes -import backend.server.v2.executions.review.routes -import backend.server.v2.library.db -import backend.server.v2.library.model -import backend.server.v2.library.routes -import backend.server.v2.otto.routes -import backend.server.v2.store.model -import backend.server.v2.store.routes import backend.util.service import backend.util.settings from backend.blocks.llm import LlmModel from backend.data.model import Credentials from backend.integrations.providers import ProviderName from backend.monitoring.instrumentation import instrument_fastapi -from backend.server.external.api import external_app -from backend.server.middleware.security import SecurityHeadersMiddleware -from backend.server.utils.cors import build_cors_params from backend.util import json from backend.util.cloud_storage import shutdown_cloud_storage_handler from backend.util.exceptions import ( @@ -56,6 +53,13 @@ from backend.util.exceptions import ( from backend.util.feature_flag import initialize_launchdarkly, shutdown_launchdarkly from backend.util.service 
import UnhealthyServiceError +from .external.fastapi_app import external_api +from .features.analytics import router as analytics_router +from .features.integrations.router import router as integrations_router +from .middleware.security import SecurityHeadersMiddleware +from .utils.cors import build_cors_params +from .utils.openapi import sort_openapi + settings = backend.util.settings.Settings() logger = logging.getLogger(__name__) @@ -176,6 +180,9 @@ app.add_middleware(GZipMiddleware, minimum_size=50_000) # 50KB threshold # Add 401 responses to authenticated endpoints in OpenAPI spec add_auth_responses_to_openapi(app) +# Sort OpenAPI schema to eliminate diff on refactors +sort_openapi(app) + # Add Prometheus instrumentation instrument_fastapi( app, @@ -254,42 +261,52 @@ app.add_exception_handler(MissingConfigError, handle_internal_http_error(503)) app.add_exception_handler(ValueError, handle_internal_http_error(400)) app.add_exception_handler(Exception, handle_internal_http_error(500)) -app.include_router(backend.server.routers.v1.v1_router, tags=["v1"], prefix="/api") +app.include_router(backend.api.features.v1.v1_router, tags=["v1"], prefix="/api") app.include_router( - backend.server.v2.store.routes.router, tags=["v2"], prefix="/api/store" + integrations_router, + prefix="/api/integrations", + tags=["v1", "integrations"], ) app.include_router( - backend.server.v2.builder.routes.router, tags=["v2"], prefix="/api/builder" + analytics_router, + prefix="/api/analytics", + tags=["analytics"], ) app.include_router( - backend.server.v2.admin.store_admin_routes.router, + backend.api.features.store.routes.router, tags=["v2"], prefix="/api/store" +) +app.include_router( + backend.api.features.builder.routes.router, tags=["v2"], prefix="/api/builder" +) +app.include_router( + backend.api.features.admin.store_admin_routes.router, tags=["v2", "admin"], prefix="/api/store", ) app.include_router( - backend.server.v2.admin.credit_admin_routes.router, + backend.api.features.admin.credit_admin_routes.router, tags=["v2", "admin"], prefix="/api/credits", ) app.include_router( - backend.server.v2.admin.execution_analytics_routes.router, + backend.api.features.admin.execution_analytics_routes.router, tags=["v2", "admin"], prefix="/api/executions", ) app.include_router( - backend.server.v2.executions.review.routes.router, + backend.api.features.executions.review.routes.router, tags=["v2", "executions", "review"], prefix="/api/review", ) app.include_router( - backend.server.v2.library.routes.router, tags=["v2"], prefix="/api/library" + backend.api.features.library.routes.router, tags=["v2"], prefix="/api/library" ) app.include_router( - backend.server.v2.otto.routes.router, tags=["v2", "otto"], prefix="/api/otto" + backend.api.features.otto.routes.router, tags=["v2", "otto"], prefix="/api/otto" ) app.include_router( - backend.server.routers.postmark.postmark.router, + backend.api.features.postmark.postmark.router, tags=["v1", "email"], prefix="/api/email", ) @@ -299,12 +316,12 @@ app.include_router( prefix="/api/chat", ) app.include_router( - backend.server.routers.oauth.router, + backend.api.features.oauth.router, tags=["oauth"], prefix="/api/oauth", ) -app.mount("/external-api", external_app) +app.mount("/external-api", external_api) @app.get(path="/health", tags=["health"], dependencies=[]) @@ -357,7 +374,7 @@ class AgentServer(backend.util.service.AppProcess): graph_version: Optional[int] = None, node_input: Optional[dict[str, Any]] = None, ): - return await backend.server.routers.v1.execute_graph( + 
return await backend.api.features.v1.execute_graph( user_id=user_id, graph_id=graph_id, graph_version=graph_version, @@ -372,16 +389,16 @@ class AgentServer(backend.util.service.AppProcess): user_id: str, for_export: bool = False, ): - return await backend.server.routers.v1.get_graph( + return await backend.api.features.v1.get_graph( graph_id, user_id, graph_version, for_export ) @staticmethod async def test_create_graph( - create_graph: backend.server.routers.v1.CreateGraph, + create_graph: backend.api.features.v1.CreateGraph, user_id: str, ): - return await backend.server.routers.v1.create_new_graph(create_graph, user_id) + return await backend.api.features.v1.create_new_graph(create_graph, user_id) @staticmethod async def test_get_graph_run_status(graph_exec_id: str, user_id: str): @@ -397,45 +414,45 @@ class AgentServer(backend.util.service.AppProcess): @staticmethod async def test_delete_graph(graph_id: str, user_id: str): """Used for clean-up after a test run""" - await backend.server.v2.library.db.delete_library_agent_by_graph_id( + await backend.api.features.library.db.delete_library_agent_by_graph_id( graph_id=graph_id, user_id=user_id ) - return await backend.server.routers.v1.delete_graph(graph_id, user_id) + return await backend.api.features.v1.delete_graph(graph_id, user_id) @staticmethod async def test_get_presets(user_id: str, page: int = 1, page_size: int = 10): - return await backend.server.v2.library.routes.presets.list_presets( + return await backend.api.features.library.routes.presets.list_presets( user_id=user_id, page=page, page_size=page_size ) @staticmethod async def test_get_preset(preset_id: str, user_id: str): - return await backend.server.v2.library.routes.presets.get_preset( + return await backend.api.features.library.routes.presets.get_preset( preset_id=preset_id, user_id=user_id ) @staticmethod async def test_create_preset( - preset: backend.server.v2.library.model.LibraryAgentPresetCreatable, + preset: backend.api.features.library.model.LibraryAgentPresetCreatable, user_id: str, ): - return await backend.server.v2.library.routes.presets.create_preset( + return await backend.api.features.library.routes.presets.create_preset( preset=preset, user_id=user_id ) @staticmethod async def test_update_preset( preset_id: str, - preset: backend.server.v2.library.model.LibraryAgentPresetUpdatable, + preset: backend.api.features.library.model.LibraryAgentPresetUpdatable, user_id: str, ): - return await backend.server.v2.library.routes.presets.update_preset( + return await backend.api.features.library.routes.presets.update_preset( preset_id=preset_id, preset=preset, user_id=user_id ) @staticmethod async def test_delete_preset(preset_id: str, user_id: str): - return await backend.server.v2.library.routes.presets.delete_preset( + return await backend.api.features.library.routes.presets.delete_preset( preset_id=preset_id, user_id=user_id ) @@ -445,7 +462,7 @@ class AgentServer(backend.util.service.AppProcess): user_id: str, inputs: Optional[dict[str, Any]] = None, ): - return await backend.server.v2.library.routes.presets.execute_preset( + return await backend.api.features.library.routes.presets.execute_preset( preset_id=preset_id, user_id=user_id, inputs=inputs or {}, @@ -454,18 +471,20 @@ class AgentServer(backend.util.service.AppProcess): @staticmethod async def test_create_store_listing( - request: backend.server.v2.store.model.StoreSubmissionRequest, user_id: str + request: backend.api.features.store.model.StoreSubmissionRequest, user_id: str ): - return await 
backend.server.v2.store.routes.create_submission(request, user_id) + return await backend.api.features.store.routes.create_submission( + request, user_id + ) ### ADMIN ### @staticmethod async def test_review_store_listing( - request: backend.server.v2.store.model.ReviewSubmissionRequest, + request: backend.api.features.store.model.ReviewSubmissionRequest, user_id: str, ): - return await backend.server.v2.admin.store_admin_routes.review_submission( + return await backend.api.features.admin.store_admin_routes.review_submission( request.store_listing_version_id, request, user_id ) @@ -475,10 +494,7 @@ class AgentServer(backend.util.service.AppProcess): provider: ProviderName, credentials: Credentials, ) -> Credentials: - from backend.server.integrations.router import ( - create_credentials, - get_credential, - ) + from .features.integrations.router import create_credentials, get_credential try: return await create_credentials( diff --git a/autogpt_platform/backend/backend/server/test_helpers.py b/autogpt_platform/backend/backend/api/test_helpers.py similarity index 100% rename from autogpt_platform/backend/backend/server/test_helpers.py rename to autogpt_platform/backend/backend/api/test_helpers.py diff --git a/autogpt_platform/backend/backend/server/utils/api_key_auth.py b/autogpt_platform/backend/backend/api/utils/api_key_auth.py similarity index 100% rename from autogpt_platform/backend/backend/server/utils/api_key_auth.py rename to autogpt_platform/backend/backend/api/utils/api_key_auth.py diff --git a/autogpt_platform/backend/backend/server/utils/api_key_auth_test.py b/autogpt_platform/backend/backend/api/utils/api_key_auth_test.py similarity index 99% rename from autogpt_platform/backend/backend/server/utils/api_key_auth_test.py rename to autogpt_platform/backend/backend/api/utils/api_key_auth_test.py index df6af6633c..39c3150561 100644 --- a/autogpt_platform/backend/backend/server/utils/api_key_auth_test.py +++ b/autogpt_platform/backend/backend/api/utils/api_key_auth_test.py @@ -8,7 +8,7 @@ import pytest from fastapi import HTTPException, Request from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN -from backend.server.utils.api_key_auth import APIKeyAuthenticator +from backend.api.utils.api_key_auth import APIKeyAuthenticator from backend.util.exceptions import MissingConfigError diff --git a/autogpt_platform/backend/backend/server/utils/cors.py b/autogpt_platform/backend/backend/api/utils/cors.py similarity index 100% rename from autogpt_platform/backend/backend/server/utils/cors.py rename to autogpt_platform/backend/backend/api/utils/cors.py diff --git a/autogpt_platform/backend/backend/server/utils/cors_test.py b/autogpt_platform/backend/backend/api/utils/cors_test.py similarity index 97% rename from autogpt_platform/backend/backend/server/utils/cors_test.py rename to autogpt_platform/backend/backend/api/utils/cors_test.py index 94b3f17cfc..011974383b 100644 --- a/autogpt_platform/backend/backend/server/utils/cors_test.py +++ b/autogpt_platform/backend/backend/api/utils/cors_test.py @@ -1,6 +1,6 @@ import pytest -from backend.server.utils.cors import build_cors_params +from backend.api.utils.cors import build_cors_params from backend.util.settings import AppEnvironment diff --git a/autogpt_platform/backend/backend/api/utils/openapi.py b/autogpt_platform/backend/backend/api/utils/openapi.py new file mode 100644 index 0000000000..757b220fd0 --- /dev/null +++ b/autogpt_platform/backend/backend/api/utils/openapi.py @@ -0,0 +1,41 @@ +from fastapi import FastAPI + + 
+def sort_openapi(app: FastAPI) -> None: + """ + Patch a FastAPI instance's `openapi()` method to sort the endpoints, + schemas, and responses. + """ + wrapped_openapi = app.openapi + + def custom_openapi(): + if app.openapi_schema: + return app.openapi_schema + + openapi_schema = wrapped_openapi() + + # Sort endpoints + openapi_schema["paths"] = dict(sorted(openapi_schema["paths"].items())) + + # Sort endpoints -> methods + for p in openapi_schema["paths"].keys(): + openapi_schema["paths"][p] = dict( + sorted(openapi_schema["paths"][p].items()) + ) + + # Sort endpoints -> methods -> responses + for m in openapi_schema["paths"][p].keys(): + openapi_schema["paths"][p][m]["responses"] = dict( + sorted(openapi_schema["paths"][p][m]["responses"].items()) + ) + + # Sort schemas and responses as well + for k in openapi_schema["components"].keys(): + openapi_schema["components"][k] = dict( + sorted(openapi_schema["components"][k].items()) + ) + + app.openapi_schema = openapi_schema + return openapi_schema + + app.openapi = custom_openapi diff --git a/autogpt_platform/backend/backend/server/ws_api.py b/autogpt_platform/backend/backend/api/ws_api.py similarity index 98% rename from autogpt_platform/backend/backend/server/ws_api.py rename to autogpt_platform/backend/backend/api/ws_api.py index 344fd7e1a6..b71fdb3526 100644 --- a/autogpt_platform/backend/backend/server/ws_api.py +++ b/autogpt_platform/backend/backend/api/ws_api.py @@ -9,6 +9,14 @@ from autogpt_libs.auth.jwt_utils import parse_jwt_token from fastapi import Depends, FastAPI, WebSocket, WebSocketDisconnect from starlette.middleware.cors import CORSMiddleware +from backend.api.conn_manager import ConnectionManager +from backend.api.model import ( + WSMessage, + WSMethod, + WSSubscribeGraphExecutionRequest, + WSSubscribeGraphExecutionsRequest, +) +from backend.api.utils.cors import build_cors_params from backend.data.execution import AsyncRedisExecutionEventBus from backend.data.notification_bus import AsyncRedisNotificationEventBus from backend.data.user import DEFAULT_USER_ID @@ -16,14 +24,6 @@ from backend.monitoring.instrumentation import ( instrument_fastapi, update_websocket_connections, ) -from backend.server.conn_manager import ConnectionManager -from backend.server.model import ( - WSMessage, - WSMethod, - WSSubscribeGraphExecutionRequest, - WSSubscribeGraphExecutionsRequest, -) -from backend.server.utils.cors import build_cors_params from backend.util.retry import continuous_retry from backend.util.service import AppProcess from backend.util.settings import AppEnvironment, Config, Settings diff --git a/autogpt_platform/backend/backend/server/ws_api_test.py b/autogpt_platform/backend/backend/api/ws_api_test.py similarity index 92% rename from autogpt_platform/backend/backend/server/ws_api_test.py rename to autogpt_platform/backend/backend/api/ws_api_test.py index 0bc9902145..edab1bbded 100644 --- a/autogpt_platform/backend/backend/server/ws_api_test.py +++ b/autogpt_platform/backend/backend/api/ws_api_test.py @@ -6,17 +6,17 @@ import pytest from fastapi import WebSocket, WebSocketDisconnect from pytest_snapshot.plugin import Snapshot -from backend.data.user import DEFAULT_USER_ID -from backend.server.conn_manager import ConnectionManager -from backend.server.test_helpers import override_config -from backend.server.ws_api import AppEnvironment, WebsocketServer, WSMessage, WSMethod -from backend.server.ws_api import app as websocket_app -from backend.server.ws_api import ( +from backend.api.conn_manager import ConnectionManager 
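
The `sort_openapi` helper added above is what the `# Sort OpenAPI schema to eliminate diff on refactors` line in `rest_api.py` calls: it wraps `app.openapi()` once so the generated spec is key-sorted and cached, keeping the output stable when routers are merely reordered or relocated. A small illustrative check of that behaviour, assuming the module is importable as `backend.api.utils.openapi` (matching the new file path) and using made-up route names:

```python
from fastapi import FastAPI

from backend.api.utils.openapi import sort_openapi

app = FastAPI()


# Routes are registered deliberately out of alphabetical order; the names
# and query parameter here are invented purely for this example.
@app.get("/zebra")
def read_zebra(q: str = "default"):
    return {"ok": True}


@app.get("/alpha")
def read_alpha(q: str = "default"):
    return {"ok": True}


sort_openapi(app)

schema = app.openapi()

# Paths come back key-sorted regardless of registration order.
assert list(schema["paths"]) == ["/alpha", "/zebra"]

# The sorted schema is cached on the app, so later calls return the same object.
assert app.openapi() is schema
```

In `rest_api.py` the helper is applied right after `add_auth_responses_to_openapi(app)`, so the first `app.openapi()` call, including the one made by `generate_openapi_json.py`, already returns the sorted, cached schema.
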
+from backend.api.test_helpers import override_config +from backend.api.ws_api import AppEnvironment, WebsocketServer, WSMessage, WSMethod +from backend.api.ws_api import app as websocket_app +from backend.api.ws_api import ( handle_subscribe, handle_unsubscribe, settings, websocket_router, ) +from backend.data.user import DEFAULT_USER_ID @pytest.fixture @@ -36,12 +36,12 @@ def test_websocket_server_uses_cors_helper(mocker) -> None: "allow_origins": ["https://app.example.com"], "allow_origin_regex": None, } - mocker.patch("backend.server.ws_api.uvicorn.run") + mocker.patch("backend.api.ws_api.uvicorn.run") cors_middleware = mocker.patch( - "backend.server.ws_api.CORSMiddleware", return_value=object() + "backend.api.ws_api.CORSMiddleware", return_value=object() ) build_cors = mocker.patch( - "backend.server.ws_api.build_cors_params", return_value=cors_params + "backend.api.ws_api.build_cors_params", return_value=cors_params ) with override_config( @@ -63,7 +63,7 @@ def test_websocket_server_uses_cors_helper(mocker) -> None: def test_websocket_server_blocks_localhost_in_production(mocker) -> None: - mocker.patch("backend.server.ws_api.uvicorn.run") + mocker.patch("backend.api.ws_api.uvicorn.run") with override_config( settings, "backend_cors_allow_origins", ["http://localhost:3000"] @@ -78,7 +78,7 @@ async def test_websocket_router_subscribe( ) -> None: # Mock the authenticate_websocket function to ensure it returns a valid user_id mocker.patch( - "backend.server.ws_api.authenticate_websocket", return_value=DEFAULT_USER_ID + "backend.api.ws_api.authenticate_websocket", return_value=DEFAULT_USER_ID ) mock_websocket.receive_text.side_effect = [ @@ -128,7 +128,7 @@ async def test_websocket_router_unsubscribe( ) -> None: # Mock the authenticate_websocket function to ensure it returns a valid user_id mocker.patch( - "backend.server.ws_api.authenticate_websocket", return_value=DEFAULT_USER_ID + "backend.api.ws_api.authenticate_websocket", return_value=DEFAULT_USER_ID ) mock_websocket.receive_text.side_effect = [ @@ -175,7 +175,7 @@ async def test_websocket_router_invalid_method( ) -> None: # Mock the authenticate_websocket function to ensure it returns a valid user_id mocker.patch( - "backend.server.ws_api.authenticate_websocket", return_value=DEFAULT_USER_ID + "backend.api.ws_api.authenticate_websocket", return_value=DEFAULT_USER_ID ) mock_websocket.receive_text.side_effect = [ diff --git a/autogpt_platform/backend/backend/app.py b/autogpt_platform/backend/backend/app.py index 596962ae0b..0afed130ed 100644 --- a/autogpt_platform/backend/backend/app.py +++ b/autogpt_platform/backend/backend/app.py @@ -36,10 +36,10 @@ def main(**kwargs): Run all the processes required for the AutoGPT-server (REST and WebSocket APIs). 
""" + from backend.api.rest_api import AgentServer + from backend.api.ws_api import WebsocketServer from backend.executor import DatabaseManager, ExecutionManager, Scheduler from backend.notifications import NotificationManager - from backend.server.rest_api import AgentServer - from backend.server.ws_api import WebsocketServer run_processes( DatabaseManager().set_log_level("warning"), diff --git a/autogpt_platform/backend/backend/blocks/test/test_smart_decision_maker.py b/autogpt_platform/backend/backend/blocks/test/test_smart_decision_maker.py index 29f572d0d6..deff4278f9 100644 --- a/autogpt_platform/backend/backend/blocks/test/test_smart_decision_maker.py +++ b/autogpt_platform/backend/backend/blocks/test/test_smart_decision_maker.py @@ -5,10 +5,10 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest +from backend.api.model import CreateGraph +from backend.api.rest_api import AgentServer from backend.data.execution import ExecutionContext from backend.data.model import ProviderName, User -from backend.server.model import CreateGraph -from backend.server.rest_api import AgentServer from backend.usecases.sample import create_test_graph, create_test_user from backend.util.test import SpinTestServer, wait_execution diff --git a/autogpt_platform/backend/backend/cli.py b/autogpt_platform/backend/backend/cli.py index 0fc1bc53c1..d6eaca1dd0 100755 --- a/autogpt_platform/backend/backend/cli.py +++ b/autogpt_platform/backend/backend/cli.py @@ -244,11 +244,7 @@ def websocket(server_address: str, graph_exec_id: str): import websockets.asyncio.client - from backend.server.ws_api import ( - WSMessage, - WSMethod, - WSSubscribeGraphExecutionRequest, - ) + from backend.api.ws_api import WSMessage, WSMethod, WSSubscribeGraphExecutionRequest async def send_message(server_address: str): uri = f"ws://{server_address}" diff --git a/autogpt_platform/backend/backend/cli/generate_openapi_json.py b/autogpt_platform/backend/backend/cli/generate_openapi_json.py index 313e603c44..de74c0b5d2 100644 --- a/autogpt_platform/backend/backend/cli/generate_openapi_json.py +++ b/autogpt_platform/backend/backend/cli/generate_openapi_json.py @@ -2,7 +2,7 @@ """ Script to generate OpenAPI JSON specification for the FastAPI app. -This script imports the FastAPI app from backend.server.rest_api and outputs +This script imports the FastAPI app from backend.api.rest_api and outputs the OpenAPI specification as JSON to stdout or a specified file. 
Usage: @@ -46,7 +46,7 @@ def main(output: Path, pretty: bool): def get_openapi_schema(): """Get the OpenAPI schema from the FastAPI app""" - from backend.server.rest_api import app + from backend.api.rest_api import app return app.openapi() diff --git a/autogpt_platform/backend/backend/data/__init__.py b/autogpt_platform/backend/backend/data/__init__.py index 31ab09a5df..c98667e362 100644 --- a/autogpt_platform/backend/backend/data/__init__.py +++ b/autogpt_platform/backend/backend/data/__init__.py @@ -1,4 +1,4 @@ -from backend.server.v2.library.model import LibraryAgentPreset +from backend.api.features.library.model import LibraryAgentPreset from .graph import NodeModel from .integrations import Webhook # noqa: F401 diff --git a/autogpt_platform/backend/backend/data/credit.py b/autogpt_platform/backend/backend/data/credit.py index a8942d3b2e..95f0b158e1 100644 --- a/autogpt_platform/backend/backend/data/credit.py +++ b/autogpt_platform/backend/backend/data/credit.py @@ -16,6 +16,7 @@ from prisma.models import CreditRefundRequest, CreditTransaction, User, UserBala from prisma.types import CreditRefundRequestCreateInput, CreditTransactionWhereInput from pydantic import BaseModel +from backend.api.features.admin.model import UserHistoryResponse from backend.data.block_cost_config import BLOCK_COSTS from backend.data.db import query_raw_with_schema from backend.data.includes import MAX_CREDIT_REFUND_REQUESTS_FETCH @@ -29,7 +30,6 @@ from backend.data.model import ( from backend.data.notifications import NotificationEventModel, RefundRequestData from backend.data.user import get_user_by_id, get_user_email_by_id from backend.notifications.notifications import queue_notification_async -from backend.server.v2.admin.model import UserHistoryResponse from backend.util.exceptions import InsufficientBalanceError from backend.util.feature_flag import Flag, is_feature_enabled from backend.util.json import SafeJson, dumps diff --git a/autogpt_platform/backend/backend/data/db.py b/autogpt_platform/backend/backend/data/db.py index b64ce4ef5c..31a27e9163 100644 --- a/autogpt_platform/backend/backend/data/db.py +++ b/autogpt_platform/backend/backend/data/db.py @@ -111,7 +111,7 @@ def get_database_schema() -> str: async def query_raw_with_schema(query_template: str, *args) -> list[dict]: """Execute raw SQL query with proper schema handling.""" schema = get_database_schema() - schema_prefix = f"{schema}." if schema != "public" else "" + schema_prefix = f'"{schema}".' 
if schema != "public" else "" formatted_query = query_template.format(schema_prefix=schema_prefix) import prisma as prisma_module diff --git a/autogpt_platform/backend/backend/data/graph_test.py b/autogpt_platform/backend/backend/data/graph_test.py index e570392658..044d75e0ca 100644 --- a/autogpt_platform/backend/backend/data/graph_test.py +++ b/autogpt_platform/backend/backend/data/graph_test.py @@ -6,14 +6,14 @@ import fastapi.exceptions import pytest from pytest_snapshot.plugin import Snapshot -import backend.server.v2.store.model as store +import backend.api.features.store.model as store +from backend.api.model import CreateGraph from backend.blocks.basic import StoreValueBlock from backend.blocks.io import AgentInputBlock, AgentOutputBlock from backend.data.block import BlockSchema, BlockSchemaInput from backend.data.graph import Graph, Link, Node from backend.data.model import SchemaField from backend.data.user import DEFAULT_USER_ID -from backend.server.model import CreateGraph from backend.usecases.sample import create_test_user from backend.util.test import SpinTestServer diff --git a/autogpt_platform/backend/backend/data/human_review.py b/autogpt_platform/backend/backend/data/human_review.py index 11b87ec1dd..de7a30759e 100644 --- a/autogpt_platform/backend/backend/data/human_review.py +++ b/autogpt_platform/backend/backend/data/human_review.py @@ -13,7 +13,7 @@ from prisma.models import PendingHumanReview from prisma.types import PendingHumanReviewUpdateInput from pydantic import BaseModel -from backend.server.v2.executions.review.model import ( +from backend.api.features.executions.review.model import ( PendingHumanReviewModel, SafeJsonData, ) diff --git a/autogpt_platform/backend/backend/data/integrations.py b/autogpt_platform/backend/backend/data/integrations.py index 0f328e81b7..5f44f928bd 100644 --- a/autogpt_platform/backend/backend/data/integrations.py +++ b/autogpt_platform/backend/backend/data/integrations.py @@ -23,7 +23,7 @@ from backend.util.exceptions import NotFoundError from backend.util.json import SafeJson if TYPE_CHECKING: - from backend.server.v2.library.model import LibraryAgentPreset + from backend.api.features.library.model import LibraryAgentPreset from .db import BaseDbModel from .graph import NodeModel @@ -79,7 +79,7 @@ class WebhookWithRelations(Webhook): # integrations.py → library/model.py → integrations.py (for Webhook) # Runtime import is used in WebhookWithRelations.from_db() method instead # Import at runtime to avoid circular dependency - from backend.server.v2.library.model import LibraryAgentPreset + from backend.api.features.library.model import LibraryAgentPreset return WebhookWithRelations( **Webhook.from_db(webhook).model_dump(), @@ -285,8 +285,8 @@ async def unlink_webhook_from_graph( user_id: The ID of the user (for authorization) """ # Avoid circular imports + from backend.api.features.library.db import set_preset_webhook from backend.data.graph import set_node_webhook - from backend.server.v2.library.db import set_preset_webhook # Find all nodes in this graph that use this webhook nodes = await AgentNode.prisma().find_many( diff --git a/autogpt_platform/backend/backend/data/notification_bus.py b/autogpt_platform/backend/backend/data/notification_bus.py index 6eb90dca12..fbd484d379 100644 --- a/autogpt_platform/backend/backend/data/notification_bus.py +++ b/autogpt_platform/backend/backend/data/notification_bus.py @@ -4,8 +4,8 @@ from typing import AsyncGenerator from pydantic import BaseModel, field_serializer +from backend.api.model 
import NotificationPayload from backend.data.event_bus import AsyncRedisEventBus -from backend.server.model import NotificationPayload from backend.util.settings import Settings diff --git a/autogpt_platform/backend/backend/data/onboarding.py b/autogpt_platform/backend/backend/data/onboarding.py index 1415c7694e..d9977e9535 100644 --- a/autogpt_platform/backend/backend/data/onboarding.py +++ b/autogpt_platform/backend/backend/data/onboarding.py @@ -9,6 +9,8 @@ from prisma.enums import OnboardingStep from prisma.models import UserOnboarding from prisma.types import UserOnboardingCreateInput, UserOnboardingUpdateInput +from backend.api.features.store.model import StoreAgentDetails +from backend.api.model import OnboardingNotificationPayload from backend.data import execution as execution_db from backend.data.credit import get_user_credit_model from backend.data.notification_bus import ( @@ -16,8 +18,6 @@ from backend.data.notification_bus import ( NotificationEvent, ) from backend.data.user import get_user_by_id -from backend.server.model import OnboardingNotificationPayload -from backend.server.v2.store.model import StoreAgentDetails from backend.util.cache import cached from backend.util.json import SafeJson from backend.util.timezone_utils import get_user_timezone_or_utc diff --git a/autogpt_platform/backend/backend/server/v2/AutoMod/__init__.py b/autogpt_platform/backend/backend/executor/automod/__init__.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/AutoMod/__init__.py rename to autogpt_platform/backend/backend/executor/automod/__init__.py diff --git a/autogpt_platform/backend/backend/server/v2/AutoMod/manager.py b/autogpt_platform/backend/backend/executor/automod/manager.py similarity index 99% rename from autogpt_platform/backend/backend/server/v2/AutoMod/manager.py rename to autogpt_platform/backend/backend/executor/automod/manager.py index 181fcec248..81001196dd 100644 --- a/autogpt_platform/backend/backend/server/v2/AutoMod/manager.py +++ b/autogpt_platform/backend/backend/executor/automod/manager.py @@ -9,16 +9,13 @@ if TYPE_CHECKING: from pydantic import ValidationError from backend.data.execution import ExecutionStatus -from backend.server.v2.AutoMod.models import ( - AutoModRequest, - AutoModResponse, - ModerationConfig, -) from backend.util.exceptions import ModerationError from backend.util.feature_flag import Flag, is_feature_enabled from backend.util.request import Requests from backend.util.settings import Settings +from .models import AutoModRequest, AutoModResponse, ModerationConfig + logger = logging.getLogger(__name__) diff --git a/autogpt_platform/backend/backend/server/v2/AutoMod/models.py b/autogpt_platform/backend/backend/executor/automod/models.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/AutoMod/models.py rename to autogpt_platform/backend/backend/executor/automod/models.py diff --git a/autogpt_platform/backend/backend/executor/database.py b/autogpt_platform/backend/backend/executor/database.py index 9c2b3970c6..af68bf526d 100644 --- a/autogpt_platform/backend/backend/executor/database.py +++ b/autogpt_platform/backend/backend/executor/database.py @@ -2,6 +2,11 @@ import logging from contextlib import asynccontextmanager from typing import TYPE_CHECKING, Callable, Concatenate, ParamSpec, TypeVar, cast +from backend.api.features.library.db import ( + add_store_agent_to_library, + list_library_agents, +) +from backend.api.features.store.db import get_store_agent_details, get_store_agents from 
backend.data import db from backend.data.analytics import ( get_accuracy_trends_and_alerts, @@ -61,8 +66,6 @@ from backend.data.user import ( get_user_notification_preference, update_user_integrations, ) -from backend.server.v2.library.db import add_store_agent_to_library, list_library_agents -from backend.server.v2.store.db import get_store_agent_details, get_store_agents from backend.util.service import ( AppService, AppServiceClient, diff --git a/autogpt_platform/backend/backend/executor/manager.py b/autogpt_platform/backend/backend/executor/manager.py index 234f8127c8..161e68b0d6 100644 --- a/autogpt_platform/backend/backend/executor/manager.py +++ b/autogpt_platform/backend/backend/executor/manager.py @@ -48,27 +48,8 @@ from backend.data.notifications import ( ZeroBalanceData, ) from backend.data.rabbitmq import SyncRabbitMQ -from backend.executor.activity_status_generator import ( - generate_activity_status_for_execution, -) -from backend.executor.utils import ( - GRACEFUL_SHUTDOWN_TIMEOUT_SECONDS, - GRAPH_EXECUTION_CANCEL_QUEUE_NAME, - GRAPH_EXECUTION_EXCHANGE, - GRAPH_EXECUTION_QUEUE_NAME, - GRAPH_EXECUTION_ROUTING_KEY, - CancelExecutionEvent, - ExecutionOutputEntry, - LogMetadata, - NodeExecutionProgress, - block_usage_cost, - create_execution_queue_config, - execution_usage_cost, - validate_exec, -) from backend.integrations.creds_manager import IntegrationCredentialsManager from backend.notifications.notifications import queue_notification -from backend.server.v2.AutoMod.manager import automod_manager from backend.util import json from backend.util.clients import ( get_async_execution_event_bus, @@ -95,7 +76,24 @@ from backend.util.retry import ( ) from backend.util.settings import Settings +from .activity_status_generator import generate_activity_status_for_execution +from .automod.manager import automod_manager from .cluster_lock import ClusterLock +from .utils import ( + GRACEFUL_SHUTDOWN_TIMEOUT_SECONDS, + GRAPH_EXECUTION_CANCEL_QUEUE_NAME, + GRAPH_EXECUTION_EXCHANGE, + GRAPH_EXECUTION_QUEUE_NAME, + GRAPH_EXECUTION_ROUTING_KEY, + CancelExecutionEvent, + ExecutionOutputEntry, + LogMetadata, + NodeExecutionProgress, + block_usage_cost, + create_execution_queue_config, + execution_usage_cost, + validate_exec, +) if TYPE_CHECKING: from backend.executor import DatabaseManagerAsyncClient, DatabaseManagerClient diff --git a/autogpt_platform/backend/backend/executor/manager_test.py b/autogpt_platform/backend/backend/executor/manager_test.py index cd543fef4e..bdfdb5d724 100644 --- a/autogpt_platform/backend/backend/executor/manager_test.py +++ b/autogpt_platform/backend/backend/executor/manager_test.py @@ -3,16 +3,16 @@ import logging import fastapi.responses import pytest -import backend.server.v2.library.model -import backend.server.v2.store.model +import backend.api.features.library.model +import backend.api.features.store.model +from backend.api.model import CreateGraph +from backend.api.rest_api import AgentServer from backend.blocks.basic import StoreValueBlock from backend.blocks.data_manipulation import FindInDictionaryBlock from backend.blocks.io import AgentInputBlock from backend.blocks.maths import CalculatorBlock, Operation from backend.data import execution, graph from backend.data.model import User -from backend.server.model import CreateGraph -from backend.server.rest_api import AgentServer from backend.usecases.sample import create_test_graph, create_test_user from backend.util.test import SpinTestServer, wait_execution @@ -356,7 +356,7 @@ async def 
test_execute_preset(server: SpinTestServer): test_graph = await create_graph(server, test_graph, test_user) # Create preset with initial values - preset = backend.server.v2.library.model.LibraryAgentPresetCreatable( + preset = backend.api.features.library.model.LibraryAgentPresetCreatable( name="Test Preset With Clash", description="Test preset with clashing input values", graph_id=test_graph.id, @@ -444,7 +444,7 @@ async def test_execute_preset_with_clash(server: SpinTestServer): test_graph = await create_graph(server, test_graph, test_user) # Create preset with initial values - preset = backend.server.v2.library.model.LibraryAgentPresetCreatable( + preset = backend.api.features.library.model.LibraryAgentPresetCreatable( name="Test Preset With Clash", description="Test preset with clashing input values", graph_id=test_graph.id, @@ -485,7 +485,7 @@ async def test_store_listing_graph(server: SpinTestServer): test_user = await create_test_user() test_graph = await create_graph(server, create_test_graph(), test_user) - store_submission_request = backend.server.v2.store.model.StoreSubmissionRequest( + store_submission_request = backend.api.features.store.model.StoreSubmissionRequest( agent_id=test_graph.id, agent_version=test_graph.version, slug=test_graph.id, @@ -514,7 +514,7 @@ async def test_store_listing_graph(server: SpinTestServer): admin_user = await create_test_user(alt_user=True) await server.agent_server.test_review_store_listing( - backend.server.v2.store.model.ReviewSubmissionRequest( + backend.api.features.store.model.ReviewSubmissionRequest( store_listing_version_id=slv_id, is_approved=True, comments="Test comments", @@ -523,7 +523,7 @@ async def test_store_listing_graph(server: SpinTestServer): ) # Add the approved store listing to the admin user's library so they can execute it - from backend.server.v2.library.db import add_store_agent_to_library + from backend.api.features.library.db import add_store_agent_to_library await add_store_agent_to_library( store_listing_version_id=slv_id, user_id=admin_user.id diff --git a/autogpt_platform/backend/backend/executor/scheduler_test.py b/autogpt_platform/backend/backend/executor/scheduler_test.py index c4fa35d46c..21acbaf0e1 100644 --- a/autogpt_platform/backend/backend/executor/scheduler_test.py +++ b/autogpt_platform/backend/backend/executor/scheduler_test.py @@ -1,7 +1,7 @@ import pytest +from backend.api.model import CreateGraph from backend.data import db -from backend.server.model import CreateGraph from backend.usecases.sample import create_test_graph, create_test_user from backend.util.clients import get_scheduler_client from backend.util.test import SpinTestServer diff --git a/autogpt_platform/backend/backend/integrations/webhooks/utils.py b/autogpt_platform/backend/backend/integrations/webhooks/utils.py index fafd641c93..79316c4c0e 100644 --- a/autogpt_platform/backend/backend/integrations/webhooks/utils.py +++ b/autogpt_platform/backend/backend/integrations/webhooks/utils.py @@ -149,10 +149,10 @@ async def setup_webhook_for_block( async def migrate_legacy_triggered_graphs(): from prisma.models import AgentGraph + from backend.api.features.library.db import create_preset + from backend.api.features.library.model import LibraryAgentPresetCreatable from backend.data.graph import AGENT_GRAPH_INCLUDE, GraphModel, set_node_webhook from backend.data.model import is_credentials_field_name - from backend.server.v2.library.db import create_preset - from backend.server.v2.library.model import LibraryAgentPresetCreatable triggered_graphs 
= [ GraphModel.from_db(_graph) diff --git a/autogpt_platform/backend/backend/rest.py b/autogpt_platform/backend/backend/rest.py index b601144c6f..96a807c125 100644 --- a/autogpt_platform/backend/backend/rest.py +++ b/autogpt_platform/backend/backend/rest.py @@ -1,5 +1,5 @@ +from backend.api.rest_api import AgentServer from backend.app import run_processes -from backend.server.rest_api import AgentServer def main(): diff --git a/autogpt_platform/backend/backend/server/external/api.py b/autogpt_platform/backend/backend/server/external/api.py deleted file mode 100644 index 3bafa63108..0000000000 --- a/autogpt_platform/backend/backend/server/external/api.py +++ /dev/null @@ -1,29 +0,0 @@ -from fastapi import FastAPI - -from backend.monitoring.instrumentation import instrument_fastapi -from backend.server.middleware.security import SecurityHeadersMiddleware - -from .routes.integrations import integrations_router -from .routes.tools import tools_router -from .routes.v1 import v1_router - -external_app = FastAPI( - title="AutoGPT External API", - description="External API for AutoGPT integrations", - docs_url="/docs", - version="1.0", -) - -external_app.add_middleware(SecurityHeadersMiddleware) -external_app.include_router(v1_router, prefix="/v1") -external_app.include_router(tools_router, prefix="/v1") -external_app.include_router(integrations_router, prefix="/v1") - -# Add Prometheus instrumentation -instrument_fastapi( - external_app, - service_name="external-api", - expose_endpoint=True, - endpoint="/metrics", - include_in_schema=True, -) diff --git a/autogpt_platform/backend/backend/server/routers/analytics_improved_test.py b/autogpt_platform/backend/backend/server/routers/analytics_improved_test.py deleted file mode 100644 index 7040faa0b5..0000000000 --- a/autogpt_platform/backend/backend/server/routers/analytics_improved_test.py +++ /dev/null @@ -1,150 +0,0 @@ -"""Example of analytics tests with improved error handling and assertions.""" - -import json -from unittest.mock import AsyncMock, Mock - -import fastapi -import fastapi.testclient -import pytest -import pytest_mock -from pytest_snapshot.plugin import Snapshot - -import backend.server.routers.analytics as analytics_routes -from backend.server.test_helpers import ( - assert_error_response_structure, - assert_mock_called_with_partial, - assert_response_status, - safe_parse_json, -) - -app = fastapi.FastAPI() -app.include_router(analytics_routes.router) - -client = fastapi.testclient.TestClient(app) - - -@pytest.fixture(autouse=True) -def setup_app_auth(mock_jwt_user): - """Setup auth overrides for all tests in this module""" - from autogpt_libs.auth.jwt_utils import get_jwt_payload - - app.dependency_overrides[get_jwt_payload] = mock_jwt_user["get_jwt_payload"] - yield - app.dependency_overrides.clear() - - -def test_log_raw_metric_success_improved( - mocker: pytest_mock.MockFixture, - configured_snapshot: Snapshot, - test_user_id: str, -) -> None: - """Test successful raw metric logging with improved assertions.""" - # Mock the analytics function - mock_result = Mock(id="metric-123-uuid") - - mock_log_metric = mocker.patch( - "backend.data.analytics.log_raw_metric", - new_callable=AsyncMock, - return_value=mock_result, - ) - - request_data = { - "metric_name": "page_load_time", - "metric_value": 2.5, - "data_string": "/dashboard", - } - - response = client.post("/log_raw_metric", json=request_data) - - # Improved assertions with better error messages - assert_response_status(response, 200, "Metric logging should succeed") - 
response_data = safe_parse_json(response, "Metric response parsing") - - assert response_data == "metric-123-uuid", f"Unexpected response: {response_data}" - - # Verify the function was called with correct parameters - assert_mock_called_with_partial( - mock_log_metric, - user_id=test_user_id, - metric_name="page_load_time", - metric_value=2.5, - data_string="/dashboard", - ) - - # Snapshot test the response - configured_snapshot.assert_match( - json.dumps({"metric_id": response_data}, indent=2, sort_keys=True), - "analytics_log_metric_success_improved", - ) - - -def test_log_raw_metric_invalid_request_improved() -> None: - """Test invalid metric request with improved error assertions.""" - # Test missing required fields - response = client.post("/log_raw_metric", json={}) - - error_data = assert_error_response_structure( - response, expected_status=422, expected_error_fields=["loc", "msg", "type"] - ) - - # Verify specific error details - detail = error_data["detail"] - assert isinstance(detail, list), "Error detail should be a list" - assert len(detail) > 0, "Should have at least one error" - - # Check that required fields are mentioned in errors - error_fields = [error["loc"][-1] for error in detail if "loc" in error] - assert "metric_name" in error_fields, "Should report missing metric_name" - assert "metric_value" in error_fields, "Should report missing metric_value" - assert "data_string" in error_fields, "Should report missing data_string" - - -def test_log_raw_metric_type_validation_improved( - mocker: pytest_mock.MockFixture, -) -> None: - """Test metric type validation with improved assertions.""" - # Mock the analytics function to avoid event loop issues - mocker.patch( - "backend.data.analytics.log_raw_metric", - new_callable=AsyncMock, - return_value=Mock(id="test-id"), - ) - - invalid_requests = [ - { - "data": { - "metric_name": "test", - "metric_value": "not_a_number", # Invalid type - "data_string": "test", - }, - "expected_error": "Input should be a valid number", - }, - { - "data": { - "metric_name": "", # Empty string - "metric_value": 1.0, - "data_string": "test", - }, - "expected_error": "String should have at least 1 character", - }, - { - "data": { - "metric_name": "test", - "metric_value": 123, # Valid number - "data_string": "", # Empty data_string - }, - "expected_error": "String should have at least 1 character", - }, - ] - - for test_case in invalid_requests: - response = client.post("/log_raw_metric", json=test_case["data"]) - - error_data = assert_error_response_structure(response, expected_status=422) - - # Check that expected error is in the response - error_text = json.dumps(error_data) - assert ( - test_case["expected_error"] in error_text - or test_case["expected_error"].lower() in error_text.lower() - ), f"Expected error '{test_case['expected_error']}' not found in: {error_text}" diff --git a/autogpt_platform/backend/backend/server/routers/analytics_parametrized_test.py b/autogpt_platform/backend/backend/server/routers/analytics_parametrized_test.py deleted file mode 100644 index 9dbf03b727..0000000000 --- a/autogpt_platform/backend/backend/server/routers/analytics_parametrized_test.py +++ /dev/null @@ -1,115 +0,0 @@ -"""Example of parametrized tests for analytics endpoints.""" - -import json -from unittest.mock import AsyncMock, Mock - -import fastapi -import fastapi.testclient -import pytest -import pytest_mock -from pytest_snapshot.plugin import Snapshot - -import backend.server.routers.analytics as analytics_routes - -app = fastapi.FastAPI() 
-app.include_router(analytics_routes.router) - -client = fastapi.testclient.TestClient(app) - - -@pytest.fixture(autouse=True) -def setup_app_auth(mock_jwt_user): - """Setup auth overrides for all tests in this module""" - from autogpt_libs.auth.jwt_utils import get_jwt_payload - - app.dependency_overrides[get_jwt_payload] = mock_jwt_user["get_jwt_payload"] - yield - app.dependency_overrides.clear() - - -@pytest.mark.parametrize( - "metric_value,metric_name,data_string,test_id", - [ - (100, "api_calls_count", "external_api", "integer_value"), - (0, "error_count", "no_errors", "zero_value"), - (-5.2, "temperature_delta", "cooling", "negative_value"), - (1.23456789, "precision_test", "float_precision", "float_precision"), - (999999999, "large_number", "max_value", "large_number"), - (0.0000001, "tiny_number", "min_value", "tiny_number"), - ], -) -def test_log_raw_metric_values_parametrized( - mocker: pytest_mock.MockFixture, - configured_snapshot: Snapshot, - metric_value: float, - metric_name: str, - data_string: str, - test_id: str, -) -> None: - """Test raw metric logging with various metric values using parametrize.""" - # Mock the analytics function - mock_result = Mock(id=f"metric-{test_id}-uuid") - - mocker.patch( - "backend.data.analytics.log_raw_metric", - new_callable=AsyncMock, - return_value=mock_result, - ) - - request_data = { - "metric_name": metric_name, - "metric_value": metric_value, - "data_string": data_string, - } - - response = client.post("/log_raw_metric", json=request_data) - - # Better error handling - assert response.status_code == 200, f"Failed for {test_id}: {response.text}" - response_data = response.json() - - # Snapshot test the response - configured_snapshot.assert_match( - json.dumps( - {"metric_id": response_data, "test_case": test_id}, indent=2, sort_keys=True - ), - f"analytics_metric_{test_id}", - ) - - -@pytest.mark.parametrize( - "invalid_data,expected_error", - [ - ({}, "Field required"), # Missing all fields - ({"metric_name": "test"}, "Field required"), # Missing metric_value - ( - {"metric_name": "test", "metric_value": "not_a_number"}, - "Input should be a valid number", - ), # Invalid type - ( - {"metric_name": "", "metric_value": 1.0, "data_string": "test"}, - "String should have at least 1 character", - ), # Empty name - ], -) -def test_log_raw_metric_invalid_requests_parametrized( - mocker: pytest_mock.MockFixture, - invalid_data: dict, - expected_error: str, -) -> None: - """Test invalid metric requests with parametrize.""" - # Mock the analytics function to avoid event loop issues - mocker.patch( - "backend.data.analytics.log_raw_metric", - new_callable=AsyncMock, - return_value=Mock(id="test-id"), - ) - - response = client.post("/log_raw_metric", json=invalid_data) - - assert response.status_code == 422 - error_detail = response.json() - assert "detail" in error_detail - # Verify error message contains expected error - error_text = json.dumps(error_detail) - assert expected_error in error_text or expected_error.lower() in error_text.lower() diff --git a/autogpt_platform/backend/backend/server/routers/analytics_test.py b/autogpt_platform/backend/backend/server/routers/analytics_test.py deleted file mode 100644 index 16ee6708dc..0000000000 --- a/autogpt_platform/backend/backend/server/routers/analytics_test.py +++ /dev/null @@ -1,284 +0,0 @@ -import json -from unittest.mock import AsyncMock, Mock - -import fastapi -import fastapi.testclient -import pytest -import pytest_mock -from pytest_snapshot.plugin import Snapshot - -import 
backend.server.routers.analytics as analytics_routes - -app = fastapi.FastAPI() -app.include_router(analytics_routes.router) - -client = fastapi.testclient.TestClient(app) - - -@pytest.fixture(autouse=True) -def setup_app_auth(mock_jwt_user): - """Setup auth overrides for all tests in this module""" - from autogpt_libs.auth.jwt_utils import get_jwt_payload - - app.dependency_overrides[get_jwt_payload] = mock_jwt_user["get_jwt_payload"] - yield - app.dependency_overrides.clear() - - -def test_log_raw_metric_success( - mocker: pytest_mock.MockFixture, - configured_snapshot: Snapshot, - test_user_id: str, -) -> None: - """Test successful raw metric logging""" - - # Mock the analytics function - mock_result = Mock(id="metric-123-uuid") - - mock_log_metric = mocker.patch( - "backend.data.analytics.log_raw_metric", - new_callable=AsyncMock, - return_value=mock_result, - ) - - request_data = { - "metric_name": "page_load_time", - "metric_value": 2.5, - "data_string": "/dashboard", - } - - response = client.post("/log_raw_metric", json=request_data) - - assert response.status_code == 200 - response_data = response.json() - assert response_data == "metric-123-uuid" - - # Verify the function was called with correct parameters - mock_log_metric.assert_called_once_with( - user_id=test_user_id, - metric_name="page_load_time", - metric_value=2.5, - data_string="/dashboard", - ) - - # Snapshot test the response - configured_snapshot.assert_match( - json.dumps({"metric_id": response.json()}, indent=2, sort_keys=True), - "analytics_log_metric_success", - ) - - -def test_log_raw_metric_various_values( - mocker: pytest_mock.MockFixture, - configured_snapshot: Snapshot, -) -> None: - """Test raw metric logging with various metric values""" - - # Mock the analytics function - mock_result = Mock(id="metric-456-uuid") - - mocker.patch( - "backend.data.analytics.log_raw_metric", - new_callable=AsyncMock, - return_value=mock_result, - ) - - # Test with integer value - request_data = { - "metric_name": "api_calls_count", - "metric_value": 100, - "data_string": "external_api", - } - - response = client.post("/log_raw_metric", json=request_data) - assert response.status_code == 200 - - # Test with zero value - request_data = { - "metric_name": "error_count", - "metric_value": 0, - "data_string": "no_errors", - } - - response = client.post("/log_raw_metric", json=request_data) - assert response.status_code == 200 - - # Test with negative value - request_data = { - "metric_name": "temperature_delta", - "metric_value": -5.2, - "data_string": "cooling", - } - - response = client.post("/log_raw_metric", json=request_data) - assert response.status_code == 200 - - # Snapshot the last response - configured_snapshot.assert_match( - json.dumps({"metric_id": response.json()}, indent=2, sort_keys=True), - "analytics_log_metric_various_values", - ) - - -def test_log_raw_analytics_success( - mocker: pytest_mock.MockFixture, - configured_snapshot: Snapshot, - test_user_id: str, -) -> None: - """Test successful raw analytics logging""" - - # Mock the analytics function - mock_result = Mock(id="analytics-789-uuid") - - mock_log_analytics = mocker.patch( - "backend.data.analytics.log_raw_analytics", - new_callable=AsyncMock, - return_value=mock_result, - ) - - request_data = { - "type": "user_action", - "data": { - "action": "button_click", - "button_id": "submit_form", - "timestamp": "2023-01-01T00:00:00Z", - "metadata": { - "form_type": "registration", - "fields_filled": 5, - }, - }, - "data_index": "button_click_submit_form", - } - 
- response = client.post("/log_raw_analytics", json=request_data) - - assert response.status_code == 200 - response_data = response.json() - assert response_data == "analytics-789-uuid" - - # Verify the function was called with correct parameters - mock_log_analytics.assert_called_once_with( - test_user_id, - "user_action", - request_data["data"], - "button_click_submit_form", - ) - - # Snapshot test the response - configured_snapshot.assert_match( - json.dumps({"analytics_id": response_data}, indent=2, sort_keys=True), - "analytics_log_analytics_success", - ) - - -def test_log_raw_analytics_complex_data( - mocker: pytest_mock.MockFixture, - configured_snapshot: Snapshot, -) -> None: - """Test raw analytics logging with complex nested data""" - - # Mock the analytics function - mock_result = Mock(id="analytics-complex-uuid") - - mocker.patch( - "backend.data.analytics.log_raw_analytics", - new_callable=AsyncMock, - return_value=mock_result, - ) - - request_data = { - "type": "agent_execution", - "data": { - "agent_id": "agent_123", - "execution_id": "exec_456", - "status": "completed", - "duration_ms": 3500, - "nodes_executed": 15, - "blocks_used": [ - {"block_id": "llm_block", "count": 3}, - {"block_id": "http_block", "count": 5}, - {"block_id": "code_block", "count": 2}, - ], - "errors": [], - "metadata": { - "trigger": "manual", - "user_tier": "premium", - "environment": "production", - }, - }, - "data_index": "agent_123_exec_456", - } - - response = client.post("/log_raw_analytics", json=request_data) - - assert response.status_code == 200 - response_data = response.json() - - # Snapshot test the complex data structure - configured_snapshot.assert_match( - json.dumps( - { - "analytics_id": response_data, - "logged_data": request_data["data"], - }, - indent=2, - sort_keys=True, - ), - "analytics_log_analytics_complex_data", - ) - - -def test_log_raw_metric_invalid_request() -> None: - """Test raw metric logging with invalid request data""" - # Missing required fields - response = client.post("/log_raw_metric", json={}) - assert response.status_code == 422 - - # Invalid metric_value type - response = client.post( - "/log_raw_metric", - json={ - "metric_name": "test", - "metric_value": "not_a_number", - "data_string": "test", - }, - ) - assert response.status_code == 422 - - # Missing data_string - response = client.post( - "/log_raw_metric", - json={ - "metric_name": "test", - "metric_value": 1.0, - }, - ) - assert response.status_code == 422 - - -def test_log_raw_analytics_invalid_request() -> None: - """Test raw analytics logging with invalid request data""" - # Missing required fields - response = client.post("/log_raw_analytics", json={}) - assert response.status_code == 422 - - # Invalid data type (should be dict) - response = client.post( - "/log_raw_analytics", - json={ - "type": "test", - "data": "not_a_dict", - "data_index": "test", - }, - ) - assert response.status_code == 422 - - # Missing data_index - response = client.post( - "/log_raw_analytics", - json={ - "type": "test", - "data": {"key": "value"}, - }, - ) - assert response.status_code == 422 diff --git a/autogpt_platform/backend/backend/util/test.py b/autogpt_platform/backend/backend/util/test.py index 95ea9554ed..1e8244ff8e 100644 --- a/autogpt_platform/backend/backend/util/test.py +++ b/autogpt_platform/backend/backend/util/test.py @@ -6,6 +6,7 @@ from typing import Sequence, cast from autogpt_libs.auth import get_user_id +from backend.api.rest_api import AgentServer from backend.data import db from backend.data.block 
import Block, BlockSchema, initialize_blocks from backend.data.execution import ( @@ -18,7 +19,6 @@ from backend.data.model import _BaseCredentials from backend.data.user import create_default_user from backend.executor import DatabaseManager, ExecutionManager, Scheduler from backend.notifications.notifications import NotificationManager -from backend.server.rest_api import AgentServer log = logging.getLogger(__name__) diff --git a/autogpt_platform/backend/backend/util/virus_scanner.py b/autogpt_platform/backend/backend/util/virus_scanner.py index 1ea31cac95..aa43e5f5d9 100644 --- a/autogpt_platform/backend/backend/util/virus_scanner.py +++ b/autogpt_platform/backend/backend/util/virus_scanner.py @@ -196,7 +196,7 @@ async def scan_content_safe(content: bytes, *, filename: str = "unknown") -> Non VirusDetectedError: If virus is found VirusScanError: If scanning fails """ - from backend.server.v2.store.exceptions import VirusDetectedError, VirusScanError + from backend.api.features.store.exceptions import VirusDetectedError, VirusScanError try: result = await get_virus_scanner().scan_file(content, filename=filename) diff --git a/autogpt_platform/backend/backend/util/virus_scanner_test.py b/autogpt_platform/backend/backend/util/virus_scanner_test.py index 81b5ad3342..77010c7320 100644 --- a/autogpt_platform/backend/backend/util/virus_scanner_test.py +++ b/autogpt_platform/backend/backend/util/virus_scanner_test.py @@ -3,7 +3,7 @@ from unittest.mock import AsyncMock, Mock, patch import pytest -from backend.server.v2.store.exceptions import VirusDetectedError, VirusScanError +from backend.api.features.store.exceptions import VirusDetectedError, VirusScanError from backend.util.virus_scanner import ( VirusScannerService, VirusScannerSettings, diff --git a/autogpt_platform/backend/backend/ws.py b/autogpt_platform/backend/backend/ws.py index 3b15a60eb0..77e2e82a90 100644 --- a/autogpt_platform/backend/backend/ws.py +++ b/autogpt_platform/backend/backend/ws.py @@ -1,5 +1,5 @@ +from backend.api.ws_api import WebsocketServer from backend.app import run_processes -from backend.server.ws_api import WebsocketServer def main(): diff --git a/autogpt_platform/backend/test/e2e_test_data.py b/autogpt_platform/backend/test/e2e_test_data.py index 943c506f5c..d7576cdad3 100644 --- a/autogpt_platform/backend/test/e2e_test_data.py +++ b/autogpt_platform/backend/test/e2e_test_data.py @@ -23,16 +23,18 @@ from typing import Any, Dict, List from faker import Faker +# Import API functions from the backend +from backend.api.features.library.db import create_library_agent, create_preset +from backend.api.features.library.model import LibraryAgentPresetCreatable +from backend.api.features.store.db import ( + create_store_submission, + review_store_submission, +) from backend.data.auth.api_key import create_api_key from backend.data.credit import get_user_credit_model from backend.data.db import prisma from backend.data.graph import Graph, Link, Node, create_graph from backend.data.user import get_or_create_user - -# Import API functions from the backend -from backend.server.v2.library.db import create_library_agent, create_preset -from backend.server.v2.library.model import LibraryAgentPresetCreatable -from backend.server.v2.store.db import create_store_submission, review_store_submission from backend.util.clients import get_supabase faker = Faker() diff --git a/autogpt_platform/frontend/src/app/(platform)/auth/authorize/page.tsx b/autogpt_platform/frontend/src/app/(platform)/auth/authorize/page.tsx index 
8093b75965..7472ef212e 100644 --- a/autogpt_platform/frontend/src/app/(platform)/auth/authorize/page.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/auth/authorize/page.tsx @@ -16,7 +16,7 @@ import type { APIKeyPermission } from "@/app/api/__generated__/models/aPIKeyPerm // Human-readable scope descriptions const SCOPE_DESCRIPTIONS: { [key in APIKeyPermission]: string } = { - IDENTITY: "Read user ID, name, e-mail, and timezone", + IDENTITY: "View your user ID, e-mail, and timezone", EXECUTE_GRAPH: "Run your agents", READ_GRAPH: "View your agents and their configurations", EXECUTE_BLOCK: "Execute individual blocks", diff --git a/autogpt_platform/frontend/src/app/api/openapi.json b/autogpt_platform/frontend/src/app/api/openapi.json index 3556e2f5c7..61a3600892 100644 --- a/autogpt_platform/frontend/src/app/api/openapi.json +++ b/autogpt_platform/frontend/src/app/api/openapi.json @@ -2,46 +2,33 @@ "openapi": "3.1.0", "info": { "title": "AutoGPT Agent Server", + "summary": "AutoGPT Agent Server", "description": "This server is used to execute agents that are created by the AutoGPT system.", "version": "0.1" }, "paths": { - "/api/integrations/{provider}/login": { - "get": { - "tags": ["v1", "integrations"], - "summary": "Initiate OAuth flow", - "operationId": "getV1Initiate oauth flow", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "provider", - "in": "path", - "required": true, - "schema": { - "type": "string", - "title": "The provider to initiate an OAuth flow for", - "description": "Provider name for integrations. Can be any string value, including custom provider names." + "/api/analytics/log_raw_analytics": { + "post": { + "tags": ["analytics"], + "summary": "Log Raw Analytics", + "operationId": "postAnalyticsLogRawAnalytics", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_postAnalyticsLogRawAnalytics" + } } }, - { - "name": "scopes", - "in": "query", - "required": false, - "schema": { - "type": "string", - "title": "Comma-separated list of authorization scopes", - "default": "" - } - } - ], + "required": true + }, "responses": { "200": { "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LoginResponse" } - } - } + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" }, "422": { "description": "Validation Error", @@ -50,9 +37,2773 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/analytics/log_raw_metric": { + "post": { + "tags": ["analytics"], + "summary": "Log Raw Metric", + "operationId": "postAnalyticsLogRawMetric", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LogRawMetricRequest" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/api-keys": { + "get": { + "tags": ["v1", "api-keys"], + "summary": "List user API keys", + "description": "List all API keys for the user", + "operationId": "getV1List 
user api keys", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { "$ref": "#/components/schemas/APIKeyInfo" }, + "type": "array", + "title": "Response Getv1List User Api Keys" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + }, + "post": { + "tags": ["v1", "api-keys"], + "summary": "Create new API key", + "description": "Create a new API key", + "operationId": "postV1Create new api key", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/CreateAPIKeyRequest" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateAPIKeyResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/api-keys/{key_id}": { + "delete": { + "tags": ["v1", "api-keys"], + "summary": "Revoke API key", + "description": "Revoke an API key", + "operationId": "deleteV1Revoke api key", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "key_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Key Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/APIKeyInfo" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "get": { + "tags": ["v1", "api-keys"], + "summary": "Get specific API key", + "description": "Get a specific API key", + "operationId": "getV1Get specific api key", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "key_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Key Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/APIKeyInfo" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/api-keys/{key_id}/permissions": { + "put": { + "tags": ["v1", "api-keys"], + "summary": "Update key permissions", + "description": "Update API key permissions", + "operationId": "putV1Update key permissions", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "key_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Key Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdatePermissionsRequest" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/APIKeyInfo" } + } + } + }, + "401": { 
+ "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/api-keys/{key_id}/suspend": { + "post": { + "tags": ["v1", "api-keys"], + "summary": "Suspend API key", + "description": "Suspend an API key", + "operationId": "postV1Suspend api key", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "key_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Key Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/APIKeyInfo" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/auth/user": { + "post": { + "tags": ["v1", "auth"], + "summary": "Get or create user", + "operationId": "postV1Get or create user", + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/auth/user/email": { + "post": { + "tags": ["v1", "auth"], + "summary": "Update user email", + "operationId": "postV1Update user email", + "requestBody": { + "content": { + "application/json": { + "schema": { "type": "string", "title": "Email" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "additionalProperties": { "type": "string" }, + "type": "object", + "title": "Response Postv1Update User Email" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/auth/user/preferences": { + "get": { + "tags": ["v1", "auth"], + "summary": "Get notification preferences", + "operationId": "getV1Get notification preferences", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/NotificationPreference" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + }, + "post": { + "tags": ["v1", "auth"], + "summary": "Update notification preferences", + "operationId": "postV1Update notification preferences", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/NotificationPreferenceDTO" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/NotificationPreference" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + 
"security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/auth/user/timezone": { + "get": { + "tags": ["v1", "auth"], + "summary": "Get user timezone", + "description": "Get user timezone setting.", + "operationId": "getV1Get user timezone", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/TimezoneResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + }, + "post": { + "tags": ["v1", "auth"], + "summary": "Update user timezone", + "description": "Update user timezone. The timezone should be a valid IANA timezone identifier.", + "operationId": "postV1Update user timezone", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/UpdateTimezoneRequest" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/TimezoneResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/blocks": { + "get": { + "tags": ["v1", "blocks"], + "summary": "List available blocks", + "operationId": "getV1List available blocks", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { "additionalProperties": true, "type": "object" }, + "type": "array", + "title": "Response Getv1List Available Blocks" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/blocks/{block_id}/execute": { + "post": { + "tags": ["v1", "blocks"], + "summary": "Execute graph block", + "operationId": "postV1Execute graph block", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "block_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Block Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "additionalProperties": true, + "title": "Data" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "object", + "additionalProperties": { "type": "array", "items": {} }, + "title": "Response Postv1Execute Graph Block" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/builder/blocks": { + "get": { + "tags": ["v2"], + "summary": "Get Builder blocks", + "description": "Get blocks based on either category, type, or provider.", + "operationId": "getV2Get builder blocks", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "category", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Category" + } + }, + { + "name": "type", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "enum": ["all", 
"input", "action", "output"], + "type": "string" + }, + { "type": "null" } + ], + "title": "Type" + } + }, + { + "name": "provider", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string", + "description": "Provider name for integrations. Can be any string value, including custom provider names." + }, + { "type": "null" } + ], + "title": "Provider" + } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 1, "title": "Page" } + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 50, "title": "Page Size" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/BlockResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/builder/blocks/batch": { + "get": { + "tags": ["v2"], + "summary": "Get specific blocks", + "description": "Get specific blocks by their IDs.", + "operationId": "getV2Get specific blocks", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "block_ids", + "in": "query", + "required": true, + "schema": { + "type": "array", + "items": { "type": "string" }, + "title": "Block Ids" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { "$ref": "#/components/schemas/BlockInfo" }, + "title": "Response Getv2Get Specific Blocks" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/builder/categories": { + "get": { + "tags": ["v2"], + "summary": "Get Builder block categories", + "description": "Get all block categories with a specified number of blocks per category.", + "operationId": "getV2Get builder block categories", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "blocks_per_category", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "default": 3, + "title": "Blocks Per Category" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/BlockCategoryResponse" + }, + "title": "Response Getv2Get Builder Block Categories" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/builder/counts": { + "get": { + "tags": ["v2"], + "summary": "Get Builder item counts", + "description": "Get item counts for the menu categories in the Blocks Menu.", + "operationId": "getV2Get builder item counts", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/CountResponse" } + } + } + }, + "401": { + "$ref": 
"#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/builder/providers": { + "get": { + "tags": ["v2"], + "summary": "Get Builder integration providers", + "description": "Get all integration providers with their block counts.", + "operationId": "getV2Get builder integration providers", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "page", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 1, "title": "Page" } + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 50, "title": "Page Size" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/ProviderResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/builder/search": { + "get": { + "tags": ["v2", "store", "private"], + "summary": "Builder search", + "description": "Search for blocks (including integrations), marketplace agents, and user library agents.", + "operationId": "getV2Builder search", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "search_query", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Search Query" + } + }, + { + "name": "filter", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "array", + "items": { + "enum": [ + "blocks", + "integrations", + "marketplace_agents", + "my_agents" + ], + "type": "string" + } + }, + { "type": "null" } + ], + "title": "Filter" + } + }, + { + "name": "search_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Search Id" + } + }, + { + "name": "by_creator", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { "type": "array", "items": { "type": "string" } }, + { "type": "null" } + ], + "title": "By Creator" + } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 1, "title": "Page" } + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 50, "title": "Page Size" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/SearchResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/builder/suggestions": { + "get": { + "tags": ["v2"], + "summary": "Get Builder suggestions", + "description": "Get all suggestions for the Blocks Menu.", + "operationId": "getV2Get builder suggestions", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/SuggestionsResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/chat/health": { 
+ "get": { + "tags": ["v2", "chat", "chat"], + "summary": "Health Check", + "description": "Health check endpoint for the chat service.\n\nPerforms a full cycle test of session creation, assignment, and retrieval. Should always return healthy\nif the service and data layer are operational.\n\nReturns:\n dict: A status dictionary indicating health, service name, and API version.", + "operationId": "getV2HealthCheck", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "additionalProperties": true, + "type": "object", + "title": "Response Getv2Healthcheck" + } + } + } + } + } + } + }, + "/api/chat/sessions": { + "post": { + "tags": ["v2", "chat", "chat"], + "summary": "Create Session", + "description": "Create a new chat session.\n\nInitiates a new chat session for either an authenticated or anonymous user.\n\nArgs:\n user_id: The optional authenticated user ID parsed from the JWT. If missing, creates an anonymous session.\n\nReturns:\n CreateSessionResponse: Details of the created session.", + "operationId": "postV2CreateSession", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateSessionResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/chat/sessions/{session_id}": { + "get": { + "tags": ["v2", "chat", "chat"], + "summary": "Get Session", + "description": "Retrieve the details of a specific chat session.\n\nLooks up a chat session by ID for the given user (if authenticated) and returns all session data including messages.\n\nArgs:\n session_id: The unique identifier for the desired chat session.\n user_id: The optional authenticated user ID, or None for anonymous access.\n\nReturns:\n SessionDetailResponse: Details for the requested session; raises NotFoundError if not found.", + "operationId": "getV2GetSession", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "session_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Session Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SessionDetailResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/chat/sessions/{session_id}/assign-user": { + "patch": { + "tags": ["v2", "chat", "chat"], + "summary": "Session Assign User", + "description": "Assign an authenticated user to a chat session.\n\nUsed (typically post-login) to claim an existing anonymous session as the current authenticated user.\n\nArgs:\n session_id: The identifier for the (previously anonymous) session.\n user_id: The authenticated user's ID to associate with the session.\n\nReturns:\n dict: Status of the assignment.", + "operationId": "patchV2SessionAssignUser", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "session_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Session Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "object", 
+ "additionalProperties": true, + "title": "Response Patchv2Sessionassignuser" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/chat/sessions/{session_id}/stream": { + "get": { + "tags": ["v2", "chat", "chat"], + "summary": "Stream Chat", + "description": "Stream chat responses for a session.\n\nStreams the AI/completion responses in real time over Server-Sent Events (SSE), including:\n - Text fragments as they are generated\n - Tool call UI elements (if invoked)\n - Tool execution results\n\nArgs:\n session_id: The chat session identifier to associate with the streamed messages.\n message: The user's new message to process.\n user_id: Optional authenticated user ID.\n is_user_message: Whether the message is a user message.\nReturns:\n StreamingResponse: SSE-formatted response chunks.", + "operationId": "getV2StreamChat", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "session_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Session Id" } + }, + { + "name": "message", + "in": "query", + "required": true, + "schema": { + "type": "string", + "minLength": 1, + "maxLength": 10000, + "title": "Message" + } + }, + { + "name": "is_user_message", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "default": true, + "title": "Is User Message" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/credits": { + "get": { + "tags": ["v1", "credits"], + "summary": "Get user credits", + "operationId": "getV1Get user credits", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "additionalProperties": { "type": "integer" }, + "type": "object", + "title": "Response Getv1Get User Credits" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + }, + "patch": { + "tags": ["v1", "credits"], + "summary": "Fulfill checkout session", + "operationId": "patchV1Fulfill checkout session", + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + }, + "post": { + "tags": ["v1", "credits"], + "summary": "Request credit top up", + "operationId": "postV1Request credit top up", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/RequestTopUp" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ 
"HTTPBearerJWT": [] }] + } + }, + "/api/credits/admin/add_credits": { + "post": { + "tags": ["v2", "admin", "credits", "admin"], + "summary": "Add Credits to User", + "operationId": "postV2Add credits to user", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_postV2Add_credits_to_user" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AddUserCreditsResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/credits/admin/users_history": { + "get": { + "tags": ["v2", "admin", "credits", "admin"], + "summary": "Get All Users History", + "operationId": "getV2Get all users history", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "search", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Search" + } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 1, "title": "Page" } + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 20, "title": "Page Size" } + }, + { + "name": "transaction_filter", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { "$ref": "#/components/schemas/CreditTransactionType" }, + { "type": "null" } + ], + "title": "Transaction Filter" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/UserHistoryResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/credits/auto-top-up": { + "get": { + "tags": ["v1", "credits"], + "summary": "Get auto top up", + "operationId": "getV1Get auto top up", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/AutoTopUpConfig" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + }, + "post": { + "tags": ["v1", "credits"], + "summary": "Configure auto top up", + "operationId": "postV1Configure auto top up", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/AutoTopUpConfig" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "string", + "title": "Response Postv1Configure Auto Top Up" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/credits/manage": { + "get": { + "tags": ["v1", "credits"], + "summary": 
"Manage payment methods", + "operationId": "getV1Manage payment methods", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "additionalProperties": { "type": "string" }, + "type": "object", + "title": "Response Getv1Manage Payment Methods" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/credits/refunds": { + "get": { + "tags": ["v1", "credits"], + "summary": "Get refund requests", + "operationId": "getV1Get refund requests", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { "$ref": "#/components/schemas/RefundRequest" }, + "type": "array", + "title": "Response Getv1Get Refund Requests" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/credits/stripe_webhook": { + "post": { + "tags": ["v1", "credits"], + "summary": "Handle Stripe webhooks", + "operationId": "postV1Handle stripe webhooks", + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + } + } + } + }, + "/api/credits/transactions": { + "get": { + "tags": ["v1", "credits"], + "summary": "Get credit history", + "operationId": "getV1Get credit history", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "transaction_time", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { "type": "string", "format": "date-time" }, + { "type": "null" } + ], + "title": "Transaction Time" + } + }, + { + "name": "transaction_type", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Transaction Type" + } + }, + { + "name": "transaction_count_limit", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "default": 100, + "title": "Transaction Count Limit" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/TransactionHistory" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/credits/{transaction_key}/refund": { + "post": { + "tags": ["v1", "credits"], + "summary": "Refund credit transaction", + "operationId": "postV1Refund credit transaction", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "transaction_key", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Transaction Key" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "additionalProperties": { "type": "string" }, + "title": "Metadata" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "integer", + "title": "Response Postv1Refund Credit Transaction" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": 
"#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/email/": { + "post": { + "tags": ["v1", "email"], + "summary": "Handle Postmark Email Webhooks", + "operationId": "postV1Handle postmark email webhooks", + "requestBody": { + "content": { + "application/json": { + "schema": { + "oneOf": [ + { "$ref": "#/components/schemas/PostmarkDeliveryWebhook" }, + { "$ref": "#/components/schemas/PostmarkBounceWebhook" }, + { + "$ref": "#/components/schemas/PostmarkSpamComplaintWebhook" + }, + { "$ref": "#/components/schemas/PostmarkOpenWebhook" }, + { "$ref": "#/components/schemas/PostmarkClickWebhook" }, + { + "$ref": "#/components/schemas/PostmarkSubscriptionChangeWebhook" + } + ], + "title": "Webhook", + "discriminator": { + "propertyName": "RecordType", + "mapping": { + "Delivery": "#/components/schemas/PostmarkDeliveryWebhook", + "Bounce": "#/components/schemas/PostmarkBounceWebhook", + "SpamComplaint": "#/components/schemas/PostmarkSpamComplaintWebhook", + "Open": "#/components/schemas/PostmarkOpenWebhook", + "Click": "#/components/schemas/PostmarkClickWebhook", + "SubscriptionChange": "#/components/schemas/PostmarkSubscriptionChangeWebhook" + } + } + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "APIKeyAuthenticator-X-Postmark-Webhook-Token": [] }] + } + }, + "/api/email/unsubscribe": { + "post": { + "tags": ["v1", "email"], + "summary": "One Click Email Unsubscribe", + "operationId": "postV1One click email unsubscribe", + "parameters": [ + { + "name": "token", + "in": "query", + "required": true, + "schema": { "type": "string", "title": "Token" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/executions": { + "get": { + "tags": ["v1", "graphs"], + "summary": "List all executions", + "operationId": "getV1List all executions", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { + "$ref": "#/components/schemas/GraphExecutionMeta" + }, + "type": "array", + "title": "Response Getv1List All Executions" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/executions/admin/execution_accuracy_trends": { + "get": { + "tags": ["v2", "admin", "admin", "execution_analytics"], + "summary": "Get Execution Accuracy Trends and Alerts", + "description": "Get execution accuracy trends with moving averages and alert detection.\nSimple single-query approach.", + "operationId": "getV2Get execution accuracy trends and alerts", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "query", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "user_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "User Id" + } + }, + { + "name": "days_back", + "in": "query", + "required": false, + "schema": 
{ "type": "integer", "default": 30, "title": "Days Back" } + }, + { + "name": "drop_threshold", + "in": "query", + "required": false, + "schema": { + "type": "number", + "default": 10.0, + "title": "Drop Threshold" + } + }, + { + "name": "include_historical", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "default": false, + "title": "Include Historical" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AccuracyTrendsResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/executions/admin/execution_analytics": { + "post": { + "tags": ["v2", "admin", "admin", "execution_analytics"], + "summary": "Generate Execution Analytics", + "description": "Generate activity summaries and correctness scores for graph executions.\n\nThis endpoint:\n1. Fetches all completed executions matching the criteria\n2. Identifies executions missing activity_status or correctness_score\n3. Generates missing data using AI in batches\n4. Updates the database with new stats\n5. Returns a detailed report of the analytics operation", + "operationId": "postV2Generate execution analytics", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ExecutionAnalyticsRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ExecutionAnalyticsResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/executions/admin/execution_analytics/config": { + "get": { + "tags": ["v2", "admin", "admin", "execution_analytics"], + "summary": "Get Execution Analytics Configuration", + "description": "Get the configuration for execution analytics including:\n- Available AI models with metadata\n- Default system and user prompts\n- Recommended model selection", + "operationId": "getV2Get execution analytics configuration", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ExecutionAnalyticsConfig" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/executions/{graph_exec_id}": { + "delete": { + "tags": ["v1", "graphs"], + "summary": "Delete graph execution", + "operationId": "deleteV1Delete graph execution", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_exec_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Exec Id" } + } + ], + "responses": { + "204": { "description": "Successful Response" }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + 
} + } + }, + "/api/files/upload": { + "post": { + "tags": ["v1", "files"], + "summary": "Upload file to cloud storage", + "description": "Upload a file to cloud storage and return a storage key that can be used\nwith FileStoreBlock and AgentFileInputBlock.\n\nArgs:\n file: The file to upload\n user_id: The user ID\n provider: Cloud storage provider (\"gcs\", \"s3\", \"azure\")\n expiration_hours: Hours until file expires (1-48)\n\nReturns:\n Dict containing the cloud storage path and signed URL", + "operationId": "postV1Upload file to cloud storage", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "provider", + "in": "query", + "required": false, + "schema": { + "type": "string", + "default": "gcs", + "title": "Provider" + } + }, + { + "name": "expiration_hours", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "default": 24, + "title": "Expiration Hours" + } + } + ], + "requestBody": { + "required": true, + "content": { + "multipart/form-data": { + "schema": { + "$ref": "#/components/schemas/Body_postV1Upload_file_to_cloud_storage" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/UploadFileResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs": { + "get": { + "tags": ["v1", "graphs"], + "summary": "List user graphs", + "operationId": "getV1List user graphs", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { "$ref": "#/components/schemas/GraphMeta" }, + "type": "array", + "title": "Response Getv1List User Graphs" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + }, + "post": { + "tags": ["v1", "graphs"], + "summary": "Create new graph", + "operationId": "postV1Create new graph", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/CreateGraph" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphModel" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/graphs/{graph_id}": { + "delete": { + "tags": ["v1", "graphs"], + "summary": "Delete graph permanently", + "operationId": "deleteV1Delete graph permanently", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/DeleteGraphResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": 
"#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "get": { + "tags": ["v1", "graphs"], + "summary": "Get specific graph", + "operationId": "getV1Get specific graph", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "version", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "integer" }, { "type": "null" }], + "title": "Version" + } + }, + { + "name": "for_export", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "default": false, + "title": "For Export" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphModel" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "put": { + "tags": ["v1", "graphs"], + "summary": "Update graph version", + "operationId": "putV1Update graph version", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/Graph" } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphModel" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/execute/{graph_version}": { + "post": { + "tags": ["v1", "graphs"], + "summary": "Execute graph agent", + "operationId": "postV1Execute graph agent", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "graph_version", + "in": "path", + "required": true, + "schema": { + "anyOf": [{ "type": "integer" }, { "type": "null" }], + "title": "Graph Version" + } + }, + { + "name": "preset_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Preset Id" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_postV1Execute_graph_agent" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphExecutionMeta" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/executions": { + "get": { + "tags": ["v1", "graphs"], + "summary": "List graph executions", + "operationId": "getV1List graph executions", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + 
"in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "description": "Page number (1-indexed)", + "default": 1, + "title": "Page" + }, + "description": "Page number (1-indexed)" + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 100, + "minimum": 1, + "description": "Number of executions per page", + "default": 25, + "title": "Page Size" + }, + "description": "Number of executions per page" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GraphExecutionsPaginated" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/executions/{graph_exec_id}": { + "get": { + "tags": ["v1", "graphs"], + "summary": "Get execution details", + "operationId": "getV1Get execution details", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "graph_exec_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Exec Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "anyOf": [ + { "$ref": "#/components/schemas/GraphExecution" }, + { "$ref": "#/components/schemas/GraphExecutionWithNodes" } + ], + "title": "Response Getv1Get Execution Details" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/executions/{graph_exec_id}/share": { + "delete": { + "tags": ["v1"], + "summary": "Disable Execution Sharing", + "description": "Disable sharing for a graph execution.", + "operationId": "deleteV1DisableExecutionSharing", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "graph_exec_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Exec Id" } + } + ], + "responses": { + "204": { "description": "Successful Response" }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "post": { + "tags": ["v1"], + "summary": "Enable Execution Sharing", + "description": "Enable sharing for a graph execution.", + "operationId": "postV1EnableExecutionSharing", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "graph_exec_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Exec Id" } + } + ], + "requestBody": { + 
"content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ShareRequest", + "default": {} + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/ShareResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/executions/{graph_exec_id}/stop": { + "post": { + "tags": ["v1", "graphs"], + "summary": "Stop graph execution", + "operationId": "postV1Stop graph execution", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "graph_exec_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Exec Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "anyOf": [ + { "$ref": "#/components/schemas/GraphExecutionMeta" }, + { "type": "null" } + ], + "title": "Response Postv1Stop Graph Execution" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/schedules": { + "get": { + "tags": ["v1", "schedules"], + "summary": "List execution schedules for a graph", + "operationId": "getV1List execution schedules for a graph", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/GraphExecutionJobInfo" + }, + "title": "Response Getv1List Execution Schedules For A Graph" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "post": { + "tags": ["v1", "schedules"], + "summary": "Create execution schedule", + "operationId": "postV1Create execution schedule", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "description": "ID of the graph to schedule", + "title": "Graph Id" + }, + "description": "ID of the graph to schedule" + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ScheduleCreationRequest" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GraphExecutionJobInfo" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": 
"#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/settings": { + "patch": { + "tags": ["v1", "graphs"], + "summary": "Update graph settings", + "description": "Update graph settings for the user's library agent.", + "operationId": "patchV1Update graph settings", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphSettings" } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphSettings" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/versions": { + "get": { + "tags": ["v1", "graphs"], + "summary": "Get all graph versions", + "operationId": "getV1Get all graph versions", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { "$ref": "#/components/schemas/GraphModel" }, + "title": "Response Getv1Get All Graph Versions" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/versions/active": { + "put": { + "tags": ["v1", "graphs"], + "summary": "Set active graph version", + "operationId": "putV1Set active graph version", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/SetGraphActiveVersion" } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/versions/{version}": { + "get": { + "tags": ["v1", "graphs"], + "summary": "Get graph version", + "operationId": "getV1Get graph version", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "version", + "in": "path", + "required": true, + "schema": { + "anyOf": [{ "type": "integer" }, { "type": "null" }], + "title": "Version" + } + }, + { + "name": "for_export", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "default": false, + "title": "For Export" + } + } + ], + "responses": { + "200": 
{ + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphModel" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/integrations/ayrshare/sso_url": { + "get": { + "tags": ["v1", "integrations"], + "summary": "Get Ayrshare Sso Url", + "description": "Generate an SSO URL for Ayrshare social media integration.\n\nReturns:\n dict: Contains the SSO URL for Ayrshare integration", + "operationId": "getV1GetAyrshareSsoUrl", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/AyrshareSSOResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/integrations/credentials": { + "get": { + "tags": ["v1", "integrations"], + "summary": "List Credentials", + "operationId": "getV1List credentials", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { + "$ref": "#/components/schemas/CredentialsMetaResponse" + }, + "type": "array", + "title": "Response Getv1List Credentials" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/integrations/providers": { + "get": { + "tags": ["v1", "integrations"], + "summary": "List Providers", + "description": "Get a list of all available provider names.\n\nReturns both statically defined providers (from ProviderName enum)\nand dynamically registered providers (from SDK decorators).\n\nNote: The complete list of provider names is also available as a constant\nin the generated TypeScript client via PROVIDER_NAMES.", + "operationId": "getV1ListProviders", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { "type": "string" }, + "type": "array", + "title": "Response Getv1Listproviders" + } + } + } + } + } + } + }, + "/api/integrations/providers/constants": { + "get": { + "tags": ["v1", "integrations"], + "summary": "Get Provider Constants", + "description": "Get provider names as constants.\n\nThis endpoint returns a model with provider names as constants,\nspecifically designed for OpenAPI code generation tools to create\nTypeScript constants.", + "operationId": "getV1GetProviderConstants", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/ProviderConstants" } + } + } + } + } + } + }, + "/api/integrations/providers/enum-example": { + "get": { + "tags": ["v1", "integrations"], + "summary": "Get Provider Enum Example", + "description": "Example endpoint that uses the CompleteProviderNames enum.\n\nThis endpoint exists to ensure that the CompleteProviderNames enum is included\nin the OpenAPI schema, which will cause Orval to generate it as a\nTypeScript enum/constant.", + "operationId": "getV1GetProviderEnumExample", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProviderEnumResponse" + } + } + } + } + } + } + }, 
+ "/api/integrations/providers/names": { + "get": { + "tags": ["v1", "integrations"], + "summary": "Get Provider Names", + "description": "Get all provider names in a structured format.\n\nThis endpoint is specifically designed to expose the provider names\nin the OpenAPI schema so that code generators like Orval can create\nappropriate TypeScript constants.", + "operationId": "getV1GetProviderNames", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProviderNamesResponse" + } + } + } + } + } + } + }, + "/api/integrations/webhooks/{webhook_id}/ping": { + "post": { + "tags": ["v1", "integrations"], + "summary": "Webhook Ping", + "operationId": "postV1WebhookPing", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "webhook_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Our ID for the webhook" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } } } } @@ -96,6 +2847,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -103,40 +2857,10 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } } }, - "/api/integrations/credentials": { - "get": { - "tags": ["v1", "integrations"], - "summary": "List Credentials", - "operationId": "getV1List credentials", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "items": { - "$ref": "#/components/schemas/CredentialsMetaResponse" - }, - "type": "array", - "title": "Response Getv1List Credentials" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, "/api/integrations/{provider}/credentials": { "get": { "tags": ["v1", "integrations"], @@ -170,6 +2894,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -177,9 +2904,6 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } }, @@ -253,6 +2977,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -260,81 +2987,11 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } } }, "/api/integrations/{provider}/credentials/{cred_id}": { - "get": { - "tags": ["v1", "integrations"], - "summary": "Get Specific Credential By ID", - "operationId": "getV1Get specific credential by id", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "provider", - "in": "path", - "required": true, - "schema": { - "type": "string", - "title": "The provider to retrieve credentials for", - "description": "Provider name for integrations. 
Can be any string value, including custom provider names." - } - }, - { - "name": "cred_id", - "in": "path", - "required": true, - "schema": { - "type": "string", - "title": "The ID of the credentials to retrieve" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "oneOf": [ - { "$ref": "#/components/schemas/OAuth2Credentials" }, - { "$ref": "#/components/schemas/APIKeyCredentials" }, - { "$ref": "#/components/schemas/UserPasswordCredentials" }, - { - "$ref": "#/components/schemas/HostScopedCredentials-Output" - } - ], - "discriminator": { - "propertyName": "type", - "mapping": { - "oauth2": "#/components/schemas/OAuth2Credentials", - "api_key": "#/components/schemas/APIKeyCredentials", - "user_password": "#/components/schemas/UserPasswordCredentials", - "host_scoped": "#/components/schemas/HostScopedCredentials-Output" - } - }, - "title": "Response Getv1Get Specific Credential By Id" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, "delete": { "tags": ["v1", "integrations"], "summary": "Delete Credentials", @@ -390,6 +3047,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -397,9 +3057,124 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } + } + } + }, + "get": { + "tags": ["v1", "integrations"], + "summary": "Get Specific Credential By ID", + "operationId": "getV1Get specific credential by id", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "provider", + "in": "path", + "required": true, + "schema": { + "type": "string", + "title": "The provider to retrieve credentials for", + "description": "Provider name for integrations. Can be any string value, including custom provider names." 
+ } + }, + { + "name": "cred_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "title": "The ID of the credentials to retrieve" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "oneOf": [ + { "$ref": "#/components/schemas/OAuth2Credentials" }, + { "$ref": "#/components/schemas/APIKeyCredentials" }, + { "$ref": "#/components/schemas/UserPasswordCredentials" }, + { + "$ref": "#/components/schemas/HostScopedCredentials-Output" + } + ], + "discriminator": { + "propertyName": "type", + "mapping": { + "oauth2": "#/components/schemas/OAuth2Credentials", + "api_key": "#/components/schemas/APIKeyCredentials", + "user_password": "#/components/schemas/UserPasswordCredentials", + "host_scoped": "#/components/schemas/HostScopedCredentials-Output" + } + }, + "title": "Response Getv1Get Specific Credential By Id" + } + } + } }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/integrations/{provider}/login": { + "get": { + "tags": ["v1", "integrations"], + "summary": "Initiate OAuth flow", + "operationId": "getV1Initiate oauth flow", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "provider", + "in": "path", + "required": true, + "schema": { + "type": "string", + "title": "The provider to initiate an OAuth flow for", + "description": "Provider name for integrations. Can be any string value, including custom provider names." + } + }, + { + "name": "scopes", + "in": "query", + "required": false, + "schema": { + "type": "string", + "title": "Comma-separated list of authorization scopes", + "default": "" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LoginResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } } } } @@ -443,18 +3218,300 @@ } } }, - "/api/integrations/webhooks/{webhook_id}/ping": { - "post": { - "tags": ["v1", "integrations"], - "summary": "Webhook Ping", - "operationId": "postV1WebhookPing", + "/api/library/agents": { + "get": { + "tags": ["v2", "library", "private"], + "summary": "List Library Agents", + "description": "Get all agents in the user's library (both created and saved).\n\nArgs:\n user_id: ID of the authenticated user.\n search_term: Optional search term to filter agents by name/description.\n filter_by: List of filters to apply (favorites, created by user).\n sort_by: List of sorting criteria (created date, updated date).\n page: Page number to retrieve.\n page_size: Number of agents per page.\n\nReturns:\n A LibraryAgentResponse containing agents and pagination metadata.\n\nRaises:\n HTTPException: If a server/database error occurs.", + "operationId": "getV2List library agents", "security": [{ "HTTPBearerJWT": [] }], "parameters": [ { - "name": "webhook_id", + "name": "search_term", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "description": "Search term to filter agents", + "title": "Search Term" + }, + "description": "Search term to filter 
agents" + }, + { + "name": "sort_by", + "in": "query", + "required": false, + "schema": { + "$ref": "#/components/schemas/LibraryAgentSort", + "description": "Criteria to sort results by", + "default": "updatedAt" + }, + "description": "Criteria to sort results by" + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "description": "Page number to retrieve (must be >= 1)", + "default": 1, + "title": "Page" + }, + "description": "Page number to retrieve (must be >= 1)" + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "description": "Number of agents per page (must be >= 1)", + "default": 15, + "title": "Page Size" + }, + "description": "Number of agents per page (must be >= 1)" + } + ], + "responses": { + "200": { + "description": "List of library agents", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LibraryAgentResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "500": { + "description": "Server error", + "content": { "application/json": {} } + } + } + }, + "post": { + "tags": ["v2", "library", "private"], + "summary": "Add Marketplace Agent", + "description": "Add an agent from the marketplace to the user's library.\n\nArgs:\n store_listing_version_id: ID of the store listing version to add.\n user_id: ID of the authenticated user.\n\nReturns:\n library_model.LibraryAgent: Agent added to the library\n\nRaises:\n HTTPException(404): If the listing version is not found.\n HTTPException(500): If a server/database error occurs.", + "operationId": "postV2Add marketplace agent", + "security": [{ "HTTPBearerJWT": [] }], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_postV2Add_marketplace_agent" + } + } + } + }, + "responses": { + "201": { + "description": "Agent added successfully", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LibraryAgent" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "404": { "description": "Store listing version not found" }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "500": { "description": "Server error" } + } + } + }, + "/api/library/agents/by-graph/{graph_id}": { + "get": { + "tags": ["v2", "library", "private"], + "summary": "Get Library Agent By Graph Id", + "operationId": "getV2GetLibraryAgentByGraphId", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", "in": "path", "required": true, - "schema": { "type": "string", "title": "Our ID for the webhook" } + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "version", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "integer" }, { "type": "null" }], + "title": "Version" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LibraryAgent" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + 
"description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/library/agents/favorites": { + "get": { + "tags": ["v2", "library", "private"], + "summary": "List Favorite Library Agents", + "description": "Get all favorite agents in the user's library.\n\nArgs:\n user_id: ID of the authenticated user.\n page: Page number to retrieve.\n page_size: Number of agents per page.\n\nReturns:\n A LibraryAgentResponse containing favorite agents and pagination metadata.\n\nRaises:\n HTTPException: If a server/database error occurs.", + "operationId": "getV2List favorite library agents", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "description": "Page number to retrieve (must be >= 1)", + "default": 1, + "title": "Page" + }, + "description": "Page number to retrieve (must be >= 1)" + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "description": "Number of agents per page (must be >= 1)", + "default": 15, + "title": "Page Size" + }, + "description": "Number of agents per page (must be >= 1)" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LibraryAgentResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "500": { + "description": "Server error", + "content": { "application/json": {} } + } + } + } + }, + "/api/library/agents/marketplace/{store_listing_version_id}": { + "get": { + "tags": ["v2", "library", "private", "store", "library"], + "summary": "Get Agent By Store ID", + "description": "Get Library Agent from Store Listing Version ID.", + "operationId": "getV2Get agent by store id", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "store_listing_version_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Store Listing Version Id" } + } + ], + "responses": { + "200": { + "description": "Library agent found", + "content": { + "application/json": { + "schema": { + "anyOf": [ + { "$ref": "#/components/schemas/LibraryAgent" }, + { "type": "null" } + ], + "title": "Response Getv2Get Agent By Store Id" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "404": { "description": "Agent not found" }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/library/agents/{library_agent_id}": { + "delete": { + "tags": ["v2", "library", "private"], + "summary": "Delete Library Agent", + "description": "Soft-delete the specified library agent.\n\nArgs:\n library_agent_id: ID of the library agent to delete.\n user_id: ID of the authenticated user.\n\nReturns:\n 204 No Content if successful.\n\nRaises:\n HTTPException(404): If the agent does not exist.\n HTTPException(500): If a server/database error occurs.", + "operationId": "deleteV2Delete library agent", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "library_agent_id", 
+ "in": "path", + "required": true, + "schema": { "type": "string", "title": "Library Agent Id" } } ], "responses": { @@ -462,6 +3519,11 @@ "description": "Successful Response", "content": { "application/json": { "schema": {} } } }, + "204": { "description": "Agent deleted successfully" }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "404": { "description": "Agent not found" }, "422": { "description": "Validation Error", "content": { @@ -470,139 +3532,503 @@ } } }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } + "500": { "description": "Server error" } } - } - }, - "/api/integrations/ayrshare/sso_url": { + }, "get": { - "tags": ["v1", "integrations"], - "summary": "Get Ayrshare Sso Url", - "description": "Generate an SSO URL for Ayrshare social media integration.\n\nReturns:\n dict: Contains the SSO URL for Ayrshare integration", - "operationId": "getV1GetAyrshareSsoUrl", + "tags": ["v2", "library", "private"], + "summary": "Get Library Agent", + "operationId": "getV2Get library agent", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "library_agent_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Library Agent Id" } + } + ], "responses": { "200": { "description": "Successful Response", "content": { "application/json": { - "schema": { "$ref": "#/components/schemas/AyrshareSSOResponse" } + "schema": { "$ref": "#/components/schemas/LibraryAgent" } } } }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "patch": { + "tags": ["v2", "library", "private"], + "summary": "Update Library Agent", + "description": "Update the library agent with the given fields.\n\nArgs:\n library_agent_id: ID of the library agent to update.\n payload: Fields to update (auto_update_version, is_favorite, etc.).\n user_id: ID of the authenticated user.\n\nRaises:\n HTTPException(500): If a server/database error occurs.", + "operationId": "patchV2Update library agent", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "library_agent_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Library Agent Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LibraryAgentUpdateRequest" + } + } } }, - "security": [{ "HTTPBearerJWT": [] }] + "responses": { + "200": { + "description": "Agent updated successfully", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LibraryAgent" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "500": { "description": "Server error" } + } } }, - "/api/integrations/providers": { + "/api/library/agents/{library_agent_id}/fork": { + "post": { + "tags": ["v2", "library", "private"], + "summary": "Fork Library Agent", + "operationId": "postV2Fork library agent", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "library_agent_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Library Agent Id" } + } + ], + "responses": { + "200": { + 
"description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LibraryAgent" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/library/presets": { "get": { - "tags": ["v1", "integrations"], - "summary": "List Providers", - "description": "Get a list of all available provider names.\n\nReturns both statically defined providers (from ProviderName enum)\nand dynamically registered providers (from SDK decorators).\n\nNote: The complete list of provider names is also available as a constant\nin the generated TypeScript client via PROVIDER_NAMES.", - "operationId": "getV1ListProviders", + "tags": ["v2", "presets"], + "summary": "List presets", + "description": "Retrieve a paginated list of presets for the current user.", + "operationId": "getV2List presets", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "default": 1, + "title": "Page" + } + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "default": 10, + "title": "Page Size" + } + }, + { + "name": "graph_id", + "in": "query", + "required": true, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "description": "Allows to filter presets by a specific agent graph", + "title": "Graph Id" + }, + "description": "Allows to filter presets by a specific agent graph" + } + ], "responses": { "200": { "description": "Successful Response", "content": { "application/json": { "schema": { - "items": { "type": "string" }, + "$ref": "#/components/schemas/LibraryAgentPresetResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "post": { + "tags": ["v2", "presets"], + "summary": "Create a new preset", + "description": "Create a new preset for the current user.", + "operationId": "postV2Create a new preset", + "security": [{ "HTTPBearerJWT": [] }], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "anyOf": [ + { + "$ref": "#/components/schemas/LibraryAgentPresetCreatable" + }, + { + "$ref": "#/components/schemas/LibraryAgentPresetCreatableFromGraphExecution" + } + ], + "title": "Preset" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LibraryAgentPreset" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/library/presets/setup-trigger": { + "post": { + "tags": ["v2", "presets"], + "summary": "Setup Trigger", + "description": "Sets up a webhook-triggered `LibraryAgentPreset` for a `LibraryAgent`.\nReturns the correspondingly created `LibraryAgentPreset` with `webhook_id` set.", + "operationId": "postV2SetupTrigger", + "requestBody": { + "content": { + 
"application/json": { + "schema": { + "$ref": "#/components/schemas/TriggeredPresetSetupRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LibraryAgentPreset" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/library/presets/{preset_id}": { + "delete": { + "tags": ["v2", "presets"], + "summary": "Delete a preset", + "description": "Delete an existing preset by its ID.", + "operationId": "deleteV2Delete a preset", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "preset_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Preset Id" } + } + ], + "responses": { + "204": { "description": "Successful Response" }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "get": { + "tags": ["v2", "presets"], + "summary": "Get a specific preset", + "description": "Retrieve details for a specific preset by its ID.", + "operationId": "getV2Get a specific preset", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "preset_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Preset Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LibraryAgentPreset" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "patch": { + "tags": ["v2", "presets"], + "summary": "Update an existing preset", + "description": "Update an existing preset by its ID.", + "operationId": "patchV2Update an existing preset", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "preset_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Preset Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LibraryAgentPresetUpdatable" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LibraryAgentPreset" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/library/presets/{preset_id}/execute": { + "post": { + "tags": ["v2", "presets", "presets"], + "summary": "Execute a preset", + "description": "Execute a preset with the given graph and node input for the current user.", + "operationId": "postV2Execute a preset", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "preset_id", + "in": "path", + "required": true, 
+ "schema": { "type": "string", "title": "Preset Id" } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_postV2Execute_a_preset" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphExecutionMeta" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/app/{client_id}": { + "get": { + "tags": ["oauth"], + "summary": "Get Oauth App Info", + "description": "Get public information about an OAuth application.\n\nThis endpoint is used by the consent screen to display application details\nto the user before they authorize access.\n\nReturns:\n- name: Application name\n- description: Application description (if provided)\n- scopes: List of scopes the application is allowed to request", + "operationId": "getOauthGetOauthAppInfo", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "client_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Client Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OAuthApplicationPublicInfo" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "404": { "description": "Application not found or disabled" }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/apps/mine": { + "get": { + "tags": ["oauth"], + "summary": "List My Oauth Apps", + "description": "List all OAuth applications owned by the current user.\n\nReturns a list of OAuth applications with their details including:\n- id, name, description, logo_url\n- client_id (public identifier)\n- redirect_uris, grant_types, scopes\n- is_active status\n- created_at, updated_at timestamps\n\nNote: client_secret is never returned for security reasons.", + "operationId": "getOauthListMyOauthApps", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { + "$ref": "#/components/schemas/OAuthApplicationInfo" + }, "type": "array", - "title": "Response Getv1Listproviders" + "title": "Response Getoauthlistmyoauthapps" } } } - } - } - } - }, - "/api/integrations/providers/names": { - "get": { - "tags": ["v1", "integrations"], - "summary": "Get Provider Names", - "description": "Get all provider names in a structured format.\n\nThis endpoint is specifically designed to expose the provider names\nin the OpenAPI schema so that code generators like Orval can create\nappropriate TypeScript constants.", - "operationId": "getV1GetProviderNames", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ProviderNamesResponse" - } - } - } - } - } - } - }, - "/api/integrations/providers/constants": { - "get": { - "tags": ["v1", "integrations"], - "summary": "Get Provider Constants", - "description": "Get provider names as constants.\n\nThis endpoint returns a model with provider names as 
constants,\nspecifically designed for OpenAPI code generation tools to create\nTypeScript constants.", - "operationId": "getV1GetProviderConstants", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/ProviderConstants" } - } - } - } - } - } - }, - "/api/integrations/providers/enum-example": { - "get": { - "tags": ["v1", "integrations"], - "summary": "Get Provider Enum Example", - "description": "Example endpoint that uses the CompleteProviderNames enum.\n\nThis endpoint exists to ensure that the CompleteProviderNames enum is included\nin the OpenAPI schema, which will cause Orval to generate it as a\nTypeScript enum/constant.", - "operationId": "getV1GetProviderEnumExample", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ProviderEnumResponse" - } - } - } - } - } - } - }, - "/api/analytics/log_raw_metric": { - "post": { - "tags": ["v1", "analytics"], - "summary": "Log Raw Metric", - "operationId": "postV1LogRawMetric", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LogRawMetricRequest" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" @@ -611,21 +4037,244 @@ "security": [{ "HTTPBearerJWT": [] }] } }, - "/api/analytics/log_raw_analytics": { + "/api/oauth/apps/{app_id}/logo": { + "patch": { + "tags": ["oauth"], + "summary": "Update App Logo", + "description": "Update the logo URL for an OAuth application.\n\nOnly the application owner can update the logo.\nThe logo should be uploaded first using the media upload endpoint,\nthen this endpoint is called with the resulting URL.\n\nLogo requirements:\n- Must be square (1:1 aspect ratio)\n- Minimum 512x512 pixels\n- Maximum 2048x2048 pixels\n\nReturns the updated application info.", + "operationId": "patchOauthUpdateAppLogo", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "app_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "App Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/UpdateAppLogoRequest" } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OAuthApplicationInfo" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/apps/{app_id}/logo/upload": { "post": { - "tags": ["v1", "analytics"], - "summary": "Log Raw Analytics", - "operationId": "postV1LogRawAnalytics", + "tags": ["oauth"], + "summary": "Upload App Logo", + "description": "Upload a logo image for an OAuth application.\n\nRequirements:\n- Image must be square (1:1 aspect ratio)\n- Minimum 512x512 pixels\n- Maximum 2048x2048 pixels\n- Allowed formats: JPEG, PNG, WebP\n- Maximum file size: 
3MB\n\nThe image is uploaded to cloud storage and the app's logoUrl is updated.\nReturns the updated application info.", + "operationId": "postOauthUploadAppLogo", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "app_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "App Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "multipart/form-data": { + "schema": { + "$ref": "#/components/schemas/Body_postOauthUploadAppLogo" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OAuthApplicationInfo" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/apps/{app_id}/status": { + "patch": { + "tags": ["oauth"], + "summary": "Update App Status", + "description": "Enable or disable an OAuth application.\n\nOnly the application owner can update the status.\nWhen disabled, the application cannot be used for new authorizations\nand existing access tokens will fail validation.\n\nReturns the updated application info.", + "operationId": "patchOauthUpdateAppStatus", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "app_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "App Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_patchOauthUpdateAppStatus" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OAuthApplicationInfo" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/authorize": { + "post": { + "tags": ["oauth"], + "summary": "Authorize", + "description": "OAuth 2.0 Authorization Endpoint\n\nUser must be logged in (authenticated with Supabase JWT).\nThis endpoint creates an authorization code and returns a redirect URL.\n\nPKCE (Proof Key for Code Exchange) is REQUIRED for all authorization requests.\n\nThe frontend consent screen should call this endpoint after the user approves,\nthen redirect the user to the returned `redirect_url`.\n\nRequest Body:\n- client_id: The OAuth application's client ID\n- redirect_uri: Where to redirect after authorization (must match registered URI)\n- scopes: List of permissions (e.g., \"EXECUTE_GRAPH READ_GRAPH\")\n- state: Anti-CSRF token provided by client (will be returned in redirect)\n- response_type: Must be \"code\" (for authorization code flow)\n- code_challenge: PKCE code challenge (required)\n- code_challenge_method: \"S256\" (recommended) or \"plain\"\n\nReturns:\n- redirect_url: The URL to redirect the user to (includes authorization code)\n\nError cases return a redirect_url with error parameters, or raise HTTPException\nfor critical errors (like invalid redirect_uri).", + "operationId": "postOauthAuthorize", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/AuthorizeRequest" } + } + 
}, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/AuthorizeResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/oauth/introspect": { + "post": { + "tags": ["oauth"], + "summary": "Introspect", + "description": "OAuth 2.0 Token Introspection Endpoint (RFC 7662)\n\nAllows clients to check if a token is valid and get its metadata.\n\nReturns:\n- active: Whether the token is currently active\n- scopes: List of authorized scopes (if active)\n- client_id: The client the token was issued to (if active)\n- user_id: The user the token represents (if active)\n- exp: Expiration timestamp (if active)\n- token_type: \"access_token\" or \"refresh_token\" (if active)", + "operationId": "postOauthIntrospect", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Body_postV1LogRawAnalytics" + "$ref": "#/components/schemas/Body_postOauthIntrospect" } } }, "required": true }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TokenIntrospectionResult" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/revoke": { + "post": { + "tags": ["oauth"], + "summary": "Revoke", + "description": "OAuth 2.0 Token Revocation Endpoint (RFC 7009)\n\nAllows clients to revoke an access or refresh token.\n\nNote: Revoking a refresh token does NOT revoke associated access tokens.\nRevoking an access token does NOT revoke the associated refresh token.", + "operationId": "postOauthRevoke", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/Body_postOauthRevoke" } + } + }, + "required": true + }, "responses": { "200": { "description": "Successful Response", @@ -638,161 +4287,25 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } - }, - "security": [{ "HTTPBearerJWT": [] }] + } } }, - "/api/auth/user": { + "/api/oauth/token": { "post": { - "tags": ["v1", "auth"], - "summary": "Get or create user", - "operationId": "postV1Get or create user", - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/auth/user/email": { - "post": { - "tags": ["v1", "auth"], - "summary": "Update user email", - "operationId": "postV1Update user email", - "requestBody": { - "content": { - "application/json": { - "schema": { "type": "string", "title": "Email" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "additionalProperties": { "type": "string" }, - "type": "object", - "title": "Response Postv1Update User Email" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": 
{ - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/auth/user/timezone": { - "get": { - "tags": ["v1", "auth"], - "summary": "Get user timezone", - "description": "Get user timezone setting.", - "operationId": "getV1Get user timezone", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/TimezoneResponse" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - }, - "post": { - "tags": ["v1", "auth"], - "summary": "Update user timezone", - "description": "Update user timezone. The timezone should be a valid IANA timezone identifier.", - "operationId": "postV1Update user timezone", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/UpdateTimezoneRequest" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/TimezoneResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/auth/user/preferences": { - "get": { - "tags": ["v1", "auth"], - "summary": "Get notification preferences", - "operationId": "getV1Get notification preferences", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/NotificationPreference" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - }, - "post": { - "tags": ["v1", "auth"], - "summary": "Update notification preferences", - "operationId": "postV1Update notification preferences", + "tags": ["oauth"], + "summary": "Token", + "description": "OAuth 2.0 Token Endpoint\n\nExchanges authorization code or refresh token for access token.\n\nGrant Types:\n1. authorization_code: Exchange authorization code for tokens\n - Required: grant_type, code, redirect_uri, client_id, client_secret\n - Optional: code_verifier (required if PKCE was used)\n\n2. 
refresh_token: Exchange refresh token for new access token\n - Required: grant_type, refresh_token, client_id, client_secret\n\nReturns:\n- access_token: Bearer token for API access (1 hour TTL)\n- token_type: \"Bearer\"\n- expires_in: Seconds until access token expires\n- refresh_token: Token for refreshing access (30 days TTL)\n- scopes: List of scopes", + "operationId": "postOauthToken", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/NotificationPreferenceDTO" + "anyOf": [ + { "$ref": "#/components/schemas/TokenRequestByCode" }, + { "$ref": "#/components/schemas/TokenRequestByRefreshToken" } + ], + "title": "Request" } } }, @@ -803,9 +4316,7 @@ "description": "Successful Response", "content": { "application/json": { - "schema": { - "$ref": "#/components/schemas/NotificationPreference" - } + "schema": { "$ref": "#/components/schemas/TokenResponse" } } } }, @@ -816,12 +4327,8 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } - }, - "security": [{ "HTTPBearerJWT": [] }] + } } }, "/api/onboarding": { @@ -865,6 +4372,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -872,61 +4382,11 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } }, "security": [{ "HTTPBearerJWT": [] }] } }, - "/api/onboarding/step": { - "post": { - "tags": ["v1", "onboarding"], - "summary": "Complete onboarding step", - "operationId": "postV1Complete onboarding step", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "step", - "in": "query", - "required": true, - "schema": { - "enum": [ - "WELCOME", - "USAGE_REASON", - "INTEGRATIONS", - "AGENT_CHOICE", - "AGENT_NEW_RUN", - "AGENT_INPUT", - "CONGRATS", - "MARKETPLACE_VISIT", - "BUILDER_OPEN" - ], - "type": "string", - "title": "Step" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, "/api/onboarding/agents": { "get": { "tags": ["v1", "onboarding"], @@ -997,413 +4457,41 @@ "security": [{ "HTTPBearerJWT": [] }] } }, - "/api/blocks": { - "get": { - "tags": ["v1", "blocks"], - "summary": "List available blocks", - "operationId": "getV1List available blocks", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "items": { "additionalProperties": true, "type": "object" }, - "type": "array", - "title": "Response Getv1List Available Blocks" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/blocks/{block_id}/execute": { + "/api/onboarding/step": { "post": { - "tags": ["v1", "blocks"], - "summary": "Execute graph block", - "operationId": "postV1Execute graph block", + "tags": ["v1", "onboarding"], + "summary": "Complete onboarding step", + "operationId": "postV1Complete onboarding step", "security": [{ "HTTPBearerJWT": [] }], "parameters": [ { - "name": "block_id", - "in": "path", + "name": 
"step", + "in": "query", "required": true, - "schema": { "type": "string", "title": "Block Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "type": "object", - "additionalProperties": true, - "title": "Data" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "type": "object", - "additionalProperties": { "type": "array", "items": {} }, - "title": "Response Postv1Execute Graph Block" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/files/upload": { - "post": { - "tags": ["v1", "files"], - "summary": "Upload file to cloud storage", - "description": "Upload a file to cloud storage and return a storage key that can be used\nwith FileStoreBlock and AgentFileInputBlock.\n\nArgs:\n file: The file to upload\n user_id: The user ID\n provider: Cloud storage provider (\"gcs\", \"s3\", \"azure\")\n expiration_hours: Hours until file expires (1-48)\n\nReturns:\n Dict containing the cloud storage path and signed URL", - "operationId": "postV1Upload file to cloud storage", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "provider", - "in": "query", - "required": false, "schema": { - "type": "string", - "default": "gcs", - "title": "Provider" - } - }, - { - "name": "expiration_hours", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "default": 24, - "title": "Expiration Hours" - } - } - ], - "requestBody": { - "required": true, - "content": { - "multipart/form-data": { - "schema": { - "$ref": "#/components/schemas/Body_postV1Upload_file_to_cloud_storage" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/UploadFileResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/credits": { - "get": { - "tags": ["v1", "credits"], - "summary": "Get user credits", - "operationId": "getV1Get user credits", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "additionalProperties": { "type": "integer" }, - "type": "object", - "title": "Response Getv1Get User Credits" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - }, - "post": { - "tags": ["v1", "credits"], - "summary": "Request credit top up", - "operationId": "postV1Request credit top up", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/RequestTopUp" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ 
"HTTPBearerJWT": [] }] - }, - "patch": { - "tags": ["v1", "credits"], - "summary": "Fulfill checkout session", - "operationId": "patchV1Fulfill checkout session", - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/credits/{transaction_key}/refund": { - "post": { - "tags": ["v1", "credits"], - "summary": "Refund credit transaction", - "operationId": "postV1Refund credit transaction", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "transaction_key", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Transaction Key" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "type": "object", - "additionalProperties": { "type": "string" }, - "title": "Metadata" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "type": "integer", - "title": "Response Postv1Refund Credit Transaction" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/credits/auto-top-up": { - "get": { - "tags": ["v1", "credits"], - "summary": "Get auto top up", - "operationId": "getV1Get auto top up", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/AutoTopUpConfig" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - }, - "post": { - "tags": ["v1", "credits"], - "summary": "Configure auto top up", - "operationId": "postV1Configure auto top up", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/AutoTopUpConfig" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "type": "string", - "title": "Response Postv1Configure Auto Top Up" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/credits/stripe_webhook": { - "post": { - "tags": ["v1", "credits"], - "summary": "Handle Stripe webhooks", - "operationId": "postV1Handle stripe webhooks", - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - } - } - } - }, - "/api/credits/manage": { - "get": { - "tags": ["v1", "credits"], - "summary": "Manage payment methods", - "operationId": "getV1Manage payment methods", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "additionalProperties": { "type": "string" }, - "type": "object", - "title": "Response Getv1Manage Payment Methods" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] 
- } - }, - "/api/credits/transactions": { - "get": { - "tags": ["v1", "credits"], - "summary": "Get credit history", - "operationId": "getV1Get credit history", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "transaction_time", - "in": "query", - "required": false, - "schema": { - "anyOf": [ - { "type": "string", "format": "date-time" }, - { "type": "null" } + "enum": [ + "WELCOME", + "USAGE_REASON", + "INTEGRATIONS", + "AGENT_CHOICE", + "AGENT_NEW_RUN", + "AGENT_INPUT", + "CONGRATS", + "MARKETPLACE_VISIT", + "BUILDER_OPEN" ], - "title": "Transaction Time" - } - }, - { - "name": "transaction_type", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "title": "Transaction Type" - } - }, - { - "name": "transaction_count_limit", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "default": 100, - "title": "Transaction Count Limit" + "type": "string", + "title": "Step" } } ], "responses": { "200": { "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/TransactionHistory" } - } - } + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" }, "422": { "description": "Validation Error", @@ -1412,70 +4500,20 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } } }, - "/api/credits/refunds": { - "get": { - "tags": ["v1", "credits"], - "summary": "Get refund requests", - "operationId": "getV1Get refund requests", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "items": { "$ref": "#/components/schemas/RefundRequest" }, - "type": "array", - "title": "Response Getv1Get Refund Requests" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/graphs": { - "get": { - "tags": ["v1", "graphs"], - "summary": "List user graphs", - "operationId": "getV1List user graphs", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "items": { "$ref": "#/components/schemas/GraphMeta" }, - "type": "array", - "title": "Response Getv1List User Graphs" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - }, + "/api/otto/ask": { "post": { - "tags": ["v1", "graphs"], - "summary": "Create new graph", - "operationId": "postV1Create new graph", + "tags": ["v2", "otto"], + "summary": "Proxy Otto Chat Request", + "description": "Proxy requests to Otto API while adding necessary security headers and logging.\nRequires an authenticated user.", + "operationId": "postV2Proxy otto chat request", "requestBody": { "content": { "application/json": { - "schema": { "$ref": "#/components/schemas/CreateGraph" } + "schema": { "$ref": "#/components/schemas/ChatRequest" } } }, "required": true @@ -1485,10 +4523,13 @@ "description": "Successful Response", "content": { "application/json": { - "schema": { "$ref": "#/components/schemas/GraphModel" } + "schema": { "$ref": "#/components/schemas/ApiResponse" } } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -1496,704 +4537,9 @@ 
"schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/graphs/{graph_id}/versions/{version}": { - "get": { - "tags": ["v1", "graphs"], - "summary": "Get graph version", - "operationId": "getV1Get graph version", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "version", - "in": "path", - "required": true, - "schema": { - "anyOf": [{ "type": "integer" }, { "type": "null" }], - "title": "Version" - } - }, - { - "name": "for_export", - "in": "query", - "required": false, - "schema": { - "type": "boolean", - "default": false, - "title": "For Export" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/GraphModel" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/graphs/{graph_id}": { - "get": { - "tags": ["v1", "graphs"], - "summary": "Get specific graph", - "operationId": "getV1Get specific graph", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "version", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "integer" }, { "type": "null" }], - "title": "Version" - } - }, - { - "name": "for_export", - "in": "query", - "required": false, - "schema": { - "type": "boolean", - "default": false, - "title": "For Export" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/GraphModel" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "delete": { - "tags": ["v1", "graphs"], - "summary": "Delete graph permanently", - "operationId": "deleteV1Delete graph permanently", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/DeleteGraphResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "put": { - "tags": ["v1", "graphs"], - "summary": "Update graph version", - "operationId": "putV1Update graph version", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { 
"$ref": "#/components/schemas/Graph" } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/GraphModel" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/graphs/{graph_id}/versions": { - "get": { - "tags": ["v1", "graphs"], - "summary": "Get all graph versions", - "operationId": "getV1Get all graph versions", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { "$ref": "#/components/schemas/GraphModel" }, - "title": "Response Getv1Get All Graph Versions" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/graphs/{graph_id}/versions/active": { - "put": { - "tags": ["v1", "graphs"], - "summary": "Set active graph version", - "operationId": "putV1Set active graph version", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/SetGraphActiveVersion" } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/graphs/{graph_id}/settings": { - "patch": { - "tags": ["v1", "graphs"], - "summary": "Update graph settings", - "description": "Update graph settings for the user's library agent.", - "operationId": "patchV1Update graph settings", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/GraphSettings" } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/GraphSettings" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/graphs/{graph_id}/execute/{graph_version}": { - "post": { - "tags": ["v1", "graphs"], - "summary": "Execute graph agent", - "operationId": "postV1Execute graph agent", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": 
"path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "graph_version", - "in": "path", - "required": true, - "schema": { - "anyOf": [{ "type": "integer" }, { "type": "null" }], - "title": "Graph Version" - } - }, - { - "name": "preset_id", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "title": "Preset Id" - } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Body_postV1Execute_graph_agent" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/GraphExecutionMeta" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/graphs/{graph_id}/executions/{graph_exec_id}/stop": { - "post": { - "tags": ["v1", "graphs"], - "summary": "Stop graph execution", - "operationId": "postV1Stop graph execution", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "graph_exec_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Exec Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "anyOf": [ - { "$ref": "#/components/schemas/GraphExecutionMeta" }, - { "type": "null" } - ], - "title": "Response Postv1Stop Graph Execution" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/executions": { - "get": { - "tags": ["v1", "graphs"], - "summary": "List all executions", - "operationId": "getV1List all executions", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "items": { - "$ref": "#/components/schemas/GraphExecutionMeta" - }, - "type": "array", - "title": "Response Getv1List All Executions" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/graphs/{graph_id}/executions": { - "get": { - "tags": ["v1", "graphs"], - "summary": "List graph executions", - "operationId": "getV1List graph executions", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "page", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "minimum": 1, - "description": "Page number (1-indexed)", - "default": 1, - "title": "Page" - }, - "description": "Page number (1-indexed)" - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "maximum": 100, - "minimum": 1, - "description": "Number of executions per page", - "default": 25, - "title": "Page Size" - }, - "description": "Number of executions per page" - } - ], - "responses": { - "200": { - "description": "Successful 
Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/GraphExecutionsPaginated" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/graphs/{graph_id}/executions/{graph_exec_id}": { - "get": { - "tags": ["v1", "graphs"], - "summary": "Get execution details", - "operationId": "getV1Get execution details", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "graph_exec_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Exec Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "anyOf": [ - { "$ref": "#/components/schemas/GraphExecution" }, - { "$ref": "#/components/schemas/GraphExecutionWithNodes" } - ], - "title": "Response Getv1Get Execution Details" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/executions/{graph_exec_id}": { - "delete": { - "tags": ["v1", "graphs"], - "summary": "Delete graph execution", - "operationId": "deleteV1Delete graph execution", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_exec_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Exec Id" } - } - ], - "responses": { - "204": { "description": "Successful Response" }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/graphs/{graph_id}/executions/{graph_exec_id}/share": { - "post": { - "tags": ["v1"], - "summary": "Enable Execution Sharing", - "description": "Enable sharing for a graph execution.", - "operationId": "postV1EnableExecutionSharing", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "graph_exec_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Exec Id" } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ShareRequest", - "default": {} - } - } } }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/ShareResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "delete": { - "tags": ["v1"], - "summary": "Disable Execution Sharing", - "description": "Disable sharing for a graph execution.", - "operationId": "deleteV1DisableExecutionSharing", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - 
"name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "graph_exec_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Exec Id" } - } - ], - "responses": { - "204": { "description": "Successful Response" }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } + "security": [{ "HTTPBearerJWT": [] }] } }, "/api/public/shared/{share_token}": { @@ -2236,46 +4582,32 @@ } } }, - "/api/graphs/{graph_id}/schedules": { + "/api/review/action": { "post": { - "tags": ["v1", "schedules"], - "summary": "Create execution schedule", - "operationId": "postV1Create execution schedule", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { - "type": "string", - "description": "ID of the graph to schedule", - "title": "Graph Id" - }, - "description": "ID of the graph to schedule" - } - ], + "tags": ["v2", "executions", "review", "v2", "executions", "review"], + "summary": "Process Review Action", + "description": "Process reviews with approve or reject actions.", + "operationId": "postV2ProcessReviewAction", "requestBody": { - "required": true, "content": { "application/json": { - "schema": { - "$ref": "#/components/schemas/ScheduleCreationRequest" - } + "schema": { "$ref": "#/components/schemas/ReviewRequest" } } - } + }, + "required": true }, "responses": { "200": { "description": "Successful Response", "content": { "application/json": { - "schema": { - "$ref": "#/components/schemas/GraphExecutionJobInfo" - } + "schema": { "$ref": "#/components/schemas/ReviewResponse" } } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -2283,40 +4615,115 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } - } - }, + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/review/execution/{graph_exec_id}": { "get": { - "tags": ["v1", "schedules"], - "summary": "List execution schedules for a graph", - "operationId": "getV1List execution schedules for a graph", + "tags": ["v2", "executions", "review", "v2", "executions", "review"], + "summary": "Get Pending Reviews for Execution", + "description": "Get all pending reviews for a specific graph execution.\n\nRetrieves all reviews with status \"WAITING\" for the specified graph execution\nthat belong to the authenticated user. 
Results are ordered by creation time\n(oldest first) to preserve review order within the execution.\n\nArgs:\n graph_exec_id: ID of the graph execution to get reviews for\n user_id: Authenticated user ID from security dependency\n\nReturns:\n List of pending review objects for the specified execution\n\nRaises:\n HTTPException:\n - 403: If user doesn't own the graph execution\n - 500: If authentication fails or database error occurs\n\nNote:\n Only returns reviews owned by the authenticated user for security.\n Reviews with invalid status are excluded with warning logs.", + "operationId": "getV2Get pending reviews for execution", "security": [{ "HTTPBearerJWT": [] }], "parameters": [ { - "name": "graph_id", + "name": "graph_exec_id", "in": "path", "required": true, - "schema": { "type": "string", "title": "Graph Id" } + "schema": { "type": "string", "title": "Graph Exec Id" } } ], "responses": { "200": { - "description": "Successful Response", + "description": "List of pending reviews for the execution", "content": { "application/json": { "schema": { "type": "array", "items": { - "$ref": "#/components/schemas/GraphExecutionJobInfo" + "$ref": "#/components/schemas/PendingHumanReviewModel" }, - "title": "Response Getv1List Execution Schedules For A Graph" + "title": "Response Getv2Get Pending Reviews For Execution" } } } }, + "400": { "description": "Invalid graph execution ID" }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "403": { "description": "Access denied to graph execution" }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "500": { + "description": "Server error", + "content": { "application/json": {} } + } + } + } + }, + "/api/review/pending": { + "get": { + "tags": ["v2", "executions", "review", "v2", "executions", "review"], + "summary": "Get Pending Reviews", + "description": "Get all pending reviews for the current user.\n\nRetrieves all reviews with status \"WAITING\" that belong to the authenticated user.\nResults are ordered by creation time (newest first).\n\nArgs:\n user_id: Authenticated user ID from security dependency\n\nReturns:\n List of pending review objects with status converted to typed literals\n\nRaises:\n HTTPException: If authentication fails or database error occurs\n\nNote:\n Reviews with invalid status values are logged as warnings but excluded\n from results rather than failing the entire request.", + "operationId": "getV2Get pending reviews", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "description": "Page number (1-indexed)", + "default": 1, + "title": "Page" + }, + "description": "Page number (1-indexed)" + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 100, + "minimum": 1, + "description": "Number of reviews per page", + "default": 25, + "title": "Page Size" + }, + "description": "Number of reviews per page" + } + ], + "responses": { + "200": { + "description": "List of pending reviews", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/PendingHumanReviewModel" + }, + "title": "Response Getv2Get Pending Reviews" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": 
"Validation Error", "content": { @@ -2325,8 +4732,9 @@ } } }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + "500": { + "description": "Server error", + "content": { "application/json": {} } } } } @@ -2390,6 +4798,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -2397,61 +4808,65 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } } }, - "/api/api-keys": { + "/api/store/admin/listings": { "get": { - "tags": ["v1", "api-keys"], - "summary": "List user API keys", - "description": "List all API keys for the user", - "operationId": "getV1List user api keys", + "tags": ["v2", "admin", "store", "admin"], + "summary": "Get Admin Listings History", + "description": "Get store listings with their version history for admins.\n\nThis provides a consolidated view of listings with their versions,\nallowing for an expandable UI in the admin dashboard.\n\nArgs:\n status: Filter by submission status (PENDING, APPROVED, REJECTED)\n search: Search by name, description, or user email\n page: Page number for pagination\n page_size: Number of items per page\n\nReturns:\n StoreListingsWithVersionsResponse with listings and their versions", + "operationId": "getV2Get admin listings history", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "status", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { "$ref": "#/components/schemas/SubmissionStatus" }, + { "type": "null" } + ], + "title": "Status" + } + }, + { + "name": "search", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Search" + } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 1, "title": "Page" } + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 20, "title": "Page Size" } + } + ], "responses": { "200": { "description": "Successful Response", "content": { "application/json": { "schema": { - "items": { "$ref": "#/components/schemas/APIKeyInfo" }, - "type": "array", - "title": "Response Getv1List User Api Keys" + "$ref": "#/components/schemas/StoreListingsWithVersionsResponse" } } } }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - }, - "post": { - "tags": ["v1", "api-keys"], - "summary": "Create new API key", - "description": "Create a new API key", - "operationId": "postV1Create new api key", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/CreateAPIKeyRequest" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateAPIKeyResponse" - } - } - } }, "422": { "description": "Validation Error", @@ -2460,37 +4875,37 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } - }, - "security": [{ "HTTPBearerJWT": [] }] + } } }, - "/api/api-keys/{key_id}": { + "/api/store/admin/submissions/download/{store_listing_version_id}": { "get": { - "tags": ["v1", "api-keys"], - "summary": "Get specific API key", - "description": "Get a specific API 
key", - "operationId": "getV1Get specific api key", + "tags": ["v2", "admin", "store", "admin", "store", "admin"], + "summary": "Admin Download Agent File", + "description": "Download the agent file by streaming its content.\n\nArgs:\n store_listing_version_id (str): The ID of the agent to download\n\nReturns:\n StreamingResponse: A streaming response containing the agent's graph data.\n\nRaises:\n HTTPException: If the agent is not found or an unexpected error occurs.", + "operationId": "getV2Admin download agent file", "security": [{ "HTTPBearerJWT": [] }], "parameters": [ { - "name": "key_id", + "name": "store_listing_version_id", "in": "path", "required": true, - "schema": { "type": "string", "title": "Key Id" } + "schema": { + "type": "string", + "description": "The ID of the agent to download", + "title": "Store Listing Version Id" + }, + "description": "The ID of the agent to download" } ], "responses": { "200": { "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/APIKeyInfo" } - } - } + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" }, "422": { "description": "Validation Error", @@ -2499,100 +4914,23 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "delete": { - "tags": ["v1", "api-keys"], - "summary": "Revoke API key", - "description": "Revoke an API key", - "operationId": "deleteV1Revoke api key", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "key_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Key Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/APIKeyInfo" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } } }, - "/api/api-keys/{key_id}/suspend": { + "/api/store/admin/submissions/{store_listing_version_id}/review": { "post": { - "tags": ["v1", "api-keys"], - "summary": "Suspend API key", - "description": "Suspend an API key", - "operationId": "postV1Suspend api key", + "tags": ["v2", "admin", "store", "admin"], + "summary": "Review Store Submission", + "description": "Review a store listing submission.\n\nArgs:\n store_listing_version_id: ID of the submission to review\n request: Review details including approval status and comments\n user_id: Authenticated admin user performing the review\n\nReturns:\n StoreSubmission with updated review information", + "operationId": "postV2Review store submission", "security": [{ "HTTPBearerJWT": [] }], "parameters": [ { - "name": "key_id", + "name": "store_listing_version_id", "in": "path", "required": true, - "schema": { "type": "string", "title": "Key Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/APIKeyInfo" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } 
- }, - "/api/api-keys/{key_id}/permissions": { - "put": { - "tags": ["v1", "api-keys"], - "summary": "Update key permissions", - "description": "Update API key permissions", - "operationId": "putV1Update key permissions", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "key_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Key Id" } + "schema": { "type": "string", "title": "Store Listing Version Id" } } ], "requestBody": { @@ -2600,7 +4938,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UpdatePermissionsRequest" + "$ref": "#/components/schemas/ReviewSubmissionRequest" } } } @@ -2610,10 +4948,13 @@ "description": "Successful Response", "content": { "application/json": { - "schema": { "$ref": "#/components/schemas/APIKeyInfo" } + "schema": { "$ref": "#/components/schemas/StoreSubmission" } } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -2621,71 +4962,10 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } } }, - "/api/store/profile": { - "get": { - "tags": ["v2", "store", "private"], - "summary": "Get user profile", - "description": "Get the profile details for the authenticated user.\nCached for 1 hour per user.", - "operationId": "getV2Get user profile", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/ProfileDetails" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - }, - "post": { - "tags": ["v2", "store", "private"], - "summary": "Update user profile", - "description": "Update the store profile for the authenticated user.\n\nArgs:\n profile (Profile): The updated profile details\n user_id (str): ID of the authenticated user\n\nReturns:\n CreatorDetails: The updated profile\n\nRaises:\n HTTPException: If there is an error updating the profile", - "operationId": "postV2Update user profile", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/Profile" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/CreatorDetails" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, "/api/store/agents": { "get": { "tags": ["v2", "store", "public"], @@ -2778,6 +5058,44 @@ } } }, + "/api/store/agents/{store_listing_version_id}": { + "get": { + "tags": ["v2", "store"], + "summary": "Get agent by version", + "description": "Get Store Agent Details from Store Listing Version ID.", + "operationId": "getV2Get agent by version", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "store_listing_version_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Store Listing Version Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": 
"#/components/schemas/StoreAgentDetails" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, "/api/store/agents/{username}/{agent_name}": { "get": { "tags": ["v2", "store", "public"], @@ -2818,78 +5136,6 @@ } } }, - "/api/store/graph/{store_listing_version_id}": { - "get": { - "tags": ["v2", "store"], - "summary": "Get agent graph", - "description": "Get Agent Graph from Store Listing Version ID.", - "operationId": "getV2Get agent graph", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "store_listing_version_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Store Listing Version Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/store/agents/{store_listing_version_id}": { - "get": { - "tags": ["v2", "store"], - "summary": "Get agent by version", - "description": "Get Store Agent Details from Store Listing Version ID.", - "operationId": "getV2Get agent by version", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "store_listing_version_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Store Listing Version Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/StoreAgentDetails" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, "/api/store/agents/{username}/{agent_name}/review": { "post": { "tags": ["v2", "store"], @@ -2928,6 +5174,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -2935,9 +5184,40 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } + } + } + } + }, + "/api/store/creator/{username}": { + "get": { + "tags": ["v2", "store", "public"], + "summary": "Get creator details", + "description": "Get the details of a creator.\n- Creator Details Page", + "operationId": "getV2Get creator details", + "parameters": [ + { + "name": "username", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Username" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/CreatorDetails" } + } + } }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } } } } @@ -3016,28 +5296,29 @@ } } }, - "/api/store/creator/{username}": { + "/api/store/download/agents/{store_listing_version_id}": { "get": { "tags": ["v2", "store", "public"], - "summary": "Get creator details", 
- "description": "Get the details of a creator.\n- Creator Details Page", - "operationId": "getV2Get creator details", + "summary": "Download agent file", + "description": "Download the agent file by streaming its content.\n\nArgs:\n store_listing_version_id (str): The ID of the agent to download\n\nReturns:\n StreamingResponse: A streaming response containing the agent's graph data.\n\nRaises:\n HTTPException: If the agent is not found or an unexpected error occurs.", + "operationId": "getV2Download agent file", "parameters": [ { - "name": "username", + "name": "store_listing_version_id", "in": "path", "required": true, - "schema": { "type": "string", "title": "Username" } + "schema": { + "type": "string", + "description": "The ID of the agent to download", + "title": "Store Listing Version Id" + }, + "description": "The ID of the agent to download" } ], "responses": { "200": { "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/CreatorDetails" } - } - } + "content": { "application/json": { "schema": {} } } }, "422": { "description": "Validation Error", @@ -3050,6 +5331,54 @@ } } }, + "/api/store/graph/{store_listing_version_id}": { + "get": { + "tags": ["v2", "store"], + "summary": "Get agent graph", + "description": "Get Agent Graph from Store Listing Version ID.", + "operationId": "getV2Get agent graph", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "store_listing_version_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Store Listing Version Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/store/metrics/cache": { + "get": { + "tags": ["v2", "store", "metrics"], + "summary": "Get cache metrics in Prometheus format", + "description": "Get cache metrics in Prometheus text format.\n\nReturns Prometheus-compatible metrics for monitoring cache performance.\nMetrics include size, maxsize, TTL, and hit rate for each cache.\n\nReturns:\n str: Prometheus-formatted metrics text", + "operationId": "getV2Get cache metrics in prometheus format", + "responses": { + "200": { + "description": "Successful Response", + "content": { "text/plain": { "schema": { "type": "string" } } } + } + } + } + }, "/api/store/myagents": { "get": { "tags": ["v2", "store", "private"], @@ -3090,6 +5419,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -3097,40 +5429,56 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } } }, - "/api/store/submissions/{submission_id}": { - "delete": { + "/api/store/profile": { + "get": { "tags": ["v2", "store", "private"], - "summary": "Delete store submission", - "description": "Delete a store listing submission.\n\nArgs:\n user_id (str): ID of the authenticated user\n submission_id (str): ID of the submission to be deleted\n\nReturns:\n bool: True if the submission was successfully deleted, False otherwise", - "operationId": "deleteV2Delete store submission", - "security": [{ "HTTPBearerJWT": [] }], - 
"parameters": [ - { - "name": "submission_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Submission Id" } - } - ], + "summary": "Get user profile", + "description": "Get the profile details for the authenticated user.\nCached for 1 hour per user.", + "operationId": "getV2Get user profile", "responses": { "200": { "description": "Successful Response", "content": { "application/json": { - "schema": { - "type": "boolean", - "title": "Response Deletev2Delete Store Submission" - } + "schema": { "$ref": "#/components/schemas/ProfileDetails" } } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + }, + "post": { + "tags": ["v2", "store", "private"], + "summary": "Update user profile", + "description": "Update the store profile for the authenticated user.\n\nArgs:\n profile (Profile): The updated profile details\n user_id (str): ID of the authenticated user\n\nReturns:\n CreatorDetails: The updated profile\n\nRaises:\n HTTPException: If there is an error updating the profile", + "operationId": "postV2Update user profile", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/Profile" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/CreatorDetails" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -3138,11 +5486,9 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } - } + }, + "security": [{ "HTTPBearerJWT": [] }] } }, "/api/store/submissions": { @@ -3177,6 +5523,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -3184,9 +5533,6 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } }, @@ -3215,6 +5561,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -3222,13 +5571,80 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } + } + } + } + }, + "/api/store/submissions/generate_image": { + "post": { + "tags": ["v2", "store", "private"], + "summary": "Generate submission image", + "description": "Generate an image for a store listing submission.\n\nArgs:\n agent_id (str): ID of the agent to generate an image for\n user_id (str): ID of the authenticated user\n\nReturns:\n JSONResponse: JSON containing the URL of the generated image", + "operationId": "postV2Generate submission image", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "agent_id", + "in": "query", + "required": true, + "schema": { "type": "string", "title": "Agent Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } } } } }, + "/api/store/submissions/media": { + "post": { + "tags": ["v2", 
"store", "private"], + "summary": "Upload submission media", + "description": "Upload media (images/videos) for a store listing submission.\n\nArgs:\n file (UploadFile): The media file to upload\n user_id (str): ID of the authenticated user uploading the media\n\nReturns:\n str: URL of the uploaded media file\n\nRaises:\n HTTPException: If there is an error uploading the media", + "operationId": "postV2Upload submission media", + "requestBody": { + "content": { + "multipart/form-data": { + "schema": { + "$ref": "#/components/schemas/Body_postV2Upload_submission_media" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, "/api/store/submissions/{store_listing_version_id}": { "put": { "tags": ["v2", "store", "private"], @@ -3263,113 +5679,8 @@ } } }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/store/submissions/media": { - "post": { - "tags": ["v2", "store", "private"], - "summary": "Upload submission media", - "description": "Upload media (images/videos) for a store listing submission.\n\nArgs:\n file (UploadFile): The media file to upload\n user_id (str): ID of the authenticated user uploading the media\n\nReturns:\n str: URL of the uploaded media file\n\nRaises:\n HTTPException: If there is an error uploading the media", - "operationId": "postV2Upload submission media", - "requestBody": { - "content": { - "multipart/form-data": { - "schema": { - "$ref": "#/components/schemas/Body_postV2Upload_submission_media" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/store/submissions/generate_image": { - "post": { - "tags": ["v2", "store", "private"], - "summary": "Generate submission image", - "description": "Generate an image for a store listing submission.\n\nArgs:\n agent_id (str): ID of the agent to generate an image for\n user_id (str): ID of the authenticated user\n\nReturns:\n JSONResponse: JSON containing the URL of the generated image", - "operationId": "postV2Generate submission image", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "agent_id", - "in": "query", - "required": true, - "schema": { "type": "string", "title": "Agent Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - 
"/api/store/download/agents/{store_listing_version_id}": { - "get": { - "tags": ["v2", "store", "public"], - "summary": "Download agent file", - "description": "Download the agent file by streaming its content.\n\nArgs:\n store_listing_version_id (str): The ID of the agent to download\n\nReturns:\n StreamingResponse: A streaming response containing the agent's graph data.\n\nRaises:\n HTTPException: If the agent is not found or an unexpected error occurs.", - "operationId": "getV2Download agent file", - "parameters": [ - { - "name": "store_listing_version_id", - "in": "path", - "required": true, - "schema": { - "type": "string", - "description": "The ID of the agent to download", - "title": "Store Listing Version Id" - }, - "description": "The ID of the agent to download" - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } }, "422": { "description": "Validation Error", @@ -3382,1411 +5693,19 @@ } } }, - "/api/store/metrics/cache": { - "get": { - "tags": ["v2", "store", "metrics"], - "summary": "Get cache metrics in Prometheus format", - "description": "Get cache metrics in Prometheus text format.\n\nReturns Prometheus-compatible metrics for monitoring cache performance.\nMetrics include size, maxsize, TTL, and hit rate for each cache.\n\nReturns:\n str: Prometheus-formatted metrics text", - "operationId": "getV2Get cache metrics in prometheus format", - "responses": { - "200": { - "description": "Successful Response", - "content": { "text/plain": { "schema": { "type": "string" } } } - } - } - } - }, - "/api/builder/suggestions": { - "get": { - "tags": ["v2"], - "summary": "Get Builder suggestions", - "description": "Get all suggestions for the Blocks Menu.", - "operationId": "getV2Get builder suggestions", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/SuggestionsResponse" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/builder/categories": { - "get": { - "tags": ["v2"], - "summary": "Get Builder block categories", - "description": "Get all block categories with a specified number of blocks per category.", - "operationId": "getV2Get builder block categories", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "blocks_per_category", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "default": 3, - "title": "Blocks Per Category" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/BlockCategoryResponse" - }, - "title": "Response Getv2Get Builder Block Categories" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/builder/blocks": { - "get": { - "tags": ["v2"], - "summary": "Get Builder blocks", - "description": "Get blocks based on either category, type, or provider.", - "operationId": "getV2Get builder blocks", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "category", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": 
"null" }], - "title": "Category" - } - }, - { - "name": "type", - "in": "query", - "required": false, - "schema": { - "anyOf": [ - { - "enum": ["all", "input", "action", "output"], - "type": "string" - }, - { "type": "null" } - ], - "title": "Type" - } - }, - { - "name": "provider", - "in": "query", - "required": false, - "schema": { - "anyOf": [ - { - "type": "string", - "description": "Provider name for integrations. Can be any string value, including custom provider names." - }, - { "type": "null" } - ], - "title": "Provider" - } - }, - { - "name": "page", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 1, "title": "Page" } - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 50, "title": "Page Size" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/BlockResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/builder/blocks/batch": { - "get": { - "tags": ["v2"], - "summary": "Get specific blocks", - "description": "Get specific blocks by their IDs.", - "operationId": "getV2Get specific blocks", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "block_ids", - "in": "query", - "required": true, - "schema": { - "type": "array", - "items": { "type": "string" }, - "title": "Block Ids" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { "$ref": "#/components/schemas/BlockInfo" }, - "title": "Response Getv2Get Specific Blocks" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/builder/providers": { - "get": { - "tags": ["v2"], - "summary": "Get Builder integration providers", - "description": "Get all integration providers with their block counts.", - "operationId": "getV2Get builder integration providers", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "page", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 1, "title": "Page" } - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 50, "title": "Page Size" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/ProviderResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/builder/search": { - "get": { - "tags": ["v2", "store", "private"], - "summary": "Builder search", - "description": "Search for blocks (including integrations), marketplace agents, and user library agents.", - "operationId": "getV2Builder search", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": 
"search_query", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "title": "Search Query" - } - }, - { - "name": "filter", - "in": "query", - "required": false, - "schema": { - "anyOf": [ - { - "type": "array", - "items": { - "enum": [ - "blocks", - "integrations", - "marketplace_agents", - "my_agents" - ], - "type": "string" - } - }, - { "type": "null" } - ], - "title": "Filter" - } - }, - { - "name": "search_id", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "title": "Search Id" - } - }, - { - "name": "by_creator", - "in": "query", - "required": false, - "schema": { - "anyOf": [ - { "type": "array", "items": { "type": "string" } }, - { "type": "null" } - ], - "title": "By Creator" - } - }, - { - "name": "page", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 1, "title": "Page" } - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 50, "title": "Page Size" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/SearchResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/builder/counts": { - "get": { - "tags": ["v2"], - "summary": "Get Builder item counts", - "description": "Get item counts for the menu categories in the Blocks Menu.", - "operationId": "getV2Get builder item counts", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/CountResponse" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/store/admin/listings": { - "get": { - "tags": ["v2", "admin", "store", "admin"], - "summary": "Get Admin Listings History", - "description": "Get store listings with their version history for admins.\n\nThis provides a consolidated view of listings with their versions,\nallowing for an expandable UI in the admin dashboard.\n\nArgs:\n status: Filter by submission status (PENDING, APPROVED, REJECTED)\n search: Search by name, description, or user email\n page: Page number for pagination\n page_size: Number of items per page\n\nReturns:\n StoreListingsWithVersionsResponse with listings and their versions", - "operationId": "getV2Get admin listings history", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "status", - "in": "query", - "required": false, - "schema": { - "anyOf": [ - { "$ref": "#/components/schemas/SubmissionStatus" }, - { "type": "null" } - ], - "title": "Status" - } - }, - { - "name": "search", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "title": "Search" - } - }, - { - "name": "page", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 1, "title": "Page" } - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 20, "title": "Page Size" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - 
"schema": { - "$ref": "#/components/schemas/StoreListingsWithVersionsResponse" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/store/admin/submissions/{store_listing_version_id}/review": { - "post": { - "tags": ["v2", "admin", "store", "admin"], - "summary": "Review Store Submission", - "description": "Review a store listing submission.\n\nArgs:\n store_listing_version_id: ID of the submission to review\n request: Review details including approval status and comments\n user_id: Authenticated admin user performing the review\n\nReturns:\n StoreSubmission with updated review information", - "operationId": "postV2Review store submission", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "store_listing_version_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Store Listing Version Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ReviewSubmissionRequest" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/StoreSubmission" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/store/admin/submissions/download/{store_listing_version_id}": { - "get": { - "tags": ["v2", "admin", "store", "admin", "store", "admin"], - "summary": "Admin Download Agent File", - "description": "Download the agent file by streaming its content.\n\nArgs:\n store_listing_version_id (str): The ID of the agent to download\n\nReturns:\n StreamingResponse: A streaming response containing the agent's graph data.\n\nRaises:\n HTTPException: If the agent is not found or an unexpected error occurs.", - "operationId": "getV2Admin download agent file", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "store_listing_version_id", - "in": "path", - "required": true, - "schema": { - "type": "string", - "description": "The ID of the agent to download", - "title": "Store Listing Version Id" - }, - "description": "The ID of the agent to download" - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/credits/admin/add_credits": { - "post": { - "tags": ["v2", "admin", "credits", "admin"], - "summary": "Add Credits to User", - "operationId": "postV2Add credits to user", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Body_postV2Add_credits_to_user" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/AddUserCreditsResponse" - } - } - } - }, - "422": { - "description": 
"Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/credits/admin/users_history": { - "get": { - "tags": ["v2", "admin", "credits", "admin"], - "summary": "Get All Users History", - "operationId": "getV2Get all users history", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "search", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "title": "Search" - } - }, - { - "name": "page", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 1, "title": "Page" } - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 20, "title": "Page Size" } - }, - { - "name": "transaction_filter", - "in": "query", - "required": false, - "schema": { - "anyOf": [ - { "$ref": "#/components/schemas/CreditTransactionType" }, - { "type": "null" } - ], - "title": "Transaction Filter" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/UserHistoryResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/executions/admin/execution_analytics/config": { - "get": { - "tags": ["v2", "admin", "admin", "execution_analytics"], - "summary": "Get Execution Analytics Configuration", - "description": "Get the configuration for execution analytics including:\n- Available AI models with metadata\n- Default system and user prompts\n- Recommended model selection", - "operationId": "getV2Get execution analytics configuration", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ExecutionAnalyticsConfig" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/executions/admin/execution_analytics": { - "post": { - "tags": ["v2", "admin", "admin", "execution_analytics"], - "summary": "Generate Execution Analytics", - "description": "Generate activity summaries and correctness scores for graph executions.\n\nThis endpoint:\n1. Fetches all completed executions matching the criteria\n2. Identifies executions missing activity_status or correctness_score\n3. Generates missing data using AI in batches\n4. Updates the database with new stats\n5. 
Returns a detailed report of the analytics operation", - "operationId": "postV2Generate execution analytics", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ExecutionAnalyticsRequest" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ExecutionAnalyticsResponse" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/executions/admin/execution_accuracy_trends": { - "get": { - "tags": ["v2", "admin", "admin", "execution_analytics"], - "summary": "Get Execution Accuracy Trends and Alerts", - "description": "Get execution accuracy trends with moving averages and alert detection.\nSimple single-query approach.", - "operationId": "getV2Get execution accuracy trends and alerts", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "query", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "user_id", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "title": "User Id" - } - }, - { - "name": "days_back", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 30, "title": "Days Back" } - }, - { - "name": "drop_threshold", - "in": "query", - "required": false, - "schema": { - "type": "number", - "default": 10.0, - "title": "Drop Threshold" - } - }, - { - "name": "include_historical", - "in": "query", - "required": false, - "schema": { - "type": "boolean", - "default": false, - "title": "Include Historical" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/AccuracyTrendsResponse" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/review/pending": { - "get": { - "tags": ["v2", "executions", "review", "v2", "executions", "review"], - "summary": "Get Pending Reviews", - "description": "Get all pending reviews for the current user.\n\nRetrieves all reviews with status \"WAITING\" that belong to the authenticated user.\nResults are ordered by creation time (newest first).\n\nArgs:\n user_id: Authenticated user ID from security dependency\n\nReturns:\n List of pending review objects with status converted to typed literals\n\nRaises:\n HTTPException: If authentication fails or database error occurs\n\nNote:\n Reviews with invalid status values are logged as warnings but excluded\n from results rather than failing the entire request.", - "operationId": "getV2Get pending reviews", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "page", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "minimum": 1, - "description": "Page number (1-indexed)", - "default": 1, - "title": "Page" - }, - "description": "Page number (1-indexed)" - }, - { - "name": "page_size", - "in": "query", - "required": false, - 
"schema": { - "type": "integer", - "maximum": 100, - "minimum": 1, - "description": "Number of reviews per page", - "default": 25, - "title": "Page Size" - }, - "description": "Number of reviews per page" - } - ], - "responses": { - "200": { - "description": "List of pending reviews", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/PendingHumanReviewModel" - }, - "title": "Response Getv2Get Pending Reviews" - } - } - } - }, - "500": { - "description": "Server error", - "content": { "application/json": {} } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/review/execution/{graph_exec_id}": { - "get": { - "tags": ["v2", "executions", "review", "v2", "executions", "review"], - "summary": "Get Pending Reviews for Execution", - "description": "Get all pending reviews for a specific graph execution.\n\nRetrieves all reviews with status \"WAITING\" for the specified graph execution\nthat belong to the authenticated user. Results are ordered by creation time\n(oldest first) to preserve review order within the execution.\n\nArgs:\n graph_exec_id: ID of the graph execution to get reviews for\n user_id: Authenticated user ID from security dependency\n\nReturns:\n List of pending review objects for the specified execution\n\nRaises:\n HTTPException:\n - 403: If user doesn't own the graph execution\n - 500: If authentication fails or database error occurs\n\nNote:\n Only returns reviews owned by the authenticated user for security.\n Reviews with invalid status are excluded with warning logs.", - "operationId": "getV2Get pending reviews for execution", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_exec_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Exec Id" } - } - ], - "responses": { - "200": { - "description": "List of pending reviews for the execution", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/PendingHumanReviewModel" - }, - "title": "Response Getv2Get Pending Reviews For Execution" - } - } - } - }, - "400": { "description": "Invalid graph execution ID" }, - "403": { "description": "Access denied to graph execution" }, - "500": { - "description": "Server error", - "content": { "application/json": {} } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/review/action": { - "post": { - "tags": ["v2", "executions", "review", "v2", "executions", "review"], - "summary": "Process Review Action", - "description": "Process reviews with approve or reject actions.", - "operationId": "postV2ProcessReviewAction", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/ReviewRequest" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/ReviewResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": 
"#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/library/presets": { - "get": { - "tags": ["v2", "presets"], - "summary": "List presets", - "description": "Retrieve a paginated list of presets for the current user.", - "operationId": "getV2List presets", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "page", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "minimum": 1, - "default": 1, - "title": "Page" - } - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "minimum": 1, - "default": 10, - "title": "Page Size" - } - }, - { - "name": "graph_id", - "in": "query", - "required": true, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "description": "Allows to filter presets by a specific agent graph", - "title": "Graph Id" - }, - "description": "Allows to filter presets by a specific agent graph" - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/LibraryAgentPresetResponse" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "post": { - "tags": ["v2", "presets"], - "summary": "Create a new preset", - "description": "Create a new preset for the current user.", - "operationId": "postV2Create a new preset", - "security": [{ "HTTPBearerJWT": [] }], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "anyOf": [ - { - "$ref": "#/components/schemas/LibraryAgentPresetCreatable" - }, - { - "$ref": "#/components/schemas/LibraryAgentPresetCreatableFromGraphExecution" - } - ], - "title": "Preset" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgentPreset" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/library/presets/{preset_id}": { - "get": { - "tags": ["v2", "presets"], - "summary": "Get a specific preset", - "description": "Retrieve details for a specific preset by its ID.", - "operationId": "getV2Get a specific preset", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "preset_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Preset Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgentPreset" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "patch": { - "tags": ["v2", "presets"], - "summary": "Update an existing preset", - "description": "Update an existing preset by its ID.", - "operationId": "patchV2Update 
an existing preset", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "preset_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Preset Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/LibraryAgentPresetUpdatable" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgentPreset" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, + "/api/store/submissions/{submission_id}": { "delete": { - "tags": ["v2", "presets"], - "summary": "Delete a preset", - "description": "Delete an existing preset by its ID.", - "operationId": "deleteV2Delete a preset", + "tags": ["v2", "store", "private"], + "summary": "Delete store submission", + "description": "Delete a store listing submission.\n\nArgs:\n user_id (str): ID of the authenticated user\n submission_id (str): ID of the submission to be deleted\n\nReturns:\n bool: True if the submission was successfully deleted, False otherwise", + "operationId": "deleteV2Delete store submission", "security": [{ "HTTPBearerJWT": [] }], "parameters": [ { - "name": "preset_id", + "name": "submission_id", "in": "path", "required": true, - "schema": { "type": "string", "title": "Preset Id" } - } - ], - "responses": { - "204": { "description": "Successful Response" }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/library/presets/setup-trigger": { - "post": { - "tags": ["v2", "presets"], - "summary": "Setup Trigger", - "description": "Sets up a webhook-triggered `LibraryAgentPreset` for a `LibraryAgent`.\nReturns the correspondingly created `LibraryAgentPreset` with `webhook_id` set.", - "operationId": "postV2SetupTrigger", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/TriggeredPresetSetupRequest" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgentPreset" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/library/presets/{preset_id}/execute": { - "post": { - "tags": ["v2", "presets", "presets"], - "summary": "Execute a preset", - "description": "Execute a preset with the given graph and node input for the current user.", - "operationId": "postV2Execute a preset", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "preset_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Preset Id" } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Body_postV2Execute_a_preset" - } - } - } - }, - "responses": { 
- "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/GraphExecutionMeta" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/library/agents": { - "get": { - "tags": ["v2", "library", "private"], - "summary": "List Library Agents", - "description": "Get all agents in the user's library (both created and saved).\n\nArgs:\n user_id: ID of the authenticated user.\n search_term: Optional search term to filter agents by name/description.\n filter_by: List of filters to apply (favorites, created by user).\n sort_by: List of sorting criteria (created date, updated date).\n page: Page number to retrieve.\n page_size: Number of agents per page.\n\nReturns:\n A LibraryAgentResponse containing agents and pagination metadata.\n\nRaises:\n HTTPException: If a server/database error occurs.", - "operationId": "getV2List library agents", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "search_term", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "description": "Search term to filter agents", - "title": "Search Term" - }, - "description": "Search term to filter agents" - }, - { - "name": "sort_by", - "in": "query", - "required": false, - "schema": { - "$ref": "#/components/schemas/LibraryAgentSort", - "description": "Criteria to sort results by", - "default": "updatedAt" - }, - "description": "Criteria to sort results by" - }, - { - "name": "page", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "minimum": 1, - "description": "Page number to retrieve (must be >= 1)", - "default": 1, - "title": "Page" - }, - "description": "Page number to retrieve (must be >= 1)" - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "minimum": 1, - "description": "Number of agents per page (must be >= 1)", - "default": 15, - "title": "Page Size" - }, - "description": "Number of agents per page (must be >= 1)" - } - ], - "responses": { - "200": { - "description": "List of library agents", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/LibraryAgentResponse" - } - } - } - }, - "500": { - "description": "Server error", - "content": { "application/json": {} } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "post": { - "tags": ["v2", "library", "private"], - "summary": "Add Marketplace Agent", - "description": "Add an agent from the marketplace to the user's library.\n\nArgs:\n store_listing_version_id: ID of the store listing version to add.\n user_id: ID of the authenticated user.\n\nReturns:\n library_model.LibraryAgent: Agent added to the library\n\nRaises:\n HTTPException(404): If the listing version is not found.\n HTTPException(500): If a server/database error occurs.", - "operationId": "postV2Add marketplace agent", - "security": [{ "HTTPBearerJWT": [] }], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": 
"#/components/schemas/Body_postV2Add_marketplace_agent" - } - } - } - }, - "responses": { - "201": { - "description": "Agent added successfully", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgent" } - } - } - }, - "404": { "description": "Store listing version not found" }, - "500": { "description": "Server error" }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/library/agents/favorites": { - "get": { - "tags": ["v2", "library", "private"], - "summary": "List Favorite Library Agents", - "description": "Get all favorite agents in the user's library.\n\nArgs:\n user_id: ID of the authenticated user.\n page: Page number to retrieve.\n page_size: Number of agents per page.\n\nReturns:\n A LibraryAgentResponse containing favorite agents and pagination metadata.\n\nRaises:\n HTTPException: If a server/database error occurs.", - "operationId": "getV2List favorite library agents", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "page", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "minimum": 1, - "description": "Page number to retrieve (must be >= 1)", - "default": 1, - "title": "Page" - }, - "description": "Page number to retrieve (must be >= 1)" - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "minimum": 1, - "description": "Number of agents per page (must be >= 1)", - "default": 15, - "title": "Page Size" - }, - "description": "Number of agents per page (must be >= 1)" + "schema": { "type": "string", "title": "Submission Id" } } ], "responses": { @@ -4795,447 +5714,14 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/LibraryAgentResponse" - } - } - } - }, - "500": { - "description": "Server error", - "content": { "application/json": {} } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/library/agents/{library_agent_id}": { - "get": { - "tags": ["v2", "library", "private"], - "summary": "Get Library Agent", - "operationId": "getV2Get library agent", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "library_agent_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Library Agent Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgent" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "patch": { - "tags": ["v2", "library", "private"], - "summary": "Update Library Agent", - "description": "Update the library agent with the given fields.\n\nArgs:\n library_agent_id: ID of the library agent to update.\n payload: Fields to update (auto_update_version, is_favorite, etc.).\n user_id: ID of the authenticated user.\n\nRaises:\n HTTPException(500): If a server/database error occurs.", - 
"operationId": "patchV2Update library agent", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "library_agent_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Library Agent Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/LibraryAgentUpdateRequest" - } - } - } - }, - "responses": { - "200": { - "description": "Agent updated successfully", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgent" } - } - } - }, - "500": { "description": "Server error" }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "delete": { - "tags": ["v2", "library", "private"], - "summary": "Delete Library Agent", - "description": "Soft-delete the specified library agent.\n\nArgs:\n library_agent_id: ID of the library agent to delete.\n user_id: ID of the authenticated user.\n\nReturns:\n 204 No Content if successful.\n\nRaises:\n HTTPException(404): If the agent does not exist.\n HTTPException(500): If a server/database error occurs.", - "operationId": "deleteV2Delete library agent", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "library_agent_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Library Agent Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "204": { "description": "Agent deleted successfully" }, - "404": { "description": "Agent not found" }, - "500": { "description": "Server error" }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/library/agents/by-graph/{graph_id}": { - "get": { - "tags": ["v2", "library", "private"], - "summary": "Get Library Agent By Graph Id", - "operationId": "getV2GetLibraryAgentByGraphId", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "version", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "integer" }, { "type": "null" }], - "title": "Version" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgent" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/library/agents/marketplace/{store_listing_version_id}": { - "get": { - "tags": ["v2", "library", "private", "store", "library"], - "summary": "Get Agent By Store ID", - "description": "Get Library Agent from Store Listing Version ID.", - "operationId": "getV2Get agent by store id", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "store_listing_version_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": 
"Store Listing Version Id" } - } - ], - "responses": { - "200": { - "description": "Library agent found", - "content": { - "application/json": { - "schema": { - "anyOf": [ - { "$ref": "#/components/schemas/LibraryAgent" }, - { "type": "null" } - ], - "title": "Response Getv2Get Agent By Store Id" - } - } - } - }, - "404": { "description": "Agent not found" }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/library/agents/{library_agent_id}/fork": { - "post": { - "tags": ["v2", "library", "private"], - "summary": "Fork Library Agent", - "operationId": "postV2Fork library agent", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "library_agent_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Library Agent Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgent" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/otto/ask": { - "post": { - "tags": ["v2", "otto"], - "summary": "Proxy Otto Chat Request", - "description": "Proxy requests to Otto API while adding necessary security headers and logging.\nRequires an authenticated user.", - "operationId": "postV2Proxy otto chat request", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/ChatRequest" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/ApiResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/email/unsubscribe": { - "post": { - "tags": ["v1", "email"], - "summary": "One Click Email Unsubscribe", - "operationId": "postV1One click email unsubscribe", - "parameters": [ - { - "name": "token", - "in": "query", - "required": true, - "schema": { "type": "string", "title": "Token" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - } - } - } - }, - "/api/email/": { - "post": { - "tags": ["v1", "email"], - "summary": "Handle Postmark Email Webhooks", - "operationId": "postV1Handle postmark email webhooks", - "requestBody": { - "content": { - "application/json": { - "schema": { - "oneOf": [ - { "$ref": "#/components/schemas/PostmarkDeliveryWebhook" }, - { "$ref": "#/components/schemas/PostmarkBounceWebhook" }, - { - "$ref": "#/components/schemas/PostmarkSpamComplaintWebhook" - }, - { "$ref": "#/components/schemas/PostmarkOpenWebhook" }, - { "$ref": "#/components/schemas/PostmarkClickWebhook" }, - { - "$ref": 
"#/components/schemas/PostmarkSubscriptionChangeWebhook" - } - ], - "title": "Webhook", - "discriminator": { - "propertyName": "RecordType", - "mapping": { - "Delivery": "#/components/schemas/PostmarkDeliveryWebhook", - "Bounce": "#/components/schemas/PostmarkBounceWebhook", - "SpamComplaint": "#/components/schemas/PostmarkSpamComplaintWebhook", - "Open": "#/components/schemas/PostmarkOpenWebhook", - "Click": "#/components/schemas/PostmarkClickWebhook", - "SubscriptionChange": "#/components/schemas/PostmarkSubscriptionChangeWebhook" - } - } - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - } - }, - "security": [{ "APIKeyAuthenticator-X-Postmark-Webhook-Token": [] }] - } - }, - "/api/chat/sessions": { - "post": { - "tags": ["v2", "chat", "chat"], - "summary": "Create Session", - "description": "Create a new chat session.\n\nInitiates a new chat session for either an authenticated or anonymous user.\n\nArgs:\n user_id: The optional authenticated user ID parsed from the JWT. If missing, creates an anonymous session.\n\nReturns:\n CreateSessionResponse: Details of the created session.", - "operationId": "postV2CreateSession", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateSessionResponse" + "type": "boolean", + "title": "Response Deletev2Delete Store Submission" } } } }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/chat/sessions/{session_id}": { - "get": { - "tags": ["v2", "chat", "chat"], - "summary": "Get Session", - "description": "Retrieve the details of a specific chat session.\n\nLooks up a chat session by ID for the given user (if authenticated) and returns all session data including messages.\n\nArgs:\n session_id: The unique identifier for the desired chat session.\n user_id: The optional authenticated user ID, or None for anonymous access.\n\nReturns:\n SessionDetailResponse: Details for the requested session; raises NotFoundError if not found.", - "operationId": "getV2GetSession", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "session_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Session Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SessionDetailResponse" - } - } - } }, "422": { "description": "Validation Error", @@ -5244,491 +5730,6 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/chat/sessions/{session_id}/stream": { - "get": { - "tags": ["v2", "chat", "chat"], - "summary": "Stream Chat", - "description": "Stream chat responses for a session.\n\nStreams the AI/completion responses in real time over Server-Sent Events (SSE), including:\n - Text fragments as they are generated\n - Tool call UI elements (if invoked)\n - Tool execution results\n\nArgs:\n session_id: The chat session identifier to associate with the streamed messages.\n message: The user's new message to process.\n user_id: Optional 
authenticated user ID.\n is_user_message: Whether the message is a user message.\nReturns:\n StreamingResponse: SSE-formatted response chunks.", - "operationId": "getV2StreamChat", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "session_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Session Id" } - }, - { - "name": "message", - "in": "query", - "required": true, - "schema": { - "type": "string", - "minLength": 1, - "maxLength": 10000, - "title": "Message" - } - }, - { - "name": "is_user_message", - "in": "query", - "required": false, - "schema": { - "type": "boolean", - "default": true, - "title": "Is User Message" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/chat/sessions/{session_id}/assign-user": { - "patch": { - "tags": ["v2", "chat", "chat"], - "summary": "Session Assign User", - "description": "Assign an authenticated user to a chat session.\n\nUsed (typically post-login) to claim an existing anonymous session as the current authenticated user.\n\nArgs:\n session_id: The identifier for the (previously anonymous) session.\n user_id: The authenticated user's ID to associate with the session.\n\nReturns:\n dict: Status of the assignment.", - "operationId": "patchV2SessionAssignUser", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "session_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Session Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "type": "object", - "additionalProperties": true, - "title": "Response Patchv2Sessionassignuser" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/chat/health": { - "get": { - "tags": ["v2", "chat", "chat"], - "summary": "Health Check", - "description": "Health check endpoint for the chat service.\n\nPerforms a full cycle test of session creation, assignment, and retrieval. 
Should always return healthy\nif the service and data layer are operational.\n\nReturns:\n dict: A status dictionary indicating health, service name, and API version.", - "operationId": "getV2HealthCheck", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "additionalProperties": true, - "type": "object", - "title": "Response Getv2Healthcheck" - } - } - } - } - } - } - }, - "/api/oauth/app/{client_id}": { - "get": { - "tags": ["oauth"], - "summary": "Get Oauth App Info", - "description": "Get public information about an OAuth application.\n\nThis endpoint is used by the consent screen to display application details\nto the user before they authorize access.\n\nReturns:\n- name: Application name\n- description: Application description (if provided)\n- scopes: List of scopes the application is allowed to request", - "operationId": "getOauthGetOauthAppInfo", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "client_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Client Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/OAuthApplicationPublicInfo" - } - } - } - }, - "404": { "description": "Application not found or disabled" }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/oauth/authorize": { - "post": { - "tags": ["oauth"], - "summary": "Authorize", - "description": "OAuth 2.0 Authorization Endpoint\n\nUser must be logged in (authenticated with Supabase JWT).\nThis endpoint creates an authorization code and returns a redirect URL.\n\nPKCE (Proof Key for Code Exchange) is REQUIRED for all authorization requests.\n\nThe frontend consent screen should call this endpoint after the user approves,\nthen redirect the user to the returned `redirect_url`.\n\nRequest Body:\n- client_id: The OAuth application's client ID\n- redirect_uri: Where to redirect after authorization (must match registered URI)\n- scopes: List of permissions (e.g., \"EXECUTE_GRAPH READ_GRAPH\")\n- state: Anti-CSRF token provided by client (will be returned in redirect)\n- response_type: Must be \"code\" (for authorization code flow)\n- code_challenge: PKCE code challenge (required)\n- code_challenge_method: \"S256\" (recommended) or \"plain\"\n\nReturns:\n- redirect_url: The URL to redirect the user to (includes authorization code)\n\nError cases return a redirect_url with error parameters, or raise HTTPException\nfor critical errors (like invalid redirect_uri).", - "operationId": "postOauthAuthorize", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/AuthorizeRequest" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/AuthorizeResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/oauth/token": { - "post": { - 
"tags": ["oauth"], - "summary": "Token", - "description": "OAuth 2.0 Token Endpoint\n\nExchanges authorization code or refresh token for access token.\n\nGrant Types:\n1. authorization_code: Exchange authorization code for tokens\n - Required: grant_type, code, redirect_uri, client_id, client_secret\n - Optional: code_verifier (required if PKCE was used)\n\n2. refresh_token: Exchange refresh token for new access token\n - Required: grant_type, refresh_token, client_id, client_secret\n\nReturns:\n- access_token: Bearer token for API access (1 hour TTL)\n- token_type: \"Bearer\"\n- expires_in: Seconds until access token expires\n- refresh_token: Token for refreshing access (30 days TTL)\n- scopes: List of scopes", - "operationId": "postOauthToken", - "requestBody": { - "content": { - "application/json": { - "schema": { - "anyOf": [ - { "$ref": "#/components/schemas/TokenRequestByCode" }, - { "$ref": "#/components/schemas/TokenRequestByRefreshToken" } - ], - "title": "Request" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/TokenResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - } - } - } - }, - "/api/oauth/introspect": { - "post": { - "tags": ["oauth"], - "summary": "Introspect", - "description": "OAuth 2.0 Token Introspection Endpoint (RFC 7662)\n\nAllows clients to check if a token is valid and get its metadata.\n\nReturns:\n- active: Whether the token is currently active\n- scopes: List of authorized scopes (if active)\n- client_id: The client the token was issued to (if active)\n- user_id: The user the token represents (if active)\n- exp: Expiration timestamp (if active)\n- token_type: \"access_token\" or \"refresh_token\" (if active)", - "operationId": "postOauthIntrospect", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Body_postOauthIntrospect" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/TokenIntrospectionResult" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - } - } - } - }, - "/api/oauth/revoke": { - "post": { - "tags": ["oauth"], - "summary": "Revoke", - "description": "OAuth 2.0 Token Revocation Endpoint (RFC 7009)\n\nAllows clients to revoke an access or refresh token.\n\nNote: Revoking a refresh token does NOT revoke associated access tokens.\nRevoking an access token does NOT revoke the associated refresh token.", - "operationId": "postOauthRevoke", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/Body_postOauthRevoke" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - } - } - } - }, - "/api/oauth/apps/mine": { - "get": { - "tags": ["oauth"], - "summary": "List My Oauth Apps", - "description": "List all OAuth applications owned by the current 
user.\n\nReturns a list of OAuth applications with their details including:\n- id, name, description, logo_url\n- client_id (public identifier)\n- redirect_uris, grant_types, scopes\n- is_active status\n- created_at, updated_at timestamps\n\nNote: client_secret is never returned for security reasons.", - "operationId": "getOauthListMyOauthApps", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "items": { - "$ref": "#/components/schemas/OAuthApplicationInfo" - }, - "type": "array", - "title": "Response Getoauthlistmyoauthapps" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/oauth/apps/{app_id}/status": { - "patch": { - "tags": ["oauth"], - "summary": "Update App Status", - "description": "Enable or disable an OAuth application.\n\nOnly the application owner can update the status.\nWhen disabled, the application cannot be used for new authorizations\nand existing access tokens will fail validation.\n\nReturns the updated application info.", - "operationId": "patchOauthUpdateAppStatus", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "app_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "App Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Body_patchOauthUpdateAppStatus" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/OAuthApplicationInfo" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/oauth/apps/{app_id}/logo": { - "patch": { - "tags": ["oauth"], - "summary": "Update App Logo", - "description": "Update the logo URL for an OAuth application.\n\nOnly the application owner can update the logo.\nThe logo should be uploaded first using the media upload endpoint,\nthen this endpoint is called with the resulting URL.\n\nLogo requirements:\n- Must be square (1:1 aspect ratio)\n- Minimum 512x512 pixels\n- Maximum 2048x2048 pixels\n\nReturns the updated application info.", - "operationId": "patchOauthUpdateAppLogo", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "app_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "App Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/UpdateAppLogoRequest" } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/OAuthApplicationInfo" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/oauth/apps/{app_id}/logo/upload": { - "post": { - "tags": ["oauth"], - "summary": "Upload App Logo", - "description": "Upload a logo image for an OAuth application.\n\nRequirements:\n- Image must be square (1:1 aspect ratio)\n- 
Minimum 512x512 pixels\n- Maximum 2048x2048 pixels\n- Allowed formats: JPEG, PNG, WebP\n- Maximum file size: 3MB\n\nThe image is uploaded to cloud storage and the app's logoUrl is updated.\nReturns the updated application info.", - "operationId": "postOauthUploadAppLogo", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "app_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "App Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "multipart/form-data": { - "schema": { - "$ref": "#/components/schemas/Body_postOauthUploadAppLogo" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/OAuthApplicationInfo" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } } @@ -6318,6 +6319,25 @@ "required": ["is_active"], "title": "Body_patchOauthUpdateAppStatus" }, + "Body_postAnalyticsLogRawAnalytics": { + "properties": { + "type": { "type": "string", "title": "Type" }, + "data": { + "additionalProperties": true, + "type": "object", + "title": "Data", + "description": "The data to log" + }, + "data_index": { + "type": "string", + "title": "Data Index", + "description": "Indexable field for any count based analytical measures like page order clicking, tutorial step completion, etc." + } + }, + "type": "object", + "required": ["type", "data", "data_index"], + "title": "Body_postAnalyticsLogRawAnalytics" + }, "Body_postOauthIntrospect": { "properties": { "token": { @@ -6426,25 +6446,6 @@ "type": "object", "title": "Body_postV1Execute graph agent" }, - "Body_postV1LogRawAnalytics": { - "properties": { - "type": { "type": "string", "title": "Type" }, - "data": { - "additionalProperties": true, - "type": "object", - "title": "Data", - "description": "The data to log" - }, - "data_index": { - "type": "string", - "title": "Data Index", - "description": "Indexable field for any count based analytical measures like page order clicking, tutorial step completion, etc." 
- } - }, - "type": "object", - "required": ["type", "data", "data_index"], - "title": "Body_postV1LogRawAnalytics" - }, "Body_postV1Upload_file_to_cloud_storage": { "properties": { "file": { "type": "string", "format": "binary", "title": "File" } @@ -11627,15 +11628,15 @@ } }, "securitySchemes": { - "HTTPBearerJWT": { - "type": "http", - "scheme": "bearer", - "bearerFormat": "jwt" - }, "APIKeyAuthenticator-X-Postmark-Webhook-Token": { "type": "apiKey", "in": "header", "name": "X-Postmark-Webhook-Token" + }, + "HTTPBearerJWT": { + "type": "http", + "scheme": "bearer", + "bearerFormat": "jwt" } }, "responses": { From 08a60dcb9b33dfe486d1be0cf6bfe1c792dbd63b Mon Sep 17 00:00:00 2001 From: Reinier van der Leer Date: Sat, 20 Dec 2025 22:46:24 +0100 Subject: [PATCH 15/25] refactor(frontend): Clean up React Query-related code (#11604) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - #11603 ### Changes 🏗️ Frontend: - Make `okData` infer the response data type instead of casting - Generalize infinite query utilities from `SidebarRunsList/helpers.ts` - Move to `@/app/api/helpers` and use wherever possible - Simplify/replace boilerplate checks and conditions with `okData` in many places - Add `useUserTimezone` hook to replace all the boilerplate timezone queries Backend: - Fix response type annotation of `GET /api/store/graph/{store_listing_version_id}` endpoint - Fix documentation and error behavior of `GET /api/review/execution/{graph_exec_id}` endpoint ### Checklist 📋 #### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - CI passes - [x] Clicking around the app manually -> no obvious issues - [x] Test Onboarding step 5 (run) - [x] Library runs list loads normally --- .../executions/review/review_routes_test.py | 29 +++--- .../api/features/executions/review/routes.py | 9 +- .../backend/api/features/store/routes.py | 4 +- autogpt_platform/frontend/orval.config.ts | 6 ++ .../app/(no-navbar)/onboarding/5-run/page.tsx | 28 +++--- .../onboarding/5-run/useOnboardingRunStep.tsx | 66 ++++++------- .../app/(no-navbar)/share/[token]/page.tsx | 6 +- .../components/ExecutionAnalyticsForm.tsx | 21 ++--- .../auth/integrations/setup-wizard/page.tsx | 3 +- .../useCronSchedulerDialog.ts | 8 +- .../build/components/FlowEditor/Flow/Flow.tsx | 3 +- .../BlockMenuSearch/BlockMenuSearch.tsx | 6 +- .../BlockMenuSearch/useBlockMenuSearch.ts | 58 +++++------- .../IntegrationBlocks/useIntegrationBlocks.ts | 36 +++---- .../useMarketplaceAgentsContent.ts | 28 ++---- .../MyAgentsContent/useMyAgentsContent.ts | 26 ++--- .../usePaginatedBlocks.ts | 26 ++--- .../usePaginatedIntegrationList.ts | 26 ++--- .../src/app/(platform)/chat/useChatSession.ts | 7 +- .../TimezoneNotice/TimezoneNotice.tsx | 10 +- .../selected-views/AgentActionsDropdown.tsx | 6 +- .../useSelectedRunActions.ts | 13 +-- .../SelectedRunView/useSelectedRunView.ts | 25 ++--- .../SelectedScheduleView.tsx | 16 ++-- .../EditScheduleModal/useEditScheduleModal.ts | 5 +- .../useSelectedScheduleView.ts | 17 ++-- .../components/SelectedTemplateActions.tsx | 16 ++-- .../useSelectedTemplateView.ts | 11 +-- .../components/SelectedTriggerActions.tsx | 16 ++-- .../useSelectedTriggerView.ts | 3 +- .../components/TaskActionsDropdown.tsx | 6 +- .../sidebar/SidebarRunsList/helpers.ts | 44 --------- .../SidebarRunsList/useSidebarRunsList.ts | 24 +++-- .../useNewAgentLibraryView.ts | 9 +- .../agent-schedule-details-view.tsx 
| 8 +- .../components/cron-scheduler-dialog.tsx | 8 +- .../OldAgentLibraryView/use-agent-runs.ts | 38 +++----- .../LibraryAgentList/useLibraryAgentList.ts | 30 +++--- .../library/hooks/useFavoriteAgents.ts | 39 +++----- .../monitoring/components/SchedulesTable.tsx | 8 +- .../src/app/(platform)/monitoring/page.tsx | 4 +- .../components/APIKeySection/useAPISection.ts | 7 +- .../oauth-apps/components/useOAuthApps.ts | 3 +- .../profile/(user)/settings/page.tsx | 7 +- .../frontend/src/app/api/helpers.ts | 94 ++++++++++++++++++- .../frontend/src/app/api/openapi.json | 11 ++- .../GoogleDrivePicker/useGoogleDrivePicker.ts | 5 +- .../AgentSelectStep/AgentSelectStep.tsx | 6 +- .../AgentSelectStep/useAgentSelectStep.ts | 53 ++++++----- .../useAgentActivityDropdown.ts | 3 +- .../layout/Navbar/components/NavbarView.tsx | 3 +- .../FloatingReviewsPanel.tsx | 9 +- .../PendingReviewsList/PendingReviewsList.tsx | 12 +-- .../frontend/src/hooks/useAgentSafeMode.ts | 13 ++- .../frontend/src/hooks/usePendingReviews.ts | 5 +- .../frontend/src/lib/hooks/useUserTimezone.ts | 8 ++ .../src/lib/react-query/queryClient.ts | 4 + 57 files changed, 453 insertions(+), 542 deletions(-) delete mode 100644 autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/helpers.ts create mode 100644 autogpt_platform/frontend/src/lib/hooks/useUserTimezone.ts diff --git a/autogpt_platform/backend/backend/api/features/executions/review/review_routes_test.py b/autogpt_platform/backend/backend/api/features/executions/review/review_routes_test.py index 9d1df5f999..c4eba0befc 100644 --- a/autogpt_platform/backend/backend/api/features/executions/review/review_routes_test.py +++ b/autogpt_platform/backend/backend/api/features/executions/review/review_routes_test.py @@ -55,7 +55,7 @@ def sample_pending_review(test_user_id: str) -> PendingHumanReviewModel: def test_get_pending_reviews_empty( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, snapshot: Snapshot, test_user_id: str, ) -> None: @@ -73,7 +73,7 @@ def test_get_pending_reviews_empty( def test_get_pending_reviews_with_data( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, sample_pending_review: PendingHumanReviewModel, snapshot: Snapshot, test_user_id: str, @@ -95,7 +95,7 @@ def test_get_pending_reviews_with_data( def test_get_pending_reviews_for_execution_success( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, sample_pending_review: PendingHumanReviewModel, snapshot: Snapshot, test_user_id: str, @@ -122,9 +122,8 @@ def test_get_pending_reviews_for_execution_success( assert data[0]["graph_exec_id"] == "test_graph_exec_456" -def test_get_pending_reviews_for_execution_access_denied( - mocker: pytest_mock.MockFixture, - test_user_id: str, +def test_get_pending_reviews_for_execution_not_available( + mocker: pytest_mock.MockerFixture, ) -> None: """Test access denied when user doesn't own the execution""" mock_get_graph_execution = mocker.patch( @@ -134,12 +133,12 @@ def test_get_pending_reviews_for_execution_access_denied( response = client.get("/api/review/execution/test_graph_exec_456") - assert response.status_code == 403 - assert "Access denied" in response.json()["detail"] + assert response.status_code == 404 + assert "not found" in response.json()["detail"] def test_process_review_action_approve_success( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, sample_pending_review: PendingHumanReviewModel, test_user_id: str, ) 
-> None: @@ -203,7 +202,7 @@ def test_process_review_action_approve_success( def test_process_review_action_reject_success( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, sample_pending_review: PendingHumanReviewModel, test_user_id: str, ) -> None: @@ -263,7 +262,7 @@ def test_process_review_action_reject_success( def test_process_review_action_mixed_success( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, sample_pending_review: PendingHumanReviewModel, test_user_id: str, ) -> None: @@ -370,7 +369,7 @@ def test_process_review_action_mixed_success( def test_process_review_action_empty_request( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, test_user_id: str, ) -> None: """Test error when no reviews provided""" @@ -387,7 +386,7 @@ def test_process_review_action_empty_request( def test_process_review_action_review_not_found( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, test_user_id: str, ) -> None: """Test error when review is not found""" @@ -423,7 +422,7 @@ def test_process_review_action_review_not_found( def test_process_review_action_partial_failure( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, sample_pending_review: PendingHumanReviewModel, test_user_id: str, ) -> None: @@ -457,7 +456,7 @@ def test_process_review_action_partial_failure( def test_process_review_action_invalid_node_exec_id( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, sample_pending_review: PendingHumanReviewModel, test_user_id: str, ) -> None: diff --git a/autogpt_platform/backend/backend/api/features/executions/review/routes.py b/autogpt_platform/backend/backend/api/features/executions/review/routes.py index 4aa4fac49b..88646046da 100644 --- a/autogpt_platform/backend/backend/api/features/executions/review/routes.py +++ b/autogpt_platform/backend/backend/api/features/executions/review/routes.py @@ -67,8 +67,7 @@ async def list_pending_reviews( response_model=List[PendingHumanReviewModel], responses={ 200: {"description": "List of pending reviews for the execution"}, - 400: {"description": "Invalid graph execution ID"}, - 403: {"description": "Access denied to graph execution"}, + 404: {"description": "Graph execution not found"}, 500: {"description": "Server error", "content": {"application/json": {}}}, }, ) @@ -91,7 +90,7 @@ async def list_pending_reviews_for_execution( Raises: HTTPException: - - 403: If user doesn't own the graph execution + - 404: If the graph execution doesn't exist or isn't owned by this user - 500: If authentication fails or database error occurs Note: @@ -105,8 +104,8 @@ async def list_pending_reviews_for_execution( ) if not graph_exec: raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="Access denied to graph execution", + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Graph execution #{graph_exec_id} not found", ) return await get_pending_reviews_for_execution(graph_exec_id, user_id) diff --git a/autogpt_platform/backend/backend/api/features/store/routes.py b/autogpt_platform/backend/backend/api/features/store/routes.py index 6a9bb05291..7d4db50d3f 100644 --- a/autogpt_platform/backend/backend/api/features/store/routes.py +++ b/autogpt_platform/backend/backend/api/features/store/routes.py @@ -173,7 +173,9 @@ async def get_agent(username: str, agent_name: str): tags=["store"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], ) -async def get_graph_meta_by_store_listing_version_id(store_listing_version_id: 
str): +async def get_graph_meta_by_store_listing_version_id( + store_listing_version_id: str, +) -> backend.data.graph.GraphMeta: """ Get Agent Graph from Store Listing Version ID. """ diff --git a/autogpt_platform/frontend/orval.config.ts b/autogpt_platform/frontend/orval.config.ts index de305c1acc..dff857e1b6 100644 --- a/autogpt_platform/frontend/orval.config.ts +++ b/autogpt_platform/frontend/orval.config.ts @@ -41,6 +41,12 @@ export default defineConfig({ useInfiniteQueryParam: "page", }, }, + "getV2List presets": { + query: { + useInfinite: true, + useInfiniteQueryParam: "page", + }, + }, "getV1List graph executions": { query: { useInfinite: true, diff --git a/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/page.tsx b/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/page.tsx index 58960a0cf6..30e1b67090 100644 --- a/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/page.tsx +++ b/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/page.tsx @@ -25,7 +25,7 @@ export default function Page() { ready, error, showInput, - agent, + agentGraph, onboarding, storeAgent, runningAgent, @@ -76,19 +76,19 @@ export default function Page() { Input - {Object.entries(agent?.input_schema.properties || {}).map( - ([key, inputSubSchema]) => ( - handleSetAgentInput(key, value)} - /> - ), - )} + {Object.entries( + agentGraph?.input_schema.properties || {}, + ).map(([key, inputSubSchema]) => ( + handleSetAgentInput(key, value)} + /> + ))} ) || undefined @@ -104,7 +104,7 @@ export default function Page() { className="mt-8 w-[136px]" loading={runningAgent} disabled={isRunDisabled({ - agent, + agent: agentGraph, isRunning: runningAgent, agentInputs: (onboarding.state?.agentInput as unknown as InputValues) || diff --git a/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/useOnboardingRunStep.tsx b/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/useOnboardingRunStep.tsx index 37538a2191..f143c89d44 100644 --- a/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/useOnboardingRunStep.tsx +++ b/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/useOnboardingRunStep.tsx @@ -1,6 +1,3 @@ -import { CredentialsMetaInput } from "@/app/api/__generated__/models/credentialsMetaInput"; -import { GraphMeta } from "@/app/api/__generated__/models/graphMeta"; -import { StoreAgentDetails } from "@/app/api/__generated__/models/storeAgentDetails"; import { useToast } from "@/components/molecules/Toast/use-toast"; import { useBackendAPI } from "@/lib/autogpt-server-api/context"; import { useOnboarding } from "@/providers/onboarding/onboarding-provider"; @@ -8,20 +5,19 @@ import { useRouter } from "next/navigation"; import { useEffect, useState } from "react"; import { computeInitialAgentInputs } from "./helpers"; import { InputValues } from "./types"; +import { okData, resolveResponse } from "@/app/api/helpers"; +import { postV2AddMarketplaceAgent } from "@/app/api/__generated__/endpoints/library/library"; import { useGetV2GetAgentByVersion, useGetV2GetAgentGraph, } from "@/app/api/__generated__/endpoints/store/store"; -import { resolveResponse } from "@/app/api/helpers"; -import { postV2AddMarketplaceAgent } from "@/app/api/__generated__/endpoints/library/library"; +import { CredentialsMetaInput } from "@/app/api/__generated__/models/credentialsMetaInput"; import { GraphID } from "@/lib/autogpt-server-api"; export function useOnboardingRunStep() { const onboarding = useOnboarding(undefined, "AGENT_CHOICE"); const [showInput, 
setShowInput] = useState(false); - const [agent, setAgent] = useState(null); - const [storeAgent, setStoreAgent] = useState(null); const [runningAgent, setRunningAgent] = useState(false); const [inputCredentials, setInputCredentials] = useState< @@ -38,12 +34,26 @@ export function useOnboardingRunStep() { const currentAgentVersion = onboarding.state?.selectedStoreListingVersionId ?? ""; - const storeAgentQuery = useGetV2GetAgentByVersion(currentAgentVersion, { - query: { enabled: !!currentAgentVersion }, + const { + data: storeAgent, + error: storeAgentQueryError, + isSuccess: storeAgentQueryIsSuccess, + } = useGetV2GetAgentByVersion(currentAgentVersion, { + query: { + enabled: !!currentAgentVersion, + select: okData, + }, }); - const graphMetaQuery = useGetV2GetAgentGraph(currentAgentVersion, { - query: { enabled: !!currentAgentVersion }, + const { + data: agentGraphMeta, + error: agentGraphQueryError, + isSuccess: agentGraphQueryIsSuccess, + } = useGetV2GetAgentGraph(currentAgentVersion, { + query: { + enabled: !!currentAgentVersion, + select: okData, + }, }); useEffect(() => { @@ -51,29 +61,15 @@ export function useOnboardingRunStep() { }, []); useEffect(() => { - if (storeAgentQuery.data && storeAgentQuery.data.status === 200) { - setStoreAgent(storeAgentQuery.data.data); - } - }, [storeAgentQuery.data]); - - useEffect(() => { - if ( - graphMetaQuery.data && - graphMetaQuery.data.status === 200 && - onboarding.state - ) { - const graphMeta = graphMetaQuery.data.data as GraphMeta; - - setAgent(graphMeta); - - const update = computeInitialAgentInputs( - graphMeta, + if (agentGraphMeta && onboarding.state) { + const initialAgentInputs = computeInitialAgentInputs( + agentGraphMeta, (onboarding.state.agentInput as unknown as InputValues) || null, ); - onboarding.updateState({ agentInput: update }); + onboarding.updateState({ agentInput: initialAgentInputs }); } - }, [graphMetaQuery.data]); + }, [agentGraphMeta]); function handleNewRun() { if (!onboarding.state) return; @@ -95,7 +91,7 @@ export function useOnboardingRunStep() { } async function handleRunAgent() { - if (!agent || !storeAgent || !onboarding.state) { + if (!agentGraphMeta || !storeAgent || !onboarding.state) { toast({ title: "Error getting agent", description: @@ -142,12 +138,12 @@ export function useOnboardingRunStep() { } return { - ready: graphMetaQuery.isSuccess && storeAgentQuery.isSuccess, - error: graphMetaQuery.error || storeAgentQuery.error, - agent, + ready: agentGraphQueryIsSuccess && storeAgentQueryIsSuccess, + error: agentGraphQueryError || storeAgentQueryError, + agentGraph: agentGraphMeta || null, onboarding, showInput, - storeAgent, + storeAgent: storeAgent || null, runningAgent, credentialsValid, credentialsLoaded, diff --git a/autogpt_platform/frontend/src/app/(no-navbar)/share/[token]/page.tsx b/autogpt_platform/frontend/src/app/(no-navbar)/share/[token]/page.tsx index c24f9e11a3..1c37c6c72f 100644 --- a/autogpt_platform/frontend/src/app/(no-navbar)/share/[token]/page.tsx +++ b/autogpt_platform/frontend/src/app/(no-navbar)/share/[token]/page.tsx @@ -1,6 +1,7 @@ "use client"; import { RunOutputs } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunOutputs"; +import { okData } from "@/app/api/helpers"; import { useGetV1GetSharedExecution } from "@/app/api/__generated__/endpoints/default/default"; import { Card, @@ -17,12 +18,11 @@ export default function SharePage() { const token = params.token as string; const { - data: response, + 
data: executionData, isLoading: loading, error, - } = useGetV1GetSharedExecution(token); + } = useGetV1GetSharedExecution(token, { query: { select: okData } }); - const executionData = response?.status === 200 ? response.data : undefined; const is404 = !loading && !executionData; if (loading) { diff --git a/autogpt_platform/frontend/src/app/(platform)/admin/execution-analytics/components/ExecutionAnalyticsForm.tsx b/autogpt_platform/frontend/src/app/(platform)/admin/execution-analytics/components/ExecutionAnalyticsForm.tsx index fd77628140..5aced56090 100644 --- a/autogpt_platform/frontend/src/app/(platform)/admin/execution-analytics/components/ExecutionAnalyticsForm.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/admin/execution-analytics/components/ExecutionAnalyticsForm.tsx @@ -41,6 +41,7 @@ interface FormData extends Omit { // All other fields use the generated types as-is } import { AnalyticsResultsTable } from "./AnalyticsResultsTable"; +import { okData } from "@/app/api/helpers"; export function ExecutionAnalyticsForm() { const [results, setResults] = useState( @@ -178,7 +179,7 @@ export function ExecutionAnalyticsForm() { data: config, isLoading: configLoading, error: configError, - } = useGetV2GetExecutionAnalyticsConfiguration(); + } = useGetV2GetExecutionAnalyticsConfiguration({ query: { select: okData } }); const generateAnalytics = usePostV2GenerateExecutionAnalytics({ mutation: { @@ -231,10 +232,10 @@ export function ExecutionAnalyticsForm() { // Update form defaults when config loads useEffect(() => { - if (config?.data && config.status === 200 && !formData.model_name) { + if (config && !formData.model_name) { setFormData((prev) => ({ ...prev, - model_name: config.data.recommended_model, + model_name: config.recommended_model, })); } }, [config, formData.model_name]); @@ -307,7 +308,7 @@ export function ExecutionAnalyticsForm() { } // Show error state if config fails to load - if (configError || !config?.data || config.status !== 200) { + if (configError || !config) { return (
Failed to load configuration
@@ -315,8 +316,6 @@ export function ExecutionAnalyticsForm() { ); } - const configData = config.data; - return (
@@ -382,7 +381,7 @@ export function ExecutionAnalyticsForm() { - {configData.available_models.map((model) => ( + {config.available_models.map((model) => ( {model.label} @@ -442,7 +441,7 @@ export function ExecutionAnalyticsForm() { onChange={(e) => handleInputChange("system_prompt", e.target.value) } - placeholder={configData.default_system_prompt} + placeholder={config.default_system_prompt} rows={6} className="resize-y" /> @@ -463,7 +462,7 @@ export function ExecutionAnalyticsForm() { onChange={(e) => handleInputChange("user_prompt", e.target.value) } - placeholder={configData.default_user_prompt} + placeholder={config.default_user_prompt} rows={8} className="resize-y" /> @@ -490,7 +489,7 @@ export function ExecutionAnalyticsForm() { onClick={() => { handleInputChange( "system_prompt", - configData.default_system_prompt, + config.default_system_prompt, ); }} > @@ -503,7 +502,7 @@ export function ExecutionAnalyticsForm() { onClick={() => { handleInputChange( "user_prompt", - configData.default_user_prompt, + config.default_user_prompt, ); }} > diff --git a/autogpt_platform/frontend/src/app/(platform)/auth/integrations/setup-wizard/page.tsx b/autogpt_platform/frontend/src/app/(platform)/auth/integrations/setup-wizard/page.tsx index 5163c46d5b..3372772c89 100644 --- a/autogpt_platform/frontend/src/app/(platform)/auth/integrations/setup-wizard/page.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/auth/integrations/setup-wizard/page.tsx @@ -17,7 +17,6 @@ import type { import { CheckIcon, CircleIcon } from "@phosphor-icons/react"; import { useGetOauthGetOauthAppInfo } from "@/app/api/__generated__/endpoints/oauth/oauth"; import { okData } from "@/app/api/helpers"; -import { OAuthApplicationPublicInfo } from "@/app/api/__generated__/models/oAuthApplicationPublicInfo"; // All credential types - we accept any type of credential const ALL_CREDENTIAL_TYPES: CredentialsType[] = [ @@ -107,7 +106,7 @@ export default function IntegrationSetupWizardPage() { const state = searchParams.get("state"); const { data: appInfo } = useGetOauthGetOauthAppInfo(clientID || "", { - query: { enabled: !!clientID, select: okData }, + query: { enabled: !!clientID, select: okData }, }); // Parse providers from base64-encoded JSON diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/CronSchedulerDialog/useCronSchedulerDialog.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/CronSchedulerDialog/useCronSchedulerDialog.ts index 4d5f8bf254..1abfabbdba 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/CronSchedulerDialog/useCronSchedulerDialog.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/CronSchedulerDialog/useCronSchedulerDialog.ts @@ -1,6 +1,6 @@ -import { useGetV1GetUserTimezone } from "@/app/api/__generated__/endpoints/auth/auth"; import { usePostV1CreateExecutionSchedule } from "@/app/api/__generated__/endpoints/schedules/schedules"; import { useToast } from "@/components/molecules/Toast/use-toast"; +import { useUserTimezone } from "@/lib/hooks/useUserTimezone"; import { getTimezoneDisplayName } from "@/lib/timezone-utils"; import { parseAsInteger, parseAsString, useQueryStates } from "nuqs"; import { useEffect, useState } from "react"; @@ -28,11 +28,7 @@ export const useCronSchedulerDialog = ({ flowExecutionID: parseAsString, }); - const { data: userTimezone } = useGetV1GetUserTimezone({ - query: { - select: (res) => 
(res.status === 200 ? res.data.timezone : undefined), - }, - }); + const userTimezone = useUserTimezone(); const timezoneDisplay = getTimezoneDisplayName(userTimezone || "UTC"); const { mutateAsync: createSchedule, isPending: isCreatingSchedule } = diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx index d312fd487d..c9cf5296c6 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx @@ -17,7 +17,6 @@ import { FloatingReviewsPanel } from "@/components/organisms/FloatingReviewsPane import { parseAsString, useQueryStates } from "nuqs"; import { CustomControls } from "./components/CustomControl"; import { useGetV1GetSpecificGraph } from "@/app/api/__generated__/endpoints/graphs/graphs"; -import { GraphModel } from "@/app/api/__generated__/models/graphModel"; import { okData } from "@/app/api/helpers"; import { TriggerAgentBanner } from "./components/TriggerAgentBanner"; import { resolveCollisions } from "./helpers/resolve-collision"; @@ -34,7 +33,7 @@ export const Flow = () => { {}, { query: { - select: okData, + select: okData, enabled: !!flowID, }, }, diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/BlockMenuSearch.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/BlockMenuSearch.tsx index 71888b62ee..de339431e8 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/BlockMenuSearch.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/BlockMenuSearch.tsx @@ -14,7 +14,7 @@ import { NoSearchResult } from "../NoSearchResult"; export const BlockMenuSearch = () => { const { - allSearchData, + searchResults, isFetchingNextPage, fetchNextPage, hasNextPage, @@ -39,7 +39,7 @@ export const BlockMenuSearch = () => { ); } - if (allSearchData.length === 0) { + if (searchResults.length === 0) { return ; } @@ -53,7 +53,7 @@ export const BlockMenuSearch = () => { loader={} className="space-y-2.5" > - {allSearchData.map((item: SearchResponseItemsItem, index: number) => { + {searchResults.map((item: SearchResponseItemsItem, index: number) => { const { type, data } = getSearchItemType(item); // backend give support to these 3 types only [right now] - we need to give support to integration and ai agent types in follow up PRs switch (type) { diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/useBlockMenuSearch.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/useBlockMenuSearch.ts index 3eb14d3ca9..beff80a984 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/useBlockMenuSearch.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/useBlockMenuSearch.ts @@ -1,19 +1,25 @@ -import { useBlockMenuStore } from "../../../../stores/blockMenuStore"; -import { useGetV2BuilderSearchInfinite } from "@/app/api/__generated__/endpoints/store/store"; -import { SearchResponse } from "@/app/api/__generated__/models/searchResponse"; import { 
useCallback, useEffect, useState } from "react"; +import { useBlockMenuStore } from "@/app/(platform)/build/stores/blockMenuStore"; import { useAddAgentToBuilder } from "../hooks/useAddAgentToBuilder"; -import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; -import { getV2GetSpecificAgent } from "@/app/api/__generated__/endpoints/store/store"; +import { + getPaginationNextPageNumber, + okData, + unpaginate, +} from "@/app/api/helpers"; +import { + getGetV2GetBuilderItemCountsQueryKey, + getGetV2GetBuilderSuggestionsQueryKey, +} from "@/app/api/__generated__/endpoints/default/default"; import { getGetV2ListLibraryAgentsQueryKey, getV2GetLibraryAgent, usePostV2AddMarketplaceAgent, } from "@/app/api/__generated__/endpoints/library/library"; import { - getGetV2GetBuilderItemCountsQueryKey, - getGetV2GetBuilderSuggestionsQueryKey, -} from "@/app/api/__generated__/endpoints/default/default"; + getV2GetSpecificAgent, + useGetV2BuilderSearchInfinite, +} from "@/app/api/__generated__/endpoints/store/store"; +import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; import { getQueryClient } from "@/lib/react-query/queryClient"; import { useToast } from "@/components/molecules/Toast/use-toast"; import * as Sentry from "@sentry/nextjs"; @@ -40,7 +46,7 @@ export const useBlockMenuSearch = () => { >(null); const { - data: searchData, + data: searchQueryData, fetchNextPage, hasNextPage, isFetchingNextPage, @@ -53,18 +59,7 @@ export const useBlockMenuSearch = () => { search_id: searchId, }, { - query: { - getNextPageParam: (lastPage) => { - const response = lastPage.data as SearchResponse; - const { pagination } = response; - if (!pagination) { - return undefined; - } - - const { current_page, total_pages } = pagination; - return current_page < total_pages ? current_page + 1 : undefined; - }, - }, + query: { getNextPageParam: getPaginationNextPageNumber }, }, ); @@ -93,16 +88,15 @@ export const useBlockMenuSearch = () => { }); useEffect(() => { - if (!searchData?.pages?.length) { + if (!searchQueryData?.pages?.length) { return; } - const latestPage = searchData.pages[searchData.pages.length - 1]; - const response = latestPage?.data as SearchResponse; - if (response?.search_id && response.search_id !== searchId) { - setSearchId(response.search_id); + const lastPage = okData(searchQueryData.pages.at(-1)); + if (lastPage?.search_id && lastPage.search_id !== searchId) { + setSearchId(lastPage.search_id); } - }, [searchData, searchId, setSearchId]); + }, [searchQueryData, searchId, setSearchId]); useEffect(() => { if (searchId && !searchQuery) { @@ -110,11 +104,9 @@ export const useBlockMenuSearch = () => { } }, [resetSearchSession, searchId, searchQuery]); - const allSearchData = - searchData?.pages?.flatMap((page) => { - const response = page.data as SearchResponse; - return response.items; - }) ?? []; + const searchResults = searchQueryData + ? 
unpaginate(searchQueryData, "items") + : []; const handleAddLibraryAgent = async (agent: LibraryAgent) => { setAddingLibraryAgentId(agent.id); @@ -177,7 +169,7 @@ export const useBlockMenuSearch = () => { }; return { - allSearchData, + searchResults, isFetchingNextPage, fetchNextPage, hasNextPage, diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/IntegrationBlocks/useIntegrationBlocks.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/IntegrationBlocks/useIntegrationBlocks.ts index 678f903936..c6dcd61e36 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/IntegrationBlocks/useIntegrationBlocks.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/IntegrationBlocks/useIntegrationBlocks.ts @@ -1,6 +1,10 @@ +import { + getPaginatedTotalCount, + getPaginationNextPageNumber, + unpaginate, +} from "@/app/api/helpers"; import { useGetV2GetBuilderBlocksInfinite } from "@/app/api/__generated__/endpoints/default/default"; -import { BlockResponse } from "@/app/api/__generated__/models/blockResponse"; -import { useBlockMenuStore } from "../../../../stores/blockMenuStore"; +import { useBlockMenuStore } from "@/app/(platform)/build/stores/blockMenuStore"; const PAGE_SIZE = 10; @@ -8,7 +12,7 @@ export const useIntegrationBlocks = () => { const { integration } = useBlockMenuStore(); const { - data: blocks, + data: blocksQueryData, fetchNextPage, hasNextPage, isFetchingNextPage, @@ -22,30 +26,16 @@ export const useIntegrationBlocks = () => { provider: integration, }, { - query: { - getNextPageParam: (lastPage) => { - const pagination = (lastPage.data as BlockResponse).pagination; - const isMore = - pagination.current_page * pagination.page_size < - pagination.total_items; - - return isMore ? pagination.current_page + 1 : undefined; - }, - }, + query: { getNextPageParam: getPaginationNextPageNumber }, }, ); - const allBlocks = - blocks?.pages?.flatMap((page) => { - const response = page.data as BlockResponse; - return response.blocks; - }) ?? []; + const allBlocks = blocksQueryData + ? unpaginate(blocksQueryData, "blocks") + : []; + const totalBlocks = getPaginatedTotalCount(blocksQueryData); - const totalBlocks = blocks?.pages[0] - ? 
(blocks.pages[0].data as BlockResponse).pagination.total_items - : 0; - - const status = blocks?.pages[0]?.status; + const status = blocksQueryData?.pages[0]?.status; return { allBlocks, diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MarketplaceAgentsContent/useMarketplaceAgentsContent.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MarketplaceAgentsContent/useMarketplaceAgentsContent.ts index ff9b70b79a..c45f36ae87 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MarketplaceAgentsContent/useMarketplaceAgentsContent.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MarketplaceAgentsContent/useMarketplaceAgentsContent.ts @@ -1,3 +1,4 @@ +import { getPaginationNextPageNumber, unpaginate } from "@/app/api/helpers"; import { getGetV2GetBuilderItemCountsQueryKey } from "@/app/api/__generated__/endpoints/default/default"; import { getGetV2ListLibraryAgentsQueryKey, @@ -8,13 +9,12 @@ import { getV2GetSpecificAgent, useGetV2ListStoreAgentsInfinite, } from "@/app/api/__generated__/endpoints/store/store"; +import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; import { useToast } from "@/components/molecules/Toast/use-toast"; -import { StoreAgentsResponse } from "@/lib/autogpt-server-api"; import { getQueryClient } from "@/lib/react-query/queryClient"; import * as Sentry from "@sentry/nextjs"; import { useState } from "react"; import { useAddAgentToBuilder } from "../hooks/useAddAgentToBuilder"; -import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; export const useMarketplaceAgentsContent = () => { const { toast } = useToast(); @@ -22,7 +22,7 @@ export const useMarketplaceAgentsContent = () => { const { addAgentToBuilder } = useAddAgentToBuilder(); const { - data: listStoreAgents, + data: storeAgentsQueryData, fetchNextPage, hasNextPage, isFetchingNextPage, @@ -36,26 +36,14 @@ export const useMarketplaceAgentsContent = () => { page_size: 10, }, { - query: { - getNextPageParam: (lastPage) => { - const pagination = (lastPage.data as StoreAgentsResponse).pagination; - const isMore = - pagination.current_page * pagination.page_size < - pagination.total_items; - - return isMore ? pagination.current_page + 1 : undefined; - }, - }, + query: { getNextPageParam: getPaginationNextPageNumber }, }, ); - const allAgents = - listStoreAgents?.pages?.flatMap((page) => { - const response = page.data as StoreAgentsResponse; - return response.agents; - }) ?? []; - - const status = listStoreAgents?.pages[0]?.status; + const allAgents = storeAgentsQueryData + ? 
unpaginate(storeAgentsQueryData, "agents") + : []; + const status = storeAgentsQueryData?.pages[0]?.status; const { mutateAsync: addMarketplaceAgent } = usePostV2AddMarketplaceAgent({ mutation: { diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MyAgentsContent/useMyAgentsContent.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MyAgentsContent/useMyAgentsContent.ts index 88645393d7..5ce19afe96 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MyAgentsContent/useMyAgentsContent.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MyAgentsContent/useMyAgentsContent.ts @@ -1,5 +1,5 @@ +import { getPaginationNextPageNumber, unpaginate } from "@/app/api/helpers"; import { useGetV2ListLibraryAgentsInfinite } from "@/app/api/__generated__/endpoints/library/library"; -import { LibraryAgentResponse } from "@/app/api/__generated__/models/libraryAgentResponse"; import { useState } from "react"; import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; import { useAddAgentToBuilder } from "../hooks/useAddAgentToBuilder"; @@ -12,7 +12,7 @@ export const useMyAgentsContent = () => { const { toast } = useToast(); const { - data: agents, + data: agentsQueryData, fetchNextPage, hasNextPage, isFetchingNextPage, @@ -26,26 +26,14 @@ export const useMyAgentsContent = () => { page_size: 10, }, { - query: { - getNextPageParam: (lastPage) => { - const pagination = (lastPage.data as LibraryAgentResponse).pagination; - const isMore = - pagination.current_page * pagination.page_size < - pagination.total_items; - - return isMore ? pagination.current_page + 1 : undefined; - }, - }, + query: { getNextPageParam: getPaginationNextPageNumber }, }, ); - const allAgents = - agents?.pages?.flatMap((page) => { - const response = page.data as LibraryAgentResponse; - return response.agents; - }) ?? []; - - const status = agents?.pages[0]?.status; + const allAgents = agentsQueryData + ? 
unpaginate(agentsQueryData, "agents") + : []; + const status = agentsQueryData?.pages[0]?.status; const handleAddBlock = async (agent: LibraryAgent) => { setSelectedAgentId(agent.id); diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/PaginatedBlocksContent/usePaginatedBlocks.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/PaginatedBlocksContent/usePaginatedBlocks.ts index 5348998021..b44fb871f3 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/PaginatedBlocksContent/usePaginatedBlocks.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/PaginatedBlocksContent/usePaginatedBlocks.ts @@ -1,5 +1,5 @@ +import { getPaginationNextPageNumber, unpaginate } from "@/app/api/helpers"; import { useGetV2GetBuilderBlocksInfinite } from "@/app/api/__generated__/endpoints/default/default"; -import { BlockResponse } from "@/app/api/__generated__/models/blockResponse"; interface UsePaginatedBlocksProps { type?: "all" | "input" | "action" | "output" | null; @@ -8,7 +8,7 @@ interface UsePaginatedBlocksProps { const PAGE_SIZE = 10; export const usePaginatedBlocks = ({ type }: UsePaginatedBlocksProps) => { const { - data: blocks, + data: blocksQueryData, fetchNextPage, hasNextPage, isFetchingNextPage, @@ -22,26 +22,14 @@ export const usePaginatedBlocks = ({ type }: UsePaginatedBlocksProps) => { type, }, { - query: { - getNextPageParam: (lastPage) => { - const pagination = (lastPage.data as BlockResponse).pagination; - const isMore = - pagination.current_page * pagination.page_size < - pagination.total_items; - - return isMore ? pagination.current_page + 1 : undefined; - }, - }, + query: { getNextPageParam: getPaginationNextPageNumber }, }, ); - const allBlocks = - blocks?.pages?.flatMap((page) => { - const response = page.data as BlockResponse; - return response.blocks; - }) ?? []; - - const status = blocks?.pages[0]?.status; + const allBlocks = blocksQueryData + ? 
unpaginate(blocksQueryData, "blocks") + : []; + const status = blocksQueryData?.pages[0]?.status; return { allBlocks, diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/PaginatedIntegrationList/usePaginatedIntegrationList.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/PaginatedIntegrationList/usePaginatedIntegrationList.ts index cf84ed94eb..3462b8f619 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/PaginatedIntegrationList/usePaginatedIntegrationList.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/PaginatedIntegrationList/usePaginatedIntegrationList.ts @@ -1,11 +1,11 @@ +import { getPaginationNextPageNumber, unpaginate } from "@/app/api/helpers"; import { useGetV2GetBuilderIntegrationProvidersInfinite } from "@/app/api/__generated__/endpoints/default/default"; -import { ProviderResponse } from "@/app/api/__generated__/models/providerResponse"; const PAGE_SIZE = 10; export const usePaginatedIntegrationList = () => { const { - data: providers, + data: providersQueryData, fetchNextPage, hasNextPage, isFetchingNextPage, @@ -18,26 +18,14 @@ export const usePaginatedIntegrationList = () => { page_size: PAGE_SIZE, }, { - query: { - getNextPageParam: (lastPage: any) => { - const pagination = (lastPage.data as ProviderResponse).pagination; - const isMore = - pagination.current_page * pagination.page_size < - pagination.total_items; - - return isMore ? pagination.current_page + 1 : undefined; - }, - }, + query: { getNextPageParam: getPaginationNextPageNumber }, }, ); - const allProviders = - providers?.pages?.flatMap((page: any) => { - const response = page.data as ProviderResponse; - return response.providers; - }) ?? []; - - const status = providers?.pages[0]?.status; + const allProviders = providersQueryData + ? 
unpaginate(providersQueryData, "providers") + : []; + const status = providersQueryData?.pages[0]?.status; return { allProviders, diff --git a/autogpt_platform/frontend/src/app/(platform)/chat/useChatSession.ts b/autogpt_platform/frontend/src/app/(platform)/chat/useChatSession.ts index 0a350f98bb..99f4efc093 100644 --- a/autogpt_platform/frontend/src/app/(platform)/chat/useChatSession.ts +++ b/autogpt_platform/frontend/src/app/(platform)/chat/useChatSession.ts @@ -11,6 +11,7 @@ import { import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessionDetailResponse"; import { storage, Key } from "@/services/storage/local-storage"; import { isValidUUID } from "@/app/(platform)/chat/helpers"; +import { okData } from "@/app/api/helpers"; interface UseChatSessionArgs { urlSessionId?: string | null; @@ -70,6 +71,7 @@ export function useChatSession({ } = useGetV2GetSession(sessionId || "", { query: { enabled: !!sessionId, + select: okData, staleTime: Infinity, // Never mark as stale refetchOnMount: false, // Don't refetch on component mount refetchOnWindowFocus: false, // Don't refetch when window regains focus @@ -81,9 +83,8 @@ export function useChatSession({ const { mutateAsync: claimSessionMutation } = usePatchV2SessionAssignUser(); const session = useMemo(() => { - if (sessionData?.status === 200) { - return sessionData.data; - } + if (sessionData) return sessionData; + if (sessionId && justCreatedSessionIdRef.current === sessionId) { return { id: sessionId, diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/ScheduleAgentModal/components/TimezoneNotice/TimezoneNotice.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/ScheduleAgentModal/components/TimezoneNotice/TimezoneNotice.tsx index d5d7c011a6..97ee4605f2 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/ScheduleAgentModal/components/TimezoneNotice/TimezoneNotice.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/ScheduleAgentModal/components/TimezoneNotice/TimezoneNotice.tsx @@ -1,15 +1,11 @@ -import { useGetV1GetUserTimezone } from "@/app/api/__generated__/endpoints/auth/auth"; +import { useUserTimezone } from "@/lib/hooks/useUserTimezone"; import { getTimezoneDisplayName } from "@/lib/timezone-utils"; import { InfoIcon } from "@phosphor-icons/react"; export function TimezoneNotice() { - const { data: userTimezone, isSuccess } = useGetV1GetUserTimezone({ - query: { - select: (res) => (res.status === 200 ? 
res.data.timezone : undefined), - }, - }); + const userTimezone = useUserTimezone(); - if (!isSuccess) { + if (!userTimezone) { return null; } diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/AgentActionsDropdown.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/AgentActionsDropdown.tsx index e94878f070..834173cba4 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/AgentActionsDropdown.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/AgentActionsDropdown.tsx @@ -1,7 +1,7 @@ "use client"; import { - getGetV1ListGraphExecutionsInfiniteQueryOptions, + getGetV1ListGraphExecutionsQueryKey, getV1GetGraphVersion, useDeleteV1DeleteGraphExecution, } from "@/app/api/__generated__/endpoints/graphs/graphs"; @@ -127,9 +127,7 @@ export function AgentActionsDropdown({ toast({ title: "Task deleted" }); await queryClient.refetchQueries({ - queryKey: - getGetV1ListGraphExecutionsInfiniteQueryOptions(agentGraphId) - .queryKey, + queryKey: getGetV1ListGraphExecutionsQueryKey(agentGraphId), }); if (onClearSelectedRun) onClearSelectedRun(); diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/useSelectedRunActions.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/useSelectedRunActions.ts index 03fc0b4ae8..9bcfd9d964 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/useSelectedRunActions.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/useSelectedRunActions.ts @@ -1,7 +1,7 @@ "use client"; import { - getGetV1ListGraphExecutionsInfiniteQueryOptions, + getGetV1ListGraphExecutionsQueryKey, usePostV1ExecuteGraphAgent, usePostV1StopGraphExecution, } from "@/app/api/__generated__/endpoints/graphs/graphs"; @@ -11,6 +11,7 @@ import { } from "@/app/api/__generated__/endpoints/presets/presets"; import type { GraphExecution } from "@/app/api/__generated__/models/graphExecution"; import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; +import { okData } from "@/app/api/helpers"; import { useToast } from "@/components/molecules/Toast/use-toast"; import { useQueryClient } from "@tanstack/react-query"; import { useState } from "react"; @@ -58,9 +59,7 @@ export function useSelectedRunActions({ toast({ title: "Run stopped" }); await queryClient.invalidateQueries({ - queryKey: - getGetV1ListGraphExecutionsInfiniteQueryOptions(agentGraphId) - .queryKey, + queryKey: getGetV1ListGraphExecutionsQueryKey(agentGraphId), }); } catch (error: unknown) { toast({ @@ -97,12 +96,10 @@ export function useSelectedRunActions({ }, }); - const newRunId = res?.status === 200 ? (res?.data?.id ?? 
"") : ""; + const newRunId = okData(res)?.id; await queryClient.invalidateQueries({ - queryKey: - getGetV1ListGraphExecutionsInfiniteQueryOptions(agentGraphId) - .queryKey, + queryKey: getGetV1ListGraphExecutionsQueryKey(agentGraphId), }); if (newRunId && onSelectRun) onSelectRun(newRunId); diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/useSelectedRunView.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/useSelectedRunView.ts index 342241ef89..e3e035cea0 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/useSelectedRunView.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/useSelectedRunView.ts @@ -3,14 +3,12 @@ import { useGetV1GetExecutionDetails } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { useGetV2GetASpecificPreset } from "@/app/api/__generated__/endpoints/presets/presets"; import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecutionStatus"; -import type { GetV1GetExecutionDetails200 } from "@/app/api/__generated__/models/getV1GetExecutionDetails200"; -import type { LibraryAgentPreset } from "@/app/api/__generated__/models/libraryAgentPreset"; import { okData } from "@/app/api/helpers"; export function useSelectedRunView(graphId: string, runId: string) { - const query = useGetV1GetExecutionDetails(graphId, runId, { + const executionQuery = useGetV1GetExecutionDetails(graphId, runId, { query: { - refetchInterval: (q: any) => { + refetchInterval: (q) => { const isSuccess = q.state.data?.status === 200; if (!isSuccess) return false; @@ -33,22 +31,15 @@ export function useSelectedRunView(graphId: string, runId: string) { }, }); - const status = query.data?.status; + const run = okData(executionQuery.data); + const status = executionQuery.data?.status; - const run: GetV1GetExecutionDetails200 | undefined = - status === 200 - ? (query.data?.data as GetV1GetExecutionDetails200) - : undefined; - - const presetId = - run && "preset_id" in run && run.preset_id - ? 
(run.preset_id as string) - : undefined; + const presetId = run?.preset_id || undefined; const presetQuery = useGetV2GetASpecificPreset(presetId || "", { query: { enabled: !!presetId, - select: (res) => okData(res), + select: okData, }, }); @@ -60,8 +51,8 @@ export function useSelectedRunView(graphId: string, runId: string) { return { run, preset: presetQuery.data, - isLoading: query.isLoading || presetQuery.isLoading, - responseError: query.error || presetQuery.error, + isLoading: executionQuery.isLoading || presetQuery.isLoading, + responseError: executionQuery.error || presetQuery.error, httpError, } as const; } diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/SelectedScheduleView.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/SelectedScheduleView.tsx index 0672ddc033..678f711097 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/SelectedScheduleView.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/SelectedScheduleView.tsx @@ -1,12 +1,12 @@ "use client"; -import { useGetV1GetUserTimezone } from "@/app/api/__generated__/endpoints/auth/auth"; import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner"; import { Text } from "@/components/atoms/Text/Text"; import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard"; import { humanizeCronExpression } from "@/lib/cron-expression-utils"; import { isLargeScreen, useBreakpoint } from "@/lib/hooks/useBreakpoint"; +import { useUserTimezone } from "@/lib/hooks/useUserTimezone"; import { formatInTimezone, getTimezoneDisplayName } from "@/lib/timezone-utils"; import { AgentInputsReadOnly } from "../../modals/AgentInputsReadOnly/AgentInputsReadOnly"; import { LoadingSelectedContent } from "../LoadingSelectedContent"; @@ -36,11 +36,7 @@ export function SelectedScheduleView({ scheduleId, ); - const { data: userTzRes } = useGetV1GetUserTimezone({ - query: { - select: (res) => (res.status === 200 ? res.data.timezone : undefined), - }, - }); + const userTimezone = useUserTimezone(); const breakpoint = useBreakpoint(); const isLgScreenUp = isLargeScreen(breakpoint); @@ -90,7 +86,7 @@ export function SelectedScheduleView({ run={undefined} scheduleRecurrence={ schedule - ? `${humanizeCronExpression(schedule.cron || "")} · ${getTimezoneDisplayName(schedule.timezone || userTzRes || "UTC")}` + ? 
`${humanizeCronExpression(schedule.cron || "")} · ${getTimezoneDisplayName(schedule.timezone || userTimezone || "UTC")}` : undefined } /> @@ -125,7 +121,7 @@ export function SelectedScheduleView({ {" "} {getTimezoneDisplayName( - schedule.timezone || userTzRes || "UTC", + schedule.timezone || userTimezone || "UTC", )} @@ -135,7 +131,7 @@ export function SelectedScheduleView({ {formatInTimezone( schedule.next_run_time, - userTzRes || "UTC", + userTimezone || "UTC", { year: "numeric", month: "long", @@ -148,7 +144,7 @@ export function SelectedScheduleView({ {" "} {getTimezoneDisplayName( - schedule.timezone || userTzRes || "UTC", + schedule.timezone || userTimezone || "UTC", )} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/components/EditScheduleModal/useEditScheduleModal.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/components/EditScheduleModal/useEditScheduleModal.ts index b006e775f9..427340a427 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/components/EditScheduleModal/useEditScheduleModal.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/components/EditScheduleModal/useEditScheduleModal.ts @@ -1,7 +1,7 @@ "use client"; -import { getGetV1ListGraphExecutionsInfiniteQueryOptions } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { getGetV1ListExecutionSchedulesForAGraphQueryKey } from "@/app/api/__generated__/endpoints/schedules/schedules"; +import { getGetV1ListGraphExecutionsQueryKey } from "@/app/api/__generated__/endpoints/graphs/graphs"; import type { GraphExecutionJobInfo } from "@/app/api/__generated__/models/graphExecutionJobInfo"; import { useToast } from "@/components/molecules/Toast/use-toast"; import { useMutation, useQueryClient } from "@tanstack/react-query"; @@ -94,8 +94,7 @@ export function useEditScheduleModal( await queryClient.invalidateQueries({ queryKey: getGetV1ListExecutionSchedulesForAGraphQueryKey(graphId), }); - const runsKey = getGetV1ListGraphExecutionsInfiniteQueryOptions(graphId) - .queryKey as any; + const runsKey = getGetV1ListGraphExecutionsQueryKey(graphId); await queryClient.invalidateQueries({ queryKey: runsKey }); setIsOpen(false); }, diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/useSelectedScheduleView.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/useSelectedScheduleView.ts index 01905eb296..66263e2dcc 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/useSelectedScheduleView.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/useSelectedScheduleView.ts @@ -2,30 +2,29 @@ import { useMemo } from "react"; import { useGetV1ListExecutionSchedulesForAGraph } from "@/app/api/__generated__/endpoints/schedules/schedules"; -import type { GraphExecutionJobInfo } from "@/app/api/__generated__/models/graphExecutionJobInfo"; 
+import { okData } from "@/app/api/helpers"; export function useSelectedScheduleView(graphId: string, scheduleId: string) { - const query = useGetV1ListExecutionSchedulesForAGraph(graphId, { + const schedulesQuery = useGetV1ListExecutionSchedulesForAGraph(graphId, { query: { enabled: !!graphId, - select: (res) => - res.status === 200 ? (res.data as GraphExecutionJobInfo[]) : [], + select: okData, }, }); const schedule = useMemo( - () => query.data?.find((s) => s.id === scheduleId), - [query.data, scheduleId], + () => schedulesQuery.data?.find((s) => s.id === scheduleId), + [schedulesQuery.data, scheduleId], ); const httpError = - query.isSuccess && !schedule + schedulesQuery.isSuccess && !schedule ? { status: 404, statusText: "Not found" } : undefined; return { schedule, - isLoading: query.isLoading, - error: query.error || httpError, + isLoading: schedulesQuery.isLoading, + error: schedulesQuery.error || httpError, } as const; } diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/components/SelectedTemplateActions.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/components/SelectedTemplateActions.tsx index 1d50ec7c85..008d2cc379 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/components/SelectedTemplateActions.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/components/SelectedTemplateActions.tsx @@ -2,10 +2,10 @@ import { getGetV2ListPresetsQueryKey, + getV2ListPresets, useDeleteV2DeleteAPreset, } from "@/app/api/__generated__/endpoints/presets/presets"; import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; -import type { LibraryAgentPresetResponse } from "@/app/api/__generated__/models/libraryAgentPresetResponse"; import { okData } from "@/app/api/helpers"; import { Button } from "@/components/atoms/Button/Button"; import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner"; @@ -56,15 +56,13 @@ export function SelectedTemplateActions({ queryKey, }); - const queryData = queryClient.getQueryData<{ - data: LibraryAgentPresetResponse; - }>(queryKey); + const queryData = + queryClient.getQueryData< + Awaited> + >(queryKey); - const presets = - okData(queryData)?.presets ?? []; - const templates = presets.filter( - (preset) => !preset.webhook_id || !preset.webhook, - ); + const presets = okData(queryData)?.presets ?? 
[]; + const templates = presets.filter((preset) => !preset.webhook_id); setShowDeleteDialog(false); onDeleted?.(); diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/useSelectedTemplateView.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/useSelectedTemplateView.ts index a0f34f54a2..66dd26f488 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/useSelectedTemplateView.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/useSelectedTemplateView.ts @@ -1,6 +1,6 @@ "use client"; -import { getGetV1ListGraphExecutionsInfiniteQueryOptions } from "@/app/api/__generated__/endpoints/graphs/graphs"; +import { getGetV1ListGraphExecutionsQueryKey } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { getGetV2GetASpecificPresetQueryKey, getGetV2ListPresetsQueryKey, @@ -9,7 +9,6 @@ import { usePostV2ExecuteAPreset, } from "@/app/api/__generated__/endpoints/presets/presets"; import type { GraphExecutionMeta } from "@/app/api/__generated__/models/graphExecutionMeta"; -import type { LibraryAgentPreset } from "@/app/api/__generated__/models/libraryAgentPreset"; import type { LibraryAgentPresetUpdatable } from "@/app/api/__generated__/models/libraryAgentPresetUpdatable"; import { okData } from "@/app/api/helpers"; import { useToast } from "@/components/molecules/Toast/use-toast"; @@ -34,7 +33,7 @@ export function useSelectedTemplateView({ const query = useGetV2GetASpecificPreset(templateId, { query: { enabled: !!templateId, - select: (res) => okData(res), + select: okData, }, }); @@ -83,15 +82,13 @@ export function useSelectedTemplateView({ mutation: { onSuccess: (response) => { if (response.status === 200) { - const execution = okData(response); + const execution = okData(response); if (execution) { toast({ title: "Task started", }); queryClient.invalidateQueries({ - queryKey: - getGetV1ListGraphExecutionsInfiniteQueryOptions(graphId) - .queryKey, + queryKey: getGetV1ListGraphExecutionsQueryKey(graphId), }); onRunCreated?.(execution); } diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/components/SelectedTriggerActions.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/components/SelectedTriggerActions.tsx index 0746027f37..a5b895c3fa 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/components/SelectedTriggerActions.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/components/SelectedTriggerActions.tsx @@ -2,10 +2,10 @@ import { getGetV2ListPresetsQueryKey, + getV2ListPresets, useDeleteV2DeleteAPreset, } from "@/app/api/__generated__/endpoints/presets/presets"; import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; -import type { LibraryAgentPresetResponse } from "@/app/api/__generated__/models/libraryAgentPresetResponse"; import { okData } from "@/app/api/helpers"; 
import { Button } from "@/components/atoms/Button/Button"; import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner"; @@ -52,15 +52,13 @@ export function SelectedTriggerActions({ queryKey, }); - const queryData = queryClient.getQueryData<{ - data: LibraryAgentPresetResponse; - }>(queryKey); + const queryData = + queryClient.getQueryData< + Awaited> + >(queryKey); - const presets = - okData(queryData)?.presets ?? []; - const triggers = presets.filter( - (preset) => preset.webhook_id && preset.webhook, - ); + const presets = okData(queryData)?.presets ?? []; + const triggers = presets.filter((preset) => preset.webhook_id); setShowDeleteDialog(false); onDeleted?.(); diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/useSelectedTriggerView.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/useSelectedTriggerView.ts index 4669d850b2..235c653134 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/useSelectedTriggerView.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/useSelectedTriggerView.ts @@ -6,7 +6,6 @@ import { useGetV2GetASpecificPreset, usePatchV2UpdateAnExistingPreset, } from "@/app/api/__generated__/endpoints/presets/presets"; -import type { LibraryAgentPreset } from "@/app/api/__generated__/models/libraryAgentPreset"; import type { LibraryAgentPresetUpdatable } from "@/app/api/__generated__/models/libraryAgentPresetUpdatable"; import { okData } from "@/app/api/helpers"; import { useToast } from "@/components/molecules/Toast/use-toast"; @@ -26,7 +25,7 @@ export function useSelectedTriggerView({ triggerId, graphId }: Args) { const query = useGetV2GetASpecificPreset(triggerId, { query: { enabled: !!triggerId, - select: (res) => okData(res), + select: okData, }, }); diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/components/TaskActionsDropdown.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/components/TaskActionsDropdown.tsx index 95cc7740f8..ba923bca68 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/components/TaskActionsDropdown.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/components/TaskActionsDropdown.tsx @@ -1,7 +1,7 @@ "use client"; import { - getGetV1ListGraphExecutionsInfiniteQueryOptions, + getGetV1ListGraphExecutionsQueryKey, useDeleteV1DeleteGraphExecution, } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { @@ -51,9 +51,7 @@ export function TaskActionsDropdown({ agent, run, onDeleted }: Props) { toast({ title: "Task deleted" }); await queryClient.refetchQueries({ - queryKey: getGetV1ListGraphExecutionsInfiniteQueryOptions( - agent.graph_id, - ).queryKey, + queryKey: getGetV1ListGraphExecutionsQueryKey(agent.graph_id), }); setShowDeleteDialog(false); diff --git 
a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/helpers.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/helpers.ts deleted file mode 100644 index 096e40239b..0000000000 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/helpers.ts +++ /dev/null @@ -1,44 +0,0 @@ -import type { GraphExecutionsPaginated } from "@/app/api/__generated__/models/graphExecutionsPaginated"; -import type { InfiniteData } from "@tanstack/react-query"; - -function hasValidExecutionsData( - page: unknown, -): page is { data: GraphExecutionsPaginated } { - return ( - typeof page === "object" && - page !== null && - "data" in page && - typeof (page as { data: unknown }).data === "object" && - (page as { data: unknown }).data !== null && - "executions" in (page as { data: GraphExecutionsPaginated }).data - ); -} - -export function computeRunsCount( - infiniteData: InfiniteData | undefined, - runsLength: number, -): number { - const lastPage = infiniteData?.pages.at(-1); - if (!hasValidExecutionsData(lastPage)) return runsLength; - return lastPage.data.pagination?.total_items || runsLength; -} - -export function getNextRunsPageParam(lastPage: unknown): number | undefined { - if (!hasValidExecutionsData(lastPage)) return undefined; - - const { pagination } = lastPage.data; - const hasMore = - pagination.current_page * pagination.page_size < pagination.total_items; - return hasMore ? pagination.current_page + 1 : undefined; -} - -export function extractRunsFromPages( - infiniteData: InfiniteData | undefined, -) { - return ( - infiniteData?.pages.flatMap((page) => { - if (!hasValidExecutionsData(page)) return []; - return page.data.executions || []; - }) || [] - ); -} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/useSidebarRunsList.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/useSidebarRunsList.ts index 7f7155bbdf..971b90c2e3 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/useSidebarRunsList.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/useSidebarRunsList.ts @@ -2,20 +2,18 @@ import { useEffect, useMemo } from "react"; +import { + okData, + getPaginationNextPageNumber, + getPaginatedTotalCount, + unpaginate, +} from "@/app/api/helpers"; import { useGetV1ListGraphExecutionsInfinite } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { useGetV2ListPresets } from "@/app/api/__generated__/endpoints/presets/presets"; import { useGetV1ListExecutionSchedulesForAGraph } from "@/app/api/__generated__/endpoints/schedules/schedules"; -import type { GraphExecutionJobInfo } from "@/app/api/__generated__/models/graphExecutionJobInfo"; -import type { LibraryAgentPresetResponse } from "@/app/api/__generated__/models/libraryAgentPresetResponse"; -import { okData } from "@/app/api/helpers"; import { useExecutionEvents } from "@/hooks/useExecutionEvents"; import { useQueryClient } from "@tanstack/react-query"; import { parseAsString, useQueryStates } from "nuqs"; -import { - computeRunsCount, - 
extractRunsFromPages, - getNextRunsPageParam, -} from "./helpers"; function parseTab( value: string | null, @@ -66,7 +64,7 @@ export function useSidebarRunsList({ query: { enabled: !!graphId, refetchOnWindowFocus: false, - getNextPageParam: getNextRunsPageParam, + getNextPageParam: getPaginationNextPageNumber, }, }, ); @@ -74,7 +72,7 @@ export function useSidebarRunsList({ const schedulesQuery = useGetV1ListExecutionSchedulesForAGraph(graphId, { query: { enabled: !!graphId, - select: (r) => okData(r), + select: okData, }, }); @@ -83,13 +81,13 @@ export function useSidebarRunsList({ { query: { enabled: !!graphId, - select: (r) => okData(r)?.presets, + select: (r) => okData(r)?.presets, }, }, ); const runs = useMemo( - () => extractRunsFromPages(runsQuery.data), + () => (runsQuery.data ? unpaginate(runsQuery.data, "executions") : []), [runsQuery.data], ); @@ -104,7 +102,7 @@ export function useSidebarRunsList({ [allPresets], ); - const runsCount = computeRunsCount(runsQuery.data, runs.length); + const runsCount = getPaginatedTotalCount(runsQuery.data, runs.length); const schedulesCount = schedules.length; const templatesCount = templates.length; const triggersCount = triggers.length; diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/useNewAgentLibraryView.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/useNewAgentLibraryView.ts index 394edb1a6d..b4cc2baca8 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/useNewAgentLibraryView.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/useNewAgentLibraryView.ts @@ -2,7 +2,6 @@ import { useGetV2GetLibraryAgent } from "@/app/api/__generated__/endpoints/libra import { useGetV2GetASpecificPreset } from "@/app/api/__generated__/endpoints/presets/presets"; import { GraphExecutionJobInfo } from "@/app/api/__generated__/models/graphExecutionJobInfo"; import { GraphExecutionMeta } from "@/app/api/__generated__/models/graphExecutionMeta"; -import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; import { LibraryAgentPreset } from "@/app/api/__generated__/models/libraryAgentPreset"; import { okData } from "@/app/api/helpers"; import { useParams } from "next/navigation"; @@ -31,11 +30,7 @@ export function useNewAgentLibraryView() { data: agent, isSuccess, error, - } = useGetV2GetLibraryAgent(agentId, { - query: { - select: okData, - }, - }); + } = useGetV2GetLibraryAgent(agentId, { query: { select: okData } }); const [{ activeItem, activeTab: activeTabRaw }, setQueryStates] = useQueryStates({ @@ -53,7 +48,7 @@ export function useNewAgentLibraryView() { } = useGetV2GetASpecificPreset(activeItem ?? 
"", { query: { enabled: Boolean(activeTab === "templates" && activeItem), - select: okData, + select: okData, }, }); const activeTemplate = diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-schedule-details-view.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-schedule-details-view.tsx index 414aa3863b..61161088fc 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-schedule-details-view.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-schedule-details-view.tsx @@ -23,7 +23,7 @@ import LoadingBox from "@/components/__legacy__/ui/loading"; import { useToastOnFail } from "@/components/molecules/Toast/use-toast"; import { humanizeCronExpression } from "@/lib/cron-expression-utils"; import { formatScheduleTime } from "@/lib/timezone-utils"; -import { useGetV1GetUserTimezone } from "@/app/api/__generated__/endpoints/auth/auth"; +import { useUserTimezone } from "@/lib/hooks/useUserTimezone"; import { PlayIcon } from "lucide-react"; import { AgentRunStatus } from "./agent-run-status-chip"; @@ -48,11 +48,7 @@ export function AgentScheduleDetailsView({ const toastOnFail = useToastOnFail(); // Get user's timezone for displaying schedule times - const { data: userTimezone } = useGetV1GetUserTimezone({ - query: { - select: (res) => (res.status === 200 ? res.data.timezone : undefined), - }, - }); + const userTimezone = useUserTimezone(); const infoStats: { label: string; value: React.ReactNode }[] = useMemo(() => { return [ diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/cron-scheduler-dialog.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/cron-scheduler-dialog.tsx index e998823a89..30c3e7d777 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/cron-scheduler-dialog.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/cron-scheduler-dialog.tsx @@ -4,8 +4,8 @@ import { Button } from "@/components/__legacy__/ui/button"; import { useToast } from "@/components/molecules/Toast/use-toast"; import { CronScheduler } from "@/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/cron-scheduler"; import { Dialog } from "@/components/molecules/Dialog/Dialog"; -import { useGetV1GetUserTimezone } from "@/app/api/__generated__/endpoints/auth/auth"; import { getTimezoneDisplayName } from "@/lib/timezone-utils"; +import { useUserTimezone } from "@/lib/hooks/useUserTimezone"; import { InfoIcon } from "lucide-react"; // Base type for cron expression only @@ -50,11 +50,7 @@ export function CronSchedulerDialog(props: CronSchedulerDialogProps) { ); // Get user's timezone - const { data: userTimezone } = useGetV1GetUserTimezone({ - query: { - select: (res) => (res.status === 200 ? 
res.data.timezone : undefined), - }, - }); + const userTimezone = useUserTimezone(); const timezoneDisplay = getTimezoneDisplayName(userTimezone || "UTC"); // Reset state when dialog opens diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/use-agent-runs.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/use-agent-runs.ts index f997726e21..c74a37e6d0 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/use-agent-runs.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/use-agent-runs.ts @@ -1,15 +1,20 @@ +import { + GraphExecutionMeta as LegacyGraphExecutionMeta, + GraphID, + GraphExecutionID, +} from "@/lib/autogpt-server-api"; +import { getQueryClient } from "@/lib/react-query/queryClient"; +import { + getPaginatedTotalCount, + getPaginationNextPageNumber, + unpaginate, +} from "@/app/api/helpers"; import { getV1ListGraphExecutionsResponse, getV1ListGraphExecutionsResponse200, useGetV1ListGraphExecutionsInfinite, } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { GraphExecutionsPaginated } from "@/app/api/__generated__/models/graphExecutionsPaginated"; -import { getQueryClient } from "@/lib/react-query/queryClient"; -import { - GraphExecutionMeta as LegacyGraphExecutionMeta, - GraphID, - GraphExecutionID, -} from "@/lib/autogpt-server-api"; import { GraphExecutionMeta as RawGraphExecutionMeta } from "@/app/api/__generated__/models/graphExecutionMeta"; export type GraphExecutionMeta = Omit< @@ -44,15 +49,7 @@ export const useAgentRunsInfinite = (graphID?: GraphID) => { { page: 1, page_size: 20 }, { query: { - getNextPageParam: (lastPage) => { - const pagination = (lastPage.data as GraphExecutionsPaginated) - .pagination; - const hasMore = - pagination.current_page * pagination.page_size < - pagination.total_items; - - return hasMore ? pagination.current_page + 1 : undefined; - }, + getNextPageParam: getPaginationNextPageNumber, // Prevent query from running if graphID is not available (yet) ...(!graphID @@ -80,15 +77,8 @@ export const useAgentRunsInfinite = (graphID?: GraphID) => { queryClient, ); - const agentRuns = - queryResults?.pages.flatMap((page) => { - const response = page.data as GraphExecutionsPaginated; - return response.executions; - }) ?? []; - - const agentRunCount = ( - queryResults?.pages.at(-1)?.data as GraphExecutionsPaginated | undefined - )?.pagination.total_items; + const agentRuns = queryResults ? 
unpaginate(queryResults, "executions") : []; + const agentRunCount = getPaginatedTotalCount(queryResults); const upsertAgentRun = (newAgentRun: GraphExecutionMeta) => { queryClient.setQueryData( diff --git a/autogpt_platform/frontend/src/app/(platform)/library/components/LibraryAgentList/useLibraryAgentList.ts b/autogpt_platform/frontend/src/app/(platform)/library/components/LibraryAgentList/useLibraryAgentList.ts index 8ae2c659a6..e9db9a02da 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/components/LibraryAgentList/useLibraryAgentList.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/components/LibraryAgentList/useLibraryAgentList.ts @@ -1,7 +1,11 @@ "use client"; +import { + getPaginatedTotalCount, + getPaginationNextPageNumber, + unpaginate, +} from "@/app/api/helpers"; import { useGetV2ListLibraryAgentsInfinite } from "@/app/api/__generated__/endpoints/library/library"; -import { LibraryAgentResponse } from "@/app/api/__generated__/models/libraryAgentResponse"; import { useLibraryPageContext } from "../state-provider"; import { useLibraryAgentsStore } from "@/hooks/useLibraryAgents/store"; import { getInitialData } from "./helpers"; @@ -11,7 +15,7 @@ export const useLibraryAgentList = () => { const { agents: cachedAgents } = useLibraryAgentsStore(); const { - data: agents, + data: agentsQueryData, fetchNextPage, hasNextPage, isFetchingNextPage, @@ -26,27 +30,15 @@ export const useLibraryAgentList = () => { { query: { initialData: getInitialData(cachedAgents, searchTerm, 8), - getNextPageParam: (lastPage) => { - const pagination = (lastPage.data as LibraryAgentResponse).pagination; - const isMore = - pagination.current_page * pagination.page_size < - pagination.total_items; - - return isMore ? pagination.current_page + 1 : undefined; - }, + getNextPageParam: getPaginationNextPageNumber, }, }, ); - const allAgents = - agents?.pages?.flatMap((page) => { - const response = page.data as LibraryAgentResponse; - return response.agents; - }) ?? []; - - const agentCount = agents?.pages?.[0] - ? (agents.pages[0].data as LibraryAgentResponse).pagination.total_items - : 0; + const allAgents = agentsQueryData + ? unpaginate(agentsQueryData, "agents") + : []; + const agentCount = getPaginatedTotalCount(agentsQueryData); return { allAgents, diff --git a/autogpt_platform/frontend/src/app/(platform)/library/hooks/useFavoriteAgents.ts b/autogpt_platform/frontend/src/app/(platform)/library/hooks/useFavoriteAgents.ts index 633ad72712..933670ca80 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/hooks/useFavoriteAgents.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/hooks/useFavoriteAgents.ts @@ -1,10 +1,15 @@ "use client"; +import { + getPaginatedTotalCount, + getPaginationNextPageNumber, + unpaginate, +} from "@/app/api/helpers"; import { useGetV2ListFavoriteLibraryAgentsInfinite } from "@/app/api/__generated__/endpoints/library/library"; export function useFavoriteAgents() { const { - data: agents, + data: agentsQueryData, fetchNextPage, hasNextPage, isFetchingNextPage, @@ -15,36 +20,14 @@ export function useFavoriteAgents() { page_size: 10, }, { - query: { - getNextPageParam: (lastPage) => { - // Only paginate on successful responses - if (!lastPage || lastPage.status !== 200) return undefined; - - const pagination = lastPage.data.pagination; - const isMore = - pagination.current_page * pagination.page_size < - pagination.total_items; - - return isMore ? 
pagination.current_page + 1 : undefined; - }, - }, + query: { getNextPageParam: getPaginationNextPageNumber }, }, ); - const allAgents = - agents?.pages?.flatMap((page) => { - // Only process successful responses - if (!page || page.status !== 200) return []; - const response = page.data; - return response?.agents || []; - }) ?? []; - - const agentCount = (() => { - const firstPage = agents?.pages?.[0]; - // Only count from successful responses - if (!firstPage || firstPage.status !== 200) return 0; - return firstPage.data?.pagination?.total_items || 0; - })(); + const allAgents = agentsQueryData + ? unpaginate(agentsQueryData, "agents") + : []; + const agentCount = getPaginatedTotalCount(agentsQueryData); return { allAgents, diff --git a/autogpt_platform/frontend/src/app/(platform)/monitoring/components/SchedulesTable.tsx b/autogpt_platform/frontend/src/app/(platform)/monitoring/components/SchedulesTable.tsx index f069510b01..ad35db11b1 100644 --- a/autogpt_platform/frontend/src/app/(platform)/monitoring/components/SchedulesTable.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/monitoring/components/SchedulesTable.tsx @@ -15,11 +15,11 @@ import { ScrollArea } from "@/components/__legacy__/ui/scroll-area"; import { ClockIcon, Loader2 } from "lucide-react"; import { useToast } from "@/components/molecules/Toast/use-toast"; import { humanizeCronExpression } from "@/lib/cron-expression-utils"; +import { useUserTimezone } from "@/lib/hooks/useUserTimezone"; import { formatScheduleTime, getTimezoneAbbreviation, } from "@/lib/timezone-utils"; -import { useGetV1GetUserTimezone } from "@/app/api/__generated__/endpoints/auth/auth"; import { Select, SelectContent, @@ -66,11 +66,7 @@ export const SchedulesTable = ({ const [selectedFilter, setSelectedFilter] = useState(""); // Graph ID // Get user's timezone for displaying schedule times - const { data: userTimezone } = useGetV1GetUserTimezone({ - query: { - select: (res) => (res.status === 200 ? res.data.timezone : "UTC"), - }, - }); + const userTimezone = useUserTimezone() ?? "UTC"; const filteredAndSortedSchedules = [...schedules] .filter( diff --git a/autogpt_platform/frontend/src/app/(platform)/monitoring/page.tsx b/autogpt_platform/frontend/src/app/(platform)/monitoring/page.tsx index 5e70245ac9..3b5aa46839 100644 --- a/autogpt_platform/frontend/src/app/(platform)/monitoring/page.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/monitoring/page.tsx @@ -7,6 +7,7 @@ import { useGetV1ListExecutionSchedulesForAUser, useDeleteV1DeleteExecutionSchedule, } from "@/app/api/__generated__/endpoints/schedules/schedules"; +import { okData } from "@/app/api/helpers"; import { Card } from "@/components/__legacy__/ui/card"; import { SchedulesTable } from "@/app/(platform)/monitoring/components/SchedulesTable"; @@ -34,8 +35,7 @@ const Monitor = () => { useGetV1ListExecutionSchedulesForAUser(); const deleteScheduleMutation = useDeleteV1DeleteExecutionSchedule(); - const schedules = - schedulesResponse?.status === 200 ? schedulesResponse.data : []; + const schedules = okData(schedulesResponse) ?? 
[]; const removeSchedule = useCallback( async (scheduleId: string) => { diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeySection/useAPISection.ts b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeySection/useAPISection.ts index 5fe691f025..d4ad54162e 100644 --- a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeySection/useAPISection.ts +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeySection/useAPISection.ts @@ -4,6 +4,7 @@ import { useDeleteV1RevokeApiKey, useGetV1ListUserApiKeys, } from "@/app/api/__generated__/endpoints/api-keys/api-keys"; +import { okData } from "@/app/api/helpers"; import { useToast } from "@/components/molecules/Toast/use-toast"; import { getQueryClient } from "@/lib/react-query/queryClient"; @@ -13,11 +14,7 @@ export const useAPISection = () => { const { data: apiKeys, isLoading } = useGetV1ListUserApiKeys({ query: { - select: (res) => { - if (res.status !== 200) return undefined; - - return res.data.filter((key) => key.status === "ACTIVE"); - }, + select: (res) => okData(res)?.filter((key) => key.status === "ACTIVE"), }, }); diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/useOAuthApps.ts b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/useOAuthApps.ts index 5b5afc5783..cf9749c53a 100644 --- a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/useOAuthApps.ts +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/useOAuthApps.ts @@ -7,7 +7,6 @@ import { usePostOauthUploadAppLogo, getGetOauthListMyOauthAppsQueryKey, } from "@/app/api/__generated__/endpoints/oauth/oauth"; -import { OAuthApplicationInfo } from "@/app/api/__generated__/models/oAuthApplicationInfo"; import { okData } from "@/app/api/helpers"; import { useToast } from "@/components/molecules/Toast/use-toast"; import { getQueryClient } from "@/lib/react-query/queryClient"; @@ -19,7 +18,7 @@ export const useOAuthApps = () => { const [uploadingAppId, setUploadingAppId] = useState(null); const { data: oauthAppsResponse, isLoading } = useGetOauthListMyOauthApps({ - query: { select: okData }, + query: { select: okData }, }); const { mutateAsync: updateStatus } = usePatchOauthUpdateAppStatus({ diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/settings/page.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/settings/page.tsx index f0eb8a6b8c..8b4d48de83 100644 --- a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/settings/page.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/settings/page.tsx @@ -6,6 +6,7 @@ import { useGetV1GetNotificationPreferences, useGetV1GetUserTimezone, } from "@/app/api/__generated__/endpoints/auth/auth"; +import { okData } from "@/app/api/helpers"; import { Text } from "@/components/atoms/Text/Text"; import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard"; import { useSupabase } from "@/lib/supabase/hooks/useSupabase"; @@ -24,7 +25,7 @@ export default function SettingsPage() { } = useGetV1GetNotificationPreferences({ query: { enabled: !!user, - select: (res) => (res.status === 200 ? res.data : null), + select: okData, }, }); @@ -32,9 +33,7 @@ export default function SettingsPage() { useGetV1GetUserTimezone({ query: { enabled: !!user, - select: (res) => { - return res.status === 200 ? 
String(res.data.timezone) : "not-set"; - }, + select: (res) => okData(res)?.timezone ?? "not-set", }, }); diff --git a/autogpt_platform/frontend/src/app/api/helpers.ts b/autogpt_platform/frontend/src/app/api/helpers.ts index 2ed45c9517..e9a708ba4c 100644 --- a/autogpt_platform/frontend/src/app/api/helpers.ts +++ b/autogpt_platform/frontend/src/app/api/helpers.ts @@ -1,7 +1,12 @@ +import type { InfiniteData } from "@tanstack/react-query"; import { getV1IsOnboardingEnabled, getV1OnboardingState, } from "./__generated__/endpoints/onboarding/onboarding"; +import { Pagination } from "./__generated__/models/pagination"; + +export type OKData = + (TResponse & { status: 200 })["data"]; /** * Narrow an orval response to its success payload if and only if it is a `200` status with OK shape. @@ -9,13 +14,15 @@ import { * Usage with React Query select: * ```ts * const { data: agent } = useGetV2GetLibraryAgent(agentId, { - * query: { select: okData }, + * query: { select: okData }, * }); * * data // is now properly typed as LibraryAgent | undefined * ``` */ -export function okData(res: unknown): T | undefined { +export function okData( + res: TResponse | undefined, +): OKData | undefined { if (!res || typeof res !== "object") return undefined; // status must exist and be exactly 200 @@ -26,7 +33,88 @@ export function okData(res: unknown): T | undefined { // check presence to safely return it as T; the generic T is enforced at call sites. if (!("data" in (res as Record))) return undefined; - return (res as { data: T }).data; + return res.data; +} + +export function getPaginatedTotalCount( + infiniteData: InfiniteData | undefined, + fallbackCount?: number, +): number { + const lastPage = infiniteData?.pages.at(-1); + if (!hasValidPaginationInfo(lastPage)) return fallbackCount ?? 0; + return lastPage.data.pagination.total_items ?? fallbackCount ?? 0; +} + +export function getPaginationNextPageNumber( + lastPage: + | { data: { pagination?: Pagination; [key: string]: any } } + | undefined, +): number | undefined { + if (!hasValidPaginationInfo(lastPage)) return undefined; + + const { pagination } = lastPage.data; + const hasMore = + pagination.current_page * pagination.page_size < pagination.total_items; + return hasMore ? pagination.current_page + 1 : undefined; +} + +/** Make one list from a paginated infinite query result. */ +export function unpaginate< + TResponse extends { status: number; data: any }, + TPageDataKey extends { + // Only allow keys for which the value is an array: + [K in keyof OKData]: OKData[K] extends any[] + ? 
K + : never; + }[keyof OKData] & + string, + TItemData extends OKData[TPageDataKey][number], +>( + infiniteData: InfiniteData, + pageListKey: TPageDataKey, +): TItemData[] { + return ( + infiniteData?.pages.flatMap((page) => { + if (!hasValidListPage(page, pageListKey)) return []; + return page.data[pageListKey] || []; + }) || [] + ); +} + +function hasValidListPage( + page: unknown, + pageListKey: TKey, +): page is { status: 200; data: { [key in TKey]: any[] } } { + return ( + typeof page === "object" && + page !== null && + "status" in page && + page.status === 200 && + "data" in page && + typeof page.data === "object" && + page.data !== null && + pageListKey in page.data && + Array.isArray((page.data as Record)[pageListKey]) + ); +} + +function hasValidPaginationInfo( + page: unknown, +): page is { data: { pagination: Pagination; [key: string]: any } } { + return ( + typeof page === "object" && + page !== null && + "data" in page && + typeof page.data === "object" && + page.data !== null && + "pagination" in page.data && + typeof page.data.pagination === "object" && + page.data.pagination !== null && + "total_items" in page.data.pagination && + "total_pages" in page.data.pagination && + "current_page" in page.data.pagination && + "page_size" in page.data.pagination + ); } type ResponseWithData = { status: number; data: unknown }; diff --git a/autogpt_platform/frontend/src/app/api/openapi.json b/autogpt_platform/frontend/src/app/api/openapi.json index 61a3600892..2ead2189ed 100644 --- a/autogpt_platform/frontend/src/app/api/openapi.json +++ b/autogpt_platform/frontend/src/app/api/openapi.json @@ -4624,7 +4624,7 @@ "get": { "tags": ["v2", "executions", "review", "v2", "executions", "review"], "summary": "Get Pending Reviews for Execution", - "description": "Get all pending reviews for a specific graph execution.\n\nRetrieves all reviews with status \"WAITING\" for the specified graph execution\nthat belong to the authenticated user. Results are ordered by creation time\n(oldest first) to preserve review order within the execution.\n\nArgs:\n graph_exec_id: ID of the graph execution to get reviews for\n user_id: Authenticated user ID from security dependency\n\nReturns:\n List of pending review objects for the specified execution\n\nRaises:\n HTTPException:\n - 403: If user doesn't own the graph execution\n - 500: If authentication fails or database error occurs\n\nNote:\n Only returns reviews owned by the authenticated user for security.\n Reviews with invalid status are excluded with warning logs.", + "description": "Get all pending reviews for a specific graph execution.\n\nRetrieves all reviews with status \"WAITING\" for the specified graph execution\nthat belong to the authenticated user. 
Results are ordered by creation time\n(oldest first) to preserve review order within the execution.\n\nArgs:\n graph_exec_id: ID of the graph execution to get reviews for\n user_id: Authenticated user ID from security dependency\n\nReturns:\n List of pending review objects for the specified execution\n\nRaises:\n HTTPException:\n - 404: If the graph execution doesn't exist or isn't owned by this user\n - 500: If authentication fails or database error occurs\n\nNote:\n Only returns reviews owned by the authenticated user for security.\n Reviews with invalid status are excluded with warning logs.", "operationId": "getV2Get pending reviews for execution", "security": [{ "HTTPBearerJWT": [] }], "parameters": [ @@ -4650,11 +4650,10 @@ } } }, - "400": { "description": "Invalid graph execution ID" }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" }, - "403": { "description": "Access denied to graph execution" }, + "404": { "description": "Graph execution not found" }, "422": { "description": "Validation Error", "content": { @@ -5349,7 +5348,11 @@ "responses": { "200": { "description": "Successful Response", - "content": { "application/json": { "schema": {} } } + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphMeta" } + } + } }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" diff --git a/autogpt_platform/frontend/src/components/contextual/GoogleDrivePicker/useGoogleDrivePicker.ts b/autogpt_platform/frontend/src/components/contextual/GoogleDrivePicker/useGoogleDrivePicker.ts index 66386882c6..f6478f6c2b 100644 --- a/autogpt_platform/frontend/src/components/contextual/GoogleDrivePicker/useGoogleDrivePicker.ts +++ b/autogpt_platform/frontend/src/components/contextual/GoogleDrivePicker/useGoogleDrivePicker.ts @@ -15,6 +15,7 @@ import { normalizePickerResponse, scopesIncludeDrive, } from "./helpers"; +import { okData } from "@/app/api/helpers"; const defaultScopes = ["https://www.googleapis.com/auth/drive.file"]; @@ -126,9 +127,9 @@ export function useGoogleDrivePicker(options: Props) { ); const response = await queryClient.fetchQuery(queryOptions); + const cred = okData(response); - if (response.status === 200 && response.data) { - const cred = response.data; + if (cred) { if (cred.type === "oauth2") { const oauthCred = cred as OAuth2Credentials; if (oauthCred.access_token) { diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentSelectStep/AgentSelectStep.tsx b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentSelectStep/AgentSelectStep.tsx index 2766f2d477..896840ba08 100644 --- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentSelectStep/AgentSelectStep.tsx +++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentSelectStep/AgentSelectStep.tsx @@ -34,7 +34,7 @@ export function AgentSelectStep({ }: Props) { const { // Data - agents, + myAgents, isLoading, error, // State @@ -99,7 +99,7 @@ export function AgentSelectStep({ description="Select your project that you'd like to publish" /> - {agents.length === 0 ? ( + {myAgents.length === 0 ? (
Uh-oh.. It seems like you don't have any agents in your @@ -130,7 +130,7 @@ export function AgentSelectStep({
- {agents.map((agent) => ( + {myAgents.map((agent) => (
(null); - const { data: myAgents, isLoading, error } = useGetV2GetMyAgents(); - - const agents: Agent[] = - (myAgents?.status === 200 && - myAgents.data.agents - .map( - (agent): Agent => ({ - name: agent.agent_name, - id: agent.agent_id, - version: agent.agent_version, - lastEdited: agent.last_edited.toLocaleDateString(), - imageSrc: agent.agent_image || "https://picsum.photos/300/200", - description: agent.description || "", - recommendedScheduleCron: agent.recommended_schedule_cron ?? null, - }), - ) - .sort( - (a: Agent, b: Agent) => - new Date(b.lastEdited).getTime() - new Date(a.lastEdited).getTime(), - )) || - []; + const { + data: _myAgents, + isLoading, + error, + } = useGetV2GetMyAgents(undefined, { + query: { + select: (res) => + okData(res) + ?.agents.map( + (agent): Agent => ({ + name: agent.agent_name, + id: agent.agent_id, + version: agent.agent_version, + lastEdited: agent.last_edited.toLocaleDateString(), + imageSrc: agent.agent_image || "https://picsum.photos/300/200", + description: agent.description || "", + recommendedScheduleCron: agent.recommended_schedule_cron ?? null, + }), + ) + .sort( + (a: Agent, b: Agent) => + new Date(b.lastEdited).getTime() - + new Date(a.lastEdited).getTime(), + ), + }, + }); + const myAgents = _myAgents ?? []; const handleAgentClick = ( _: string, @@ -70,7 +77,7 @@ export function useAgentSelectStep({ const handleNext = () => { if (selectedAgentId && selectedAgentVersion) { - const selectedAgent = agents.find( + const selectedAgent = myAgents.find( (agent) => agent.id === selectedAgentId, ); if (selectedAgent) { @@ -86,7 +93,7 @@ export function useAgentSelectStep({ return { // Data - agents, + myAgents, isLoading, error, // State diff --git a/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/useAgentActivityDropdown.ts b/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/useAgentActivityDropdown.ts index df8402906b..9dbd8aaf7e 100644 --- a/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/useAgentActivityDropdown.ts +++ b/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/useAgentActivityDropdown.ts @@ -4,6 +4,7 @@ import { useExecutionEvents } from "@/hooks/useExecutionEvents"; import { useLibraryAgents } from "@/hooks/useLibraryAgents/useLibraryAgents"; import type { GraphExecution } from "@/lib/autogpt-server-api/types"; import { useCallback, useEffect, useMemo, useState } from "react"; +import { okData } from "@/app/api/helpers"; import { NotificationState, categorizeExecutions, @@ -26,7 +27,7 @@ export function useAgentActivityDropdown() { isSuccess: executionsSuccess, error: executionsError, } = useGetV1ListAllExecutions({ - query: { select: (res) => (res.status === 200 ? 
res.data : null) }, + query: { select: okData }, }); // Get all graph IDs from agentInfoMap diff --git a/autogpt_platform/frontend/src/components/layout/Navbar/components/NavbarView.tsx b/autogpt_platform/frontend/src/components/layout/Navbar/components/NavbarView.tsx index 49790547e4..863b9f601f 100644 --- a/autogpt_platform/frontend/src/components/layout/Navbar/components/NavbarView.tsx +++ b/autogpt_platform/frontend/src/components/layout/Navbar/components/NavbarView.tsx @@ -7,6 +7,7 @@ import { useBreakpoint } from "@/lib/hooks/useBreakpoint"; import { useSupabase } from "@/lib/supabase/hooks/useSupabase"; import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag"; import { useMemo } from "react"; +import { okData } from "@/app/api/helpers"; import { getAccountMenuItems, loggedInLinks, loggedOutLinks } from "../helpers"; import { AccountMenu } from "./AccountMenu/AccountMenu"; import { AgentActivityDropdown } from "./AgentActivityDropdown/AgentActivityDropdown"; @@ -29,7 +30,7 @@ export function NavbarView({ isLoggedIn, previewBranchName }: NavbarViewProps) { const { data: profile, isLoading: isProfileLoading } = useGetV2GetUserProfile( { query: { - select: (res) => (res.status === 200 ? res.data : null), + select: okData, enabled: isLoggedIn && !!user, // Include user ID in query key to ensure cache invalidation when user changes queryKey: ["/api/store/profile", user?.id], diff --git a/autogpt_platform/frontend/src/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel.tsx b/autogpt_platform/frontend/src/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel.tsx index 12014e50fe..2b04c0ed9a 100644 --- a/autogpt_platform/frontend/src/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel.tsx +++ b/autogpt_platform/frontend/src/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel.tsx @@ -7,6 +7,7 @@ import { cn } from "@/lib/utils"; import { Text } from "@/components/atoms/Text/Text"; import { useGetV1GetExecutionDetails } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecutionStatus"; +import { okData } from "@/app/api/helpers"; import { useGraphStore } from "@/app/(platform)/build/stores/graphStore"; import { useShallow } from "zustand/react/shallow"; @@ -29,13 +30,11 @@ export function FloatingReviewsPanel({ { query: { enabled: !!(graphId && executionId), + select: okData, }, }, ); - const executionStatus = - executionDetails?.status === 200 ? 
executionDetails.data.status : undefined; - // Get graph execution status from the store (updated via WebSocket) const graphExecutionStatus = useGraphStore( useShallow((state) => state.graphExecutionStatus), @@ -49,7 +48,7 @@ export function FloatingReviewsPanel({ if (executionId) { refetch(); } - }, [executionStatus, executionId, refetch]); + }, [executionDetails?.status, executionId, refetch]); // Refetch when graph execution status changes to REVIEW useEffect(() => { @@ -62,7 +61,7 @@ export function FloatingReviewsPanel({ !executionId || (!isLoading && pendingReviews.length === 0 && - executionStatus !== AgentExecutionStatus.REVIEW) + executionDetails?.status !== AgentExecutionStatus.REVIEW) ) { return null; } diff --git a/autogpt_platform/frontend/src/components/organisms/PendingReviewsList/PendingReviewsList.tsx b/autogpt_platform/frontend/src/components/organisms/PendingReviewsList/PendingReviewsList.tsx index ddc9bab972..3253b0ee6d 100644 --- a/autogpt_platform/frontend/src/components/organisms/PendingReviewsList/PendingReviewsList.tsx +++ b/autogpt_platform/frontend/src/components/organisms/PendingReviewsList/PendingReviewsList.tsx @@ -44,8 +44,8 @@ export function PendingReviewsList({ const reviewActionMutation = usePostV2ProcessReviewAction({ mutation: { - onSuccess: (data: any) => { - if (data.status !== 200) { + onSuccess: (res) => { + if (res.status !== 200) { toast({ title: "Failed to process reviews", description: "Unexpected response from server", @@ -54,18 +54,18 @@ export function PendingReviewsList({ return; } - const response = data.data; + const result = res.data; - if (response.failed_count > 0) { + if (result.failed_count > 0) { toast({ title: "Reviews partially processed", - description: `${response.approved_count + response.rejected_count} succeeded, ${response.failed_count} failed. ${response.error || "Some reviews could not be processed."}`, + description: `${result.approved_count + result.rejected_count} succeeded, ${result.failed_count} failed. 
${result.error || "Some reviews could not be processed."}`, variant: "destructive", }); } else { toast({ title: "Reviews processed successfully", - description: `${response.approved_count} approved, ${response.rejected_count} rejected`, + description: `${result.approved_count} approved, ${result.rejected_count} rejected`, variant: "default", }); } diff --git a/autogpt_platform/frontend/src/hooks/useAgentSafeMode.ts b/autogpt_platform/frontend/src/hooks/useAgentSafeMode.ts index 654ef858b6..07a2b33674 100644 --- a/autogpt_platform/frontend/src/hooks/useAgentSafeMode.ts +++ b/autogpt_platform/frontend/src/hooks/useAgentSafeMode.ts @@ -7,6 +7,7 @@ import { import { useToast } from "@/components/molecules/Toast/use-toast"; import { GraphModel } from "@/app/api/__generated__/models/graphModel"; import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; +import { okData } from "@/app/api/helpers"; import { useQueryClient } from "@tanstack/react-query"; import { Graph } from "@/lib/autogpt-server-api/types"; @@ -47,15 +48,19 @@ export function useAgentSafeMode(graph: GraphModel | LibraryAgent | Graph) { const { data: libraryAgent, isLoading } = useGetV2GetLibraryAgentByGraphId( graphId, {}, - { query: { enabled: !isAgent && shouldShowToggle } }, + { + query: { + enabled: !isAgent && shouldShowToggle, + select: okData, + }, + }, ); const [localSafeMode, setLocalSafeMode] = useState(null); useEffect(() => { - if (!isAgent && libraryAgent?.status === 200) { - const backendValue = - libraryAgent.data?.settings?.human_in_the_loop_safe_mode; + if (!isAgent && libraryAgent) { + const backendValue = libraryAgent.settings?.human_in_the_loop_safe_mode; if (backendValue !== undefined) { setLocalSafeMode(backendValue); } diff --git a/autogpt_platform/frontend/src/hooks/usePendingReviews.ts b/autogpt_platform/frontend/src/hooks/usePendingReviews.ts index 111b50a491..8257814fcf 100644 --- a/autogpt_platform/frontend/src/hooks/usePendingReviews.ts +++ b/autogpt_platform/frontend/src/hooks/usePendingReviews.ts @@ -2,12 +2,13 @@ import { useGetV2GetPendingReviews, useGetV2GetPendingReviewsForExecution, } from "@/app/api/__generated__/endpoints/executions/executions"; +import { okData } from "@/app/api/helpers"; export function usePendingReviews() { const query = useGetV2GetPendingReviews(); return { - pendingReviews: (query.data?.status === 200 ? query.data.data : []) || [], + pendingReviews: okData(query.data) || [], isLoading: query.isLoading, error: query.error, refetch: query.refetch, @@ -18,7 +19,7 @@ export function usePendingReviewsForExecution(graphExecId: string) { const query = useGetV2GetPendingReviewsForExecution(graphExecId); return { - pendingReviews: (query.data?.status === 200 ? 
query.data.data : []) || [], + pendingReviews: okData(query.data) || [], isLoading: query.isLoading, error: query.error, refetch: query.refetch, diff --git a/autogpt_platform/frontend/src/lib/hooks/useUserTimezone.ts b/autogpt_platform/frontend/src/lib/hooks/useUserTimezone.ts new file mode 100644 index 0000000000..7d5cef3a04 --- /dev/null +++ b/autogpt_platform/frontend/src/lib/hooks/useUserTimezone.ts @@ -0,0 +1,8 @@ +import { okData } from "@/app/api/helpers"; +import { useGetV1GetUserTimezone } from "@/app/api/__generated__/endpoints/auth/auth"; + +export function useUserTimezone(): "not-set" | string | undefined { + return useGetV1GetUserTimezone({ + query: { select: (res) => okData(res)?.timezone }, + }).data; +} diff --git a/autogpt_platform/frontend/src/lib/react-query/queryClient.ts b/autogpt_platform/frontend/src/lib/react-query/queryClient.ts index 836c505c2f..512629e65b 100644 --- a/autogpt_platform/frontend/src/lib/react-query/queryClient.ts +++ b/autogpt_platform/frontend/src/lib/react-query/queryClient.ts @@ -21,6 +21,10 @@ function makeQueryClient() { let browserQueryClient: QueryClient | undefined = undefined; +/** Only for use *outside client component context* + * (so in server components, API helpers, etc.). + * + * In the context of client components, you should always use `useQueryClient()`. */ export function getQueryClient() { if (isServer) { // Server: create new client every time (so one user's data doesn't leak to another) From c3e407ef09a42f1c6c122363f96c0ce2a39527ef Mon Sep 17 00:00:00 2001 From: Abhimanyu Yadav <122007096+Abhi1992002@users.noreply.github.com> Date: Mon, 22 Dec 2025 07:00:58 +0530 Subject: [PATCH 16/25] feat(frontend): add hover state to edge delete button in FlowEditor (#11601) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The delete button on flow editor edges is always visible, which creates visual clutter. This change makes the button only appear on hover, improving the UI while keeping it accessible. 
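For reference, a minimal sketch of the hover-fade pattern this change applies. It is simplified for illustration: the real `CustomEdge` component also renders the bezier path and positions the button via the edge label renderer, so treat this as a sketch of the technique rather than the actual diff (only `cn` from `@/lib/utils` and `useState` are assumed, both already used in this codebase).

```tsx
import { useState } from "react";
import { cn } from "@/lib/utils";

// Sketch: delete button that fades in on hover but stays clickable at opacity-0.
function EdgeDeleteButton({ onDelete }: { onDelete: () => void }) {
  const [isHovered, setIsHovered] = useState(false);

  return (
    <button
      onMouseEnter={() => setIsHovered(true)}
      onMouseLeave={() => setIsHovered(false)}
      onClick={onDelete}
      // Opacity-only transition keeps the element in the layout and interactive.
      className={cn(
        "transition-opacity duration-200",
        isHovered ? "opacity-100" : "opacity-0",
      )}
    >
      ×
    </button>
  );
}
```

Because the fade is done with opacity rather than unmounting the button, the hit area never disappears, which keeps the delete action reachable even before the fade-in completes.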
### Changes 🏗️ - Added hover state management using `useState` to track when the edge delete button is hovered - Applied opacity transition to the delete button (fades in on hover, fades out when not hovered) - Added `onMouseEnter` and `onMouseLeave` handlers to the button to control hover state - Used `cn` utility for conditional className management - Button remains interactive even when `opacity-0` (still clickable for better UX) ### Checklist 📋 #### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] Hover over an edge in the flow editor and verify the delete button fades in smoothly - [x] Move mouse away from edge and verify the delete button fades out smoothly - [x] Click the delete button while hovered to verify it still removes the edge connection - [x] Test with multiple edges to ensure hover state is independent per edge --- .../build/components/FlowEditor/edges/CustomEdge.tsx | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/edges/CustomEdge.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/edges/CustomEdge.tsx index b49fd11602..ff80fdc8ac 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/edges/CustomEdge.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/edges/CustomEdge.tsx @@ -1,3 +1,4 @@ +import { memo, useState } from "react"; import { Button } from "@/components/atoms/Button/Button"; import { BaseEdge, @@ -20,7 +21,6 @@ export type CustomEdgeData = { }; export type CustomEdge = XYEdge; -import { memo } from "react"; const CustomEdge = ({ id, @@ -35,6 +35,8 @@ const CustomEdge = ({ selected, }: EdgeProps) => { const removeConnection = useEdgeStore((state) => state.removeEdge); + const [isHovered, setIsHovered] = useState(false); + const [edgePath, labelX, labelY] = getBezierPath({ sourceX, sourceY, @@ -69,12 +71,17 @@ const CustomEdge = ({ From 88731b1f76a6ad0887f16f1159d5b682f8db101c Mon Sep 17 00:00:00 2001 From: Zamil Majdy Date: Mon, 22 Dec 2025 12:13:06 +0100 Subject: [PATCH 17/25] feat(platform): marketplace update notifications with enhanced publishing workflow (#11630) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary This PR implements a comprehensive marketplace update notification system that allows users to discover and update to newer agent versions, along with enhanced publishing workflows and UI improvements. 
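As a rough illustration of the update-detection idea: the real logic lives in the new `useMarketplaceUpdate` hook, and `hasMarketplaceUpdate` below is a hypothetical helper, but the field names (`agentGraphVersions` on the store listing, `graph_version` on the library agent) come from the models touched in this PR.

```ts
import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import type { StoreAgentDetails } from "@/app/api/__generated__/models/storeAgentDetails";

// Sketch: an update is available when the newest approved graph version in the
// marketplace listing is greater than the user's local graph_version.
function hasMarketplaceUpdate(
  agent: LibraryAgent,
  listing: StoreAgentDetails | undefined,
): boolean {
  if (!listing?.agentGraphVersions?.length) return false;
  const latest = Math.max(
    ...listing.agentGraphVersions.map((v) => parseInt(v, 10)),
  );
  return latest > agent.graph_version;
}
```

Comparing graph versions directly (rather than store listing version numbers) is what makes the check safe against the `version` vs `agentGraphVersion` naming mismatch fixed elsewhere in this PR.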
image image image image ## Core Features ### 🔔 Marketplace Update Notifications - **Update detection**: Automatically detects when marketplace has newer agent versions than user's local copy - **Creator notifications**: Shows banners for creators with unpublished changes ready to publish - **Non-creator support**: Enables regular users to discover and update to newer marketplace versions - **Version comparison**: Intelligent logic comparing `graph_version` vs marketplace listing versions ### 📋 Enhanced Publishing Workflow - **Builder integration**: Added "Publish to Marketplace" button directly in the builder actions - **Unified banner system**: Consistent `MarketplaceBanners` component across library and marketplace pages - **Streamlined UX**: Fixed layout issues, improved button placement and styling - **Modal improvements**: Fixed thumbnail loading race conditions and infinite loop bugs ### 📚 Version History & Changelog - **Inline version history**: Added version changelog directly to marketplace agent pages - **Version comparison**: Clear display of available versions with current version highlighting - **Update mechanism**: Direct updates using `graph_version` parameter for accuracy ## Technical Implementation ### Backend Changes - **Database schema**: Added `agentGraphVersions` and `agentGraphId` fields to `StoreAgent` model - **API enhancement**: Updated store endpoints to expose graph version data for version comparison - **Data migration**: Fixed agent version field naming from `version` to `agentGraphVersions` - **Model updates**: Enhanced `LibraryAgentUpdateRequest` with `graph_version` field ### Frontend Architecture - **`useMarketplaceUpdate` hook**: Centralized marketplace update detection and creator identification - **`MarketplaceBanners` component**: Unified banner system with proper vertical layout and styling - **`AgentVersionChangelog` component**: Version history display for marketplace pages - **`PublishToMarketplace` component**: Builder integration with modal workflow ### Key Bug Fixes - **Thumbnail loading**: Fixed race condition where images wouldn't load on first modal open - **Infinite loops**: Used refs to prevent circular dependencies in `useThumbnailImages` hook - **Layout issues**: Fixed banner placement, removed duplicate breadcrumbs, corrected vertical layout - **Field naming**: Fixed `agent_version` vs `version` field inconsistencies across APIs ## Files Changed ### Backend - `autogpt_platform/backend/backend/server/v2/store/` - Enhanced store API with graph version data - `autogpt_platform/backend/backend/server/v2/library/` - Updated library API models - `autogpt_platform/backend/migrations/` - Database migrations for version fields - `autogpt_platform/backend/schema.prisma` - Schema updates for graph versions ### Frontend - `src/app/(platform)/components/MarketplaceBanners/` - New unified banner component - `src/app/(platform)/library/agents/[id]/components/` - Enhanced library views with banners - `src/app/(platform)/build/components/BuilderActions/` - Added marketplace publish button - `src/app/(platform)/marketplace/components/AgentInfo/` - Added inline version history - `src/components/contextual/PublishAgentModal/` - Fixed thumbnail loading and modal workflow ## User Experience Impact - **Better discovery**: Users automatically notified of newer agent versions - **Streamlined publishing**: Direct publish access from builder interface - **Reduced friction**: Fixed UI bugs, improved loading states, consistent design - **Enhanced transparency**: 
Inline version history on marketplace pages - **Creator workflow**: Better notifications for creators with unpublished changes ## Testing - ✅ Update banners appear correctly when marketplace has newer versions - ✅ Creator banners show for users with unpublished changes - ✅ Version comparison logic works with graph_version vs marketplace versions - ✅ Publish button in builder opens modal correctly with pre-populated data - ✅ Thumbnail images load properly on first modal open without infinite loops - ✅ Database migrations completed successfully with version field fixes - ✅ All existing tests updated and passing with new schema changes 🤖 Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude --------- Co-authored-by: Claude Co-authored-by: Lluis Agusti Co-authored-by: Ubbe Co-authored-by: Reinier van der Leer --- .../backend/api/features/library/db.py | 23 +- .../backend/api/features/library/model.py | 3 + .../api/features/library/routes/agents.py | 1 + .../backend/api/features/store/cache.py | 6 +- .../backend/backend/api/features/store/db.py | 28 +- .../backend/api/features/store/db_test.py | 6 + .../backend/api/features/store/model.py | 11 + .../backend/api/features/store/model_test.py | 2 + .../backend/api/features/store/routes.py | 8 +- .../backend/api/features/store/routes_test.py | 6 +- .../backend/backend/data/onboarding.py | 2 + .../migration.sql | 45 +++ .../migration.sql | 81 +++++ autogpt_platform/backend/schema.prisma | 12 +- .../backend/snapshots/agt_details | 8 +- .../marketplace/components/ExpandleRow.tsx | 6 +- .../BuilderActions/BuilderActions.tsx | 2 + .../PublishToMarketplace.tsx | 36 ++ .../usePublishToMarketplace.ts | 48 +++ .../NewAgentLibraryView.tsx | 332 +++++++++++------- .../components/AgentVersionChangelog.tsx | 137 ++++++++ .../components/other/AgentSettingsButton.tsx | 14 +- .../SelectedRunView/SelectedRunView.tsx | 42 ++- .../SelectedRunActions/SelectedRunActions.tsx | 2 - .../SelectedScheduleView.tsx | 3 + .../SelectedTemplateView.tsx | 4 +- .../SelectedTriggerView.tsx | 4 +- .../selected-views/SelectedViewLayout.tsx | 7 + .../hooks/useMarketplaceUpdate.ts | 163 +++++++++ .../useNewAgentLibraryView.ts | 7 +- .../components/AgentInfo/AgentInfo.tsx | 143 +++++++- .../MainAgentPage/MainAgentPage.tsx | 97 ++--- .../MainAgentPage/useMainAgentPage.ts | 14 +- .../AgentTableRow/AgentTableRow.tsx | 3 + .../AgentTableRow/useAgentTableRow.ts | 4 +- .../MainDashboardPage/MainDashboardPage.tsx | 1 + .../frontend/src/app/api/openapi.json | 43 +++ .../MarketplaceBanners/MarketplaceBanners.tsx | 102 ++++++ .../PublishAgentModal/PublishAgentModal.tsx | 10 +- .../AgentInfoStep/AgentInfoStep.tsx | 37 ++ .../components/useThumbnailImages.ts | 11 + .../components/AgentInfoStep/helpers.ts | 145 ++++++-- .../AgentInfoStep/useAgentInfoStep.ts | 11 +- .../components/AgentReviewStep.tsx | 4 +- .../contextual/PublishAgentModal/helpers.ts | 4 + .../PublishAgentModal/usePublishAgentModal.ts | 151 +++++++- .../contextual/marketplaceHelpers.ts | 57 +++ 47 files changed, 1600 insertions(+), 286 deletions(-) create mode 100644 autogpt_platform/backend/migrations/20251216182139_fix_store_submission_agent_version/migration.sql create mode 100644 autogpt_platform/backend/migrations/20251217174500_fix_store_agent_versions_to_graph_versions/migration.sql create mode 100644 autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/PublishToMarketplace.tsx create mode 100644 
autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/usePublishToMarketplace.ts create mode 100644 autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/AgentVersionChangelog.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/hooks/useMarketplaceUpdate.ts create mode 100644 autogpt_platform/frontend/src/components/contextual/MarketplaceBanners/MarketplaceBanners.tsx create mode 100644 autogpt_platform/frontend/src/components/contextual/marketplaceHelpers.ts diff --git a/autogpt_platform/backend/backend/api/features/library/db.py b/autogpt_platform/backend/backend/api/features/library/db.py index ad34326700..69ed0d2730 100644 --- a/autogpt_platform/backend/backend/api/features/library/db.py +++ b/autogpt_platform/backend/backend/api/features/library/db.py @@ -538,6 +538,7 @@ async def update_library_agent( library_agent_id: str, user_id: str, auto_update_version: Optional[bool] = None, + graph_version: Optional[int] = None, is_favorite: Optional[bool] = None, is_archived: Optional[bool] = None, is_deleted: Optional[Literal[False]] = None, @@ -550,6 +551,7 @@ async def update_library_agent( library_agent_id: The ID of the LibraryAgent to update. user_id: The owner of this LibraryAgent. auto_update_version: Whether the agent should auto-update to active version. + graph_version: Specific graph version to update to. is_favorite: Whether this agent is marked as a favorite. is_archived: Whether this agent is archived. settings: User-specific settings for this library agent. @@ -563,8 +565,8 @@ async def update_library_agent( """ logger.debug( f"Updating library agent {library_agent_id} for user {user_id} with " - f"auto_update_version={auto_update_version}, is_favorite={is_favorite}, " - f"is_archived={is_archived}, settings={settings}" + f"auto_update_version={auto_update_version}, graph_version={graph_version}, " + f"is_favorite={is_favorite}, is_archived={is_archived}, settings={settings}" ) update_fields: prisma.types.LibraryAgentUpdateManyMutationInput = {} if auto_update_version is not None: @@ -581,10 +583,23 @@ async def update_library_agent( update_fields["isDeleted"] = is_deleted if settings is not None: update_fields["settings"] = SafeJson(settings.model_dump()) - if not update_fields: - raise ValueError("No values were passed to update") try: + # If graph_version is provided, update to that specific version + if graph_version is not None: + # Get the current agent to find its graph_id + agent = await get_library_agent(id=library_agent_id, user_id=user_id) + # Update to the specified version using existing function + return await update_agent_version_in_library( + user_id=user_id, + agent_graph_id=agent.graph_id, + agent_graph_version=graph_version, + ) + + # Otherwise, just update the simple fields + if not update_fields: + raise ValueError("No values were passed to update") + n_updated = await prisma.models.LibraryAgent.prisma().update_many( where={"id": library_agent_id, "userId": user_id}, data=update_fields, diff --git a/autogpt_platform/backend/backend/api/features/library/model.py b/autogpt_platform/backend/backend/api/features/library/model.py index ab4bec586e..c20f82afae 100644 --- a/autogpt_platform/backend/backend/api/features/library/model.py +++ b/autogpt_platform/backend/backend/api/features/library/model.py @@ -385,6 +385,9 @@ class LibraryAgentUpdateRequest(pydantic.BaseModel): 
auto_update_version: Optional[bool] = pydantic.Field( default=None, description="Auto-update the agent version" ) + graph_version: Optional[int] = pydantic.Field( + default=None, description="Specific graph version to update to" + ) is_favorite: Optional[bool] = pydantic.Field( default=None, description="Mark the agent as a favorite" ) diff --git a/autogpt_platform/backend/backend/api/features/library/routes/agents.py b/autogpt_platform/backend/backend/api/features/library/routes/agents.py index 5a043009fc..38c34dd3b8 100644 --- a/autogpt_platform/backend/backend/api/features/library/routes/agents.py +++ b/autogpt_platform/backend/backend/api/features/library/routes/agents.py @@ -285,6 +285,7 @@ async def update_library_agent( library_agent_id=library_agent_id, user_id=user_id, auto_update_version=payload.auto_update_version, + graph_version=payload.graph_version, is_favorite=payload.is_favorite, is_archived=payload.is_archived, settings=payload.settings, diff --git a/autogpt_platform/backend/backend/api/features/store/cache.py b/autogpt_platform/backend/backend/api/features/store/cache.py index 7832069d49..5d9bc24e5d 100644 --- a/autogpt_platform/backend/backend/api/features/store/cache.py +++ b/autogpt_platform/backend/backend/api/features/store/cache.py @@ -43,10 +43,12 @@ async def _get_cached_store_agents( # Cache individual agent details for 15 minutes @cached(maxsize=200, ttl_seconds=300, shared_cache=True) -async def _get_cached_agent_details(username: str, agent_name: str): +async def _get_cached_agent_details( + username: str, agent_name: str, include_changelog: bool = False +): """Cached helper to get agent details.""" return await store_db.get_store_agent_details( - username=username, agent_name=agent_name + username=username, agent_name=agent_name, include_changelog=include_changelog ) diff --git a/autogpt_platform/backend/backend/api/features/store/db.py b/autogpt_platform/backend/backend/api/features/store/db.py index 12f1783468..8e5a39df89 100644 --- a/autogpt_platform/backend/backend/api/features/store/db.py +++ b/autogpt_platform/backend/backend/api/features/store/db.py @@ -257,7 +257,7 @@ async def log_search_term(search_query: str): async def get_store_agent_details( - username: str, agent_name: str + username: str, agent_name: str, include_changelog: bool = False ) -> store_model.StoreAgentDetails: """Get PUBLIC store agent details from the StoreAgent view""" logger.debug(f"Getting store agent details for {username}/{agent_name}") @@ -322,6 +322,27 @@ async def get_store_agent_details( else: recommended_schedule_cron = None + # Fetch changelog data if requested + changelog_data = None + if include_changelog and store_listing: + changelog_versions = ( + await prisma.models.StoreListingVersion.prisma().find_many( + where={ + "storeListingId": store_listing.id, + "submissionStatus": prisma.enums.SubmissionStatus.APPROVED, + }, + order=[{"version": "desc"}], + ) + ) + changelog_data = [ + store_model.ChangelogEntry( + version=str(version.version), + changes_summary=version.changesSummary or "No changes recorded", + date=version.createdAt, + ) + for version in changelog_versions + ] + logger.debug(f"Found agent details for {username}/{agent_name}") return store_model.StoreAgentDetails( store_listing_version_id=agent.storeListingVersionId, @@ -338,10 +359,13 @@ async def get_store_agent_details( runs=agent.runs, rating=agent.rating, versions=agent.versions, + agentGraphVersions=agent.agentGraphVersions, + agentGraphId=agent.agentGraphId, last_updated=agent.updated_at, 
active_version_id=active_version_id, has_approved_version=has_approved_version, recommended_schedule_cron=recommended_schedule_cron, + changelog=changelog_data, ) except store_exceptions.AgentNotFoundError: raise @@ -409,6 +433,8 @@ async def get_store_agent_by_version_id( runs=agent.runs, rating=agent.rating, versions=agent.versions, + agentGraphVersions=agent.agentGraphVersions, + agentGraphId=agent.agentGraphId, last_updated=agent.updated_at, ) except store_exceptions.AgentNotFoundError: diff --git a/autogpt_platform/backend/backend/api/features/store/db_test.py b/autogpt_platform/backend/backend/api/features/store/db_test.py index 641f392d86..b48ce5db95 100644 --- a/autogpt_platform/backend/backend/api/features/store/db_test.py +++ b/autogpt_platform/backend/backend/api/features/store/db_test.py @@ -40,6 +40,8 @@ async def test_get_store_agents(mocker): runs=10, rating=4.5, versions=["1.0"], + agentGraphVersions=["1"], + agentGraphId="test-graph-id", updated_at=datetime.now(), is_available=False, useForOnboarding=False, @@ -83,6 +85,8 @@ async def test_get_store_agent_details(mocker): runs=10, rating=4.5, versions=["1.0"], + agentGraphVersions=["1"], + agentGraphId="test-graph-id", updated_at=datetime.now(), is_available=False, useForOnboarding=False, @@ -105,6 +109,8 @@ async def test_get_store_agent_details(mocker): runs=15, rating=4.8, versions=["1.0", "2.0"], + agentGraphVersions=["1", "2"], + agentGraphId="test-graph-id-active", updated_at=datetime.now(), is_available=True, useForOnboarding=False, diff --git a/autogpt_platform/backend/backend/api/features/store/model.py b/autogpt_platform/backend/backend/api/features/store/model.py index 745c969ae6..972898b296 100644 --- a/autogpt_platform/backend/backend/api/features/store/model.py +++ b/autogpt_platform/backend/backend/api/features/store/model.py @@ -7,6 +7,12 @@ import pydantic from backend.util.models import Pagination +class ChangelogEntry(pydantic.BaseModel): + version: str + changes_summary: str + date: datetime.datetime + + class MyAgent(pydantic.BaseModel): agent_id: str agent_version: int @@ -55,12 +61,17 @@ class StoreAgentDetails(pydantic.BaseModel): runs: int rating: float versions: list[str] + agentGraphVersions: list[str] + agentGraphId: str last_updated: datetime.datetime recommended_schedule_cron: str | None = None active_version_id: str | None = None has_approved_version: bool = False + # Optional changelog data when include_changelog=True + changelog: list[ChangelogEntry] | None = None + class Creator(pydantic.BaseModel): name: str diff --git a/autogpt_platform/backend/backend/api/features/store/model_test.py b/autogpt_platform/backend/backend/api/features/store/model_test.py index 3633e6549e..a37966601b 100644 --- a/autogpt_platform/backend/backend/api/features/store/model_test.py +++ b/autogpt_platform/backend/backend/api/features/store/model_test.py @@ -72,6 +72,8 @@ def test_store_agent_details(): runs=50, rating=4.5, versions=["1.0", "2.0"], + agentGraphVersions=["1", "2"], + agentGraphId="test-graph-id", last_updated=datetime.datetime.now(), ) assert details.slug == "test-agent" diff --git a/autogpt_platform/backend/backend/api/features/store/routes.py b/autogpt_platform/backend/backend/api/features/store/routes.py index 7d4db50d3f..7816b25d5a 100644 --- a/autogpt_platform/backend/backend/api/features/store/routes.py +++ b/autogpt_platform/backend/backend/api/features/store/routes.py @@ -152,7 +152,11 @@ async def get_agents( tags=["store", "public"], response_model=store_model.StoreAgentDetails, ) -async 
def get_agent(username: str, agent_name: str): +async def get_agent( + username: str, + agent_name: str, + include_changelog: bool = fastapi.Query(default=False), +): """ This is only used on the AgentDetails Page. @@ -162,7 +166,7 @@ async def get_agent(username: str, agent_name: str): # URL decode the agent name since it comes from the URL path agent_name = urllib.parse.unquote(agent_name).lower() agent = await store_cache._get_cached_agent_details( - username=username, agent_name=agent_name + username=username, agent_name=agent_name, include_changelog=include_changelog ) return agent diff --git a/autogpt_platform/backend/backend/api/features/store/routes_test.py b/autogpt_platform/backend/backend/api/features/store/routes_test.py index b9c040c149..7fdc0b9ebb 100644 --- a/autogpt_platform/backend/backend/api/features/store/routes_test.py +++ b/autogpt_platform/backend/backend/api/features/store/routes_test.py @@ -374,6 +374,8 @@ def test_get_agent_details( runs=100, rating=4.5, versions=["1.0.0", "1.1.0"], + agentGraphVersions=["1", "2"], + agentGraphId="test-graph-id", last_updated=FIXED_NOW, ) mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agent_details") @@ -387,7 +389,9 @@ def test_get_agent_details( assert data.creator == "creator1" snapshot.snapshot_dir = "snapshots" snapshot.assert_match(json.dumps(response.json(), indent=2), "agt_details") - mock_db_call.assert_called_once_with(username="creator1", agent_name="test-agent") + mock_db_call.assert_called_once_with( + username="creator1", agent_name="test-agent", include_changelog=False + ) def test_get_creators_defaults( diff --git a/autogpt_platform/backend/backend/data/onboarding.py b/autogpt_platform/backend/backend/data/onboarding.py index d9977e9535..cc63b89afd 100644 --- a/autogpt_platform/backend/backend/data/onboarding.py +++ b/autogpt_platform/backend/backend/data/onboarding.py @@ -442,6 +442,8 @@ async def get_recommended_agents(user_id: str) -> list[StoreAgentDetails]: runs=agent.runs, rating=agent.rating, versions=agent.versions, + agentGraphVersions=agent.agentGraphVersions, + agentGraphId=agent.agentGraphId, last_updated=agent.updated_at, ) for agent in recommended_agents diff --git a/autogpt_platform/backend/migrations/20251216182139_fix_store_submission_agent_version/migration.sql b/autogpt_platform/backend/migrations/20251216182139_fix_store_submission_agent_version/migration.sql new file mode 100644 index 0000000000..676fe641b6 --- /dev/null +++ b/autogpt_platform/backend/migrations/20251216182139_fix_store_submission_agent_version/migration.sql @@ -0,0 +1,45 @@ +-- Fix StoreSubmission view to use agentGraphVersion instead of version for agent_version field +-- This ensures that submission.agent_version returns the actual agent graph version, not the store listing version number + +BEGIN; + +-- Recreate the view with the corrected agent_version field (using agentGraphVersion instead of version) +CREATE OR REPLACE VIEW "StoreSubmission" AS +SELECT + sl.id AS listing_id, + sl."owningUserId" AS user_id, + slv."agentGraphId" AS agent_id, + slv."agentGraphVersion" AS agent_version, + sl.slug, + COALESCE(slv.name, '') AS name, + slv."subHeading" AS sub_heading, + slv.description, + slv.instructions, + slv."imageUrls" AS image_urls, + slv."submittedAt" AS date_submitted, + slv."submissionStatus" AS status, + COALESCE(ar.run_count, 0::bigint) AS runs, + COALESCE(avg(sr.score::numeric), 0.0)::double precision AS rating, + slv.id AS store_listing_version_id, + slv."reviewerId" AS reviewer_id, + 
slv."reviewComments" AS review_comments, + slv."internalComments" AS internal_comments, + slv."reviewedAt" AS reviewed_at, + slv."changesSummary" AS changes_summary, + slv."videoUrl" AS video_url, + slv.categories +FROM "StoreListing" sl + JOIN "StoreListingVersion" slv ON slv."storeListingId" = sl.id + LEFT JOIN "StoreListingReview" sr ON sr."storeListingVersionId" = slv.id + LEFT JOIN ( + SELECT "AgentGraphExecution"."agentGraphId", count(*) AS run_count + FROM "AgentGraphExecution" + GROUP BY "AgentGraphExecution"."agentGraphId" + ) ar ON ar."agentGraphId" = slv."agentGraphId" +WHERE sl."isDeleted" = false +GROUP BY sl.id, sl."owningUserId", slv.id, slv."agentGraphId", slv."agentGraphVersion", sl.slug, slv.name, + slv."subHeading", slv.description, slv.instructions, slv."imageUrls", slv."submittedAt", + slv."submissionStatus", slv."reviewerId", slv."reviewComments", slv."internalComments", + slv."reviewedAt", slv."changesSummary", slv."videoUrl", slv.categories, ar.run_count; + +COMMIT; \ No newline at end of file diff --git a/autogpt_platform/backend/migrations/20251217174500_fix_store_agent_versions_to_graph_versions/migration.sql b/autogpt_platform/backend/migrations/20251217174500_fix_store_agent_versions_to_graph_versions/migration.sql new file mode 100644 index 0000000000..495ac113b4 --- /dev/null +++ b/autogpt_platform/backend/migrations/20251217174500_fix_store_agent_versions_to_graph_versions/migration.sql @@ -0,0 +1,81 @@ +-- Add agentGraphVersions field to StoreAgent view for consistent version comparison +-- This keeps the existing versions field unchanged and adds a new field with graph versions +-- This makes it safe for version comparison with LibraryAgent.graph_version + +BEGIN; + +-- Drop and recreate the StoreAgent view with new agentGraphVersions field +DROP VIEW IF EXISTS "StoreAgent"; + +CREATE OR REPLACE VIEW "StoreAgent" AS +WITH latest_versions AS ( + SELECT + "storeListingId", + MAX(version) AS max_version + FROM "StoreListingVersion" + WHERE "submissionStatus" = 'APPROVED' + GROUP BY "storeListingId" +), +agent_versions AS ( + SELECT + "storeListingId", + array_agg(DISTINCT version::text ORDER BY version::text) AS versions + FROM "StoreListingVersion" + WHERE "submissionStatus" = 'APPROVED' + GROUP BY "storeListingId" +), +agent_graph_versions AS ( + SELECT + "storeListingId", + array_agg(DISTINCT "agentGraphVersion"::text ORDER BY "agentGraphVersion"::text) AS graph_versions + FROM "StoreListingVersion" + WHERE "submissionStatus" = 'APPROVED' + GROUP BY "storeListingId" +) +SELECT + sl.id AS listing_id, + slv.id AS "storeListingVersionId", + slv."createdAt" AS updated_at, + sl.slug, + COALESCE(slv.name, '') AS agent_name, + slv."videoUrl" AS agent_video, + slv."agentOutputDemoUrl" AS agent_output_demo, + COALESCE(slv."imageUrls", ARRAY[]::text[]) AS agent_image, + slv."isFeatured" AS featured, + p.username AS creator_username, -- Allow NULL for malformed sub-agents + p."avatarUrl" AS creator_avatar, -- Allow NULL for malformed sub-agents + slv."subHeading" AS sub_heading, + slv.description, + slv.categories, + slv.search, + COALESCE(ar.run_count, 0::bigint) AS runs, + COALESCE(rs.avg_rating, 0.0)::double precision AS rating, + COALESCE(av.versions, ARRAY[slv.version::text]) AS versions, + COALESCE(agv.graph_versions, ARRAY[slv."agentGraphVersion"::text]) AS "agentGraphVersions", + slv."agentGraphId", + slv."isAvailable" AS is_available, + COALESCE(sl."useForOnboarding", false) AS "useForOnboarding" +FROM "StoreListing" sl +JOIN latest_versions lv + ON sl.id = 
lv."storeListingId" +JOIN "StoreListingVersion" slv + ON slv."storeListingId" = lv."storeListingId" + AND slv.version = lv.max_version + AND slv."submissionStatus" = 'APPROVED' +JOIN "AgentGraph" a + ON slv."agentGraphId" = a.id + AND slv."agentGraphVersion" = a.version +LEFT JOIN "Profile" p + ON sl."owningUserId" = p."userId" +LEFT JOIN "mv_review_stats" rs + ON sl.id = rs."storeListingId" +LEFT JOIN "mv_agent_run_counts" ar + ON a.id = ar."agentGraphId" +LEFT JOIN agent_versions av + ON sl.id = av."storeListingId" +LEFT JOIN agent_graph_versions agv + ON sl.id = agv."storeListingId" +WHERE sl."isDeleted" = false + AND sl."hasApprovedVersion" = true; + +COMMIT; \ No newline at end of file diff --git a/autogpt_platform/backend/schema.prisma b/autogpt_platform/backend/schema.prisma index d81cd4d1b1..2f6c109c03 100644 --- a/autogpt_platform/backend/schema.prisma +++ b/autogpt_platform/backend/schema.prisma @@ -734,11 +734,13 @@ view StoreAgent { description String categories String[] search Unsupported("tsvector")? @default(dbgenerated("''::tsvector")) - runs Int - rating Float - versions String[] - is_available Boolean @default(true) - useForOnboarding Boolean @default(false) + runs Int + rating Float + versions String[] + agentGraphVersions String[] + agentGraphId String + is_available Boolean @default(true) + useForOnboarding Boolean @default(false) // Materialized views used (refreshed every 15 minutes via pg_cron): // - mv_agent_run_counts - Pre-aggregated agent execution counts by agentGraphId diff --git a/autogpt_platform/backend/snapshots/agt_details b/autogpt_platform/backend/snapshots/agt_details index 649b5ed644..0d69f1c23a 100644 --- a/autogpt_platform/backend/snapshots/agt_details +++ b/autogpt_platform/backend/snapshots/agt_details @@ -23,8 +23,14 @@ "1.0.0", "1.1.0" ], + "agentGraphVersions": [ + "1", + "2" + ], + "agentGraphId": "test-graph-id", "last_updated": "2023-01-01T00:00:00", "recommended_schedule_cron": null, "active_version_id": null, - "has_approved_version": false + "has_approved_version": false, + "changelog": null } \ No newline at end of file diff --git a/autogpt_platform/frontend/src/app/(platform)/admin/marketplace/components/ExpandleRow.tsx b/autogpt_platform/frontend/src/app/(platform)/admin/marketplace/components/ExpandleRow.tsx index cf0f2389aa..e13a5eeed3 100644 --- a/autogpt_platform/frontend/src/app/(platform)/admin/marketplace/components/ExpandleRow.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/admin/marketplace/components/ExpandleRow.tsx @@ -102,7 +102,7 @@ export function ExpandableRow({ Version Status - {/* Changes */} + Changes Submitted Reviewed External Comments @@ -127,9 +127,9 @@ export function ExpandableRow({ )} {getStatusBadge(version.status)} - {/* + {version.changes_summary || "No summary"} - */} + {version.date_submitted ? 
formatDistanceToNow( diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/BuilderActions.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/BuilderActions.tsx index afe70bd434..64eb624621 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/BuilderActions.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/BuilderActions.tsx @@ -2,6 +2,7 @@ import { parseAsString, useQueryStates } from "nuqs"; import { AgentOutputs } from "./components/AgentOutputs/AgentOutputs"; import { RunGraph } from "./components/RunGraph/RunGraph"; import { ScheduleGraph } from "./components/ScheduleGraph/ScheduleGraph"; +import { PublishToMarketplace } from "./components/PublishToMarketplace/PublishToMarketplace"; import { memo } from "react"; export const BuilderActions = memo(() => { @@ -13,6 +14,7 @@ export const BuilderActions = memo(() => { +
); }); diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/PublishToMarketplace.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/PublishToMarketplace.tsx new file mode 100644 index 0000000000..1e6545dfbd --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/PublishToMarketplace.tsx @@ -0,0 +1,36 @@ +import { ShareIcon } from "@phosphor-icons/react"; +import { BuilderActionButton } from "../BuilderActionButton"; +import { + Tooltip, + TooltipContent, + TooltipTrigger, +} from "@/components/atoms/Tooltip/BaseTooltip"; +import { usePublishToMarketplace } from "./usePublishToMarketplace"; +import { PublishAgentModal } from "@/components/contextual/PublishAgentModal/PublishAgentModal"; + +export const PublishToMarketplace = ({ flowID }: { flowID: string | null }) => { + const { handlePublishToMarketplace, publishState, handleStateChange } = + usePublishToMarketplace({ flowID }); + + return ( + <> + + + + + + + Publish to Marketplace + + + + + ); +}; diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/usePublishToMarketplace.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/usePublishToMarketplace.ts new file mode 100644 index 0000000000..ceaa4de905 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/usePublishToMarketplace.ts @@ -0,0 +1,48 @@ +import { useState, useCallback } from "react"; +import type { StoreSubmission } from "@/app/api/__generated__/models/storeSubmission"; + +export type PublishStep = "select" | "info" | "review"; + +export type PublishState = { + isOpen: boolean; + step: PublishStep; + submissionData: StoreSubmission | null; +}; + +const defaultPublishState: PublishState = { + isOpen: false, + step: "select", + submissionData: null, +}; + +interface UsePublishToMarketplaceProps { + flowID: string | null; +} + +export function usePublishToMarketplace({ + flowID, +}: UsePublishToMarketplaceProps) { + const [publishState, setPublishState] = + useState(defaultPublishState); + + const handlePublishToMarketplace = () => { + if (!flowID) return; + + // Open the publish modal starting with the select step + setPublishState({ + isOpen: true, + step: "select", + submissionData: null, + }); + }; + + const handleStateChange = useCallback((newState: PublishState) => { + setPublishState(newState); + }, []); + + return { + handlePublishToMarketplace, + publishState, + handleStateChange, + }; +} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/NewAgentLibraryView.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/NewAgentLibraryView.tsx index 2d7a1b30f4..3768a0d150 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/NewAgentLibraryView.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/NewAgentLibraryView.tsx @@ -5,8 +5,13 @@ import { Breadcrumbs } from "@/components/molecules/Breadcrumbs/Breadcrumbs"; import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard"; import { cn } from "@/lib/utils"; import { PlusIcon } from "@phosphor-icons/react"; 
-import { useEffect } from "react"; +import { useEffect, useState } from "react"; import { RunAgentModal } from "./components/modals/RunAgentModal/RunAgentModal"; +import { useMarketplaceUpdate } from "./hooks/useMarketplaceUpdate"; +import { AgentVersionChangelog } from "./components/AgentVersionChangelog"; +import { MarketplaceBanners } from "@/components/contextual/MarketplaceBanners/MarketplaceBanners"; +import { PublishAgentModal } from "@/components/contextual/PublishAgentModal/PublishAgentModal"; +import { AgentSettingsButton } from "./components/other/AgentSettingsButton"; import { AgentRunsLoading } from "./components/other/AgentRunsLoading"; import { EmptySchedules } from "./components/other/EmptySchedules"; import { EmptyTasks } from "./components/other/EmptyTasks"; @@ -16,9 +21,9 @@ import { SectionWrap } from "./components/other/SectionWrap"; import { LoadingSelectedContent } from "./components/selected-views/LoadingSelectedContent"; import { SelectedRunView } from "./components/selected-views/SelectedRunView/SelectedRunView"; import { SelectedScheduleView } from "./components/selected-views/SelectedScheduleView/SelectedScheduleView"; +import { SelectedSettingsView } from "./components/selected-views/SelectedSettingsView/SelectedSettingsView"; import { SelectedTemplateView } from "./components/selected-views/SelectedTemplateView/SelectedTemplateView"; import { SelectedTriggerView } from "./components/selected-views/SelectedTriggerView/SelectedTriggerView"; -import { SelectedSettingsView } from "./components/selected-views/SelectedSettingsView/SelectedSettingsView"; import { SelectedViewLayout } from "./components/selected-views/SelectedViewLayout"; import { SidebarRunsList } from "./components/sidebar/SidebarRunsList/SidebarRunsList"; import { AGENT_LIBRARY_SECTION_PADDING_X } from "./helpers"; @@ -26,6 +31,7 @@ import { useNewAgentLibraryView } from "./useNewAgentLibraryView"; export function NewAgentLibraryView() { const { + agentId, agent, ready, activeTemplate, @@ -39,18 +45,79 @@ export function NewAgentLibraryView() { handleSelectRun, handleCountsChange, handleClearSelectedRun, - onRunInitiated, handleSelectSettings, + onRunInitiated, onTriggerSetup, onScheduleCreated, } = useNewAgentLibraryView(); + const { + hasAgentMarketplaceUpdate, + hasMarketplaceUpdate, + latestMarketplaceVersion, + isUpdating, + modalOpen, + setModalOpen, + handlePublishUpdate, + handleUpdateToLatest, + } = useMarketplaceUpdate({ agent }); + + const [changelogOpen, setChangelogOpen] = useState(false); + useEffect(() => { if (agent) { document.title = `${agent.name} - Library - AutoGPT Platform`; } }, [agent]); + function renderMarketplaceUpdateBanner() { + return ( + setChangelogOpen(true)} + /> + ); + } + + function renderPublishAgentModal() { + if (!modalOpen || !agent) return null; + + return ( + { + if (!state.isOpen) { + setModalOpen(false); + } + }} + /> + ); + } + + function renderVersionChangelog() { + if (!agent) return null; + + return ( + setChangelogOpen(false)} + /> + ); + } + if (error) { return ( -
-
+ <> +
+
+
+ +
-
- -
-
+ {renderPublishAgentModal()} + {renderVersionChangelog()} + ); } return ( -
- -
- - New task - - } - agent={agent} - onRunCreated={onRunInitiated} - onScheduleCreated={onScheduleCreated} - onTriggerSetup={onTriggerSetup} - initialInputValues={activeTemplate?.inputs} - initialInputCredentials={activeTemplate?.credentials} - /> -
+ <> +
+ +
+
+ + New task + + } + agent={agent} + onRunCreated={onRunInitiated} + onScheduleCreated={onScheduleCreated} + onTriggerSetup={onTriggerSetup} + initialInputValues={activeTemplate?.inputs} + initialInputCredentials={activeTemplate?.credentials} + /> + +
+
- -
- - {activeItem ? ( - activeItem === "settings" ? ( - - ) : activeTab === "scheduled" ? ( - - ) : activeTab === "templates" ? ( - handleSelectRun(execution.id, "runs")} - onSwitchToRunsTab={() => setActiveTab("runs")} - /> - ) : activeTab === "triggers" ? ( - setActiveTab("runs")} - /> - ) : ( - - ) - ) : sidebarLoading ? ( - - ) : activeTab === "scheduled" ? ( - - - - ) : activeTab === "templates" ? ( - - - - ) : activeTab === "triggers" ? ( - - - - ) : ( - - + + {activeItem ? ( + activeItem === "settings" ? ( + + ) : activeTab === "scheduled" ? ( + + ) : activeTab === "templates" ? ( + + handleSelectRun(execution.id, "runs") + } + onSwitchToRunsTab={() => setActiveTab("runs")} + banner={renderMarketplaceUpdateBanner()} + /> + ) : activeTab === "triggers" ? ( + setActiveTab("runs")} + banner={renderMarketplaceUpdateBanner()} + /> + ) : ( + + ) + ) : sidebarLoading ? ( + + ) : activeTab === "scheduled" ? ( + - - )} -
+ banner={renderMarketplaceUpdateBanner()} + > + + + ) : activeTab === "templates" ? ( + + + + ) : activeTab === "triggers" ? ( + + + + ) : ( + + + + )} +
+ {renderPublishAgentModal()} + {renderVersionChangelog()} + ); } diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/AgentVersionChangelog.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/AgentVersionChangelog.tsx new file mode 100644 index 0000000000..8781376b17 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/AgentVersionChangelog.tsx @@ -0,0 +1,137 @@ +"use client"; + +import { Text } from "@/components/atoms/Text/Text"; +import { Dialog } from "@/components/molecules/Dialog/Dialog"; +import { Skeleton } from "@/components/__legacy__/ui/skeleton"; +import { useGetV2GetSpecificAgent } from "@/app/api/__generated__/endpoints/store/store"; +import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; +import { okData } from "@/app/api/helpers"; +import type { StoreAgentDetails } from "@/app/api/__generated__/models/storeAgentDetails"; +import React from "react"; + +interface AgentVersionChangelogProps { + agent: LibraryAgent; + isOpen: boolean; + onClose: () => void; +} + +interface VersionInfo { + version: number; + isCurrentVersion: boolean; +} + +export function AgentVersionChangelog({ + agent, + isOpen, + onClose, +}: AgentVersionChangelogProps) { + // Get marketplace data if agent has marketplace listing + const { data: storeAgentData, isLoading } = useGetV2GetSpecificAgent( + agent?.marketplace_listing?.creator.slug || "", + agent?.marketplace_listing?.slug || "", + {}, + { + query: { + enabled: !!( + agent?.marketplace_listing?.creator.slug && + agent?.marketplace_listing?.slug + ), + }, + }, + ); + + // Create version info from available graph versions + const storeData = okData(storeAgentData) as StoreAgentDetails | undefined; + const agentVersions: VersionInfo[] = storeData?.agentGraphVersions + ? storeData.agentGraphVersions + .map((versionStr: string) => parseInt(versionStr, 10)) + .sort((a: number, b: number) => b - a) // Sort descending (newest first) + .map((version: number) => ({ + version, + isCurrentVersion: version === agent.graph_version, + })) + : []; + + const renderVersionItem = (versionInfo: VersionInfo) => { + return ( +
+
+
+ + v{versionInfo.version} + + {versionInfo.isCurrentVersion && ( + + Current + + )} +
+
+ + + Available marketplace version + +
+ ); + }; + + return ( + { + if (!isOpen) { + onClose(); + } + }, + }} + > + +
+ {isLoading ? ( +
+ + + + +
+ ) : agentVersions.length > 0 ? ( +
+ + View changes and updates across different versions of this + agent. + + {agentVersions.map(renderVersionItem)} +
+ ) : ( +
+ + No version history available for this agent. + +
+ )} +
+
+
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton.tsx index bc710ebc4e..11dcbd943f 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton.tsx @@ -6,9 +6,14 @@ import { useAgentSafeMode } from "@/hooks/useAgentSafeMode"; interface Props { agent: LibraryAgent; onSelectSettings: () => void; + selected?: boolean; } -export function AgentSettingsButton({ agent, onSelectSettings }: Props) { +export function AgentSettingsButton({ + agent, + onSelectSettings, + selected, +}: Props) { const { hasHITLBlocks } = useAgentSafeMode(agent); if (!hasHITLBlocks) { @@ -17,13 +22,16 @@ export function AgentSettingsButton({ agent, onSelectSettings }: Props) { return ( ); } diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/SelectedRunView.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/SelectedRunView.tsx index 9e470139ff..c66f0e9245 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/SelectedRunView.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/SelectedRunView.tsx @@ -32,6 +32,7 @@ interface Props { runId: string; onSelectRun?: (id: string) => void; onClearSelectedRun?: () => void; + banner?: React.ReactNode; onSelectSettings?: () => void; selectedSettings?: boolean; } @@ -41,7 +42,9 @@ export function SelectedRunView({ runId, onSelectRun, onClearSelectedRun, + banner, onSelectSettings, + selectedSettings, }: Props) { const { run, preset, isLoading, responseError, httpError } = useSelectedRunView(agent.graph_id, runId); @@ -81,7 +84,12 @@ export function SelectedRunView({ return (
- +
@@ -105,7 +113,7 @@ export function SelectedRunView({ )} @@ -130,20 +138,22 @@ export function SelectedRunView({ {/* Human-in-the-Loop Reviews Section */} {withReviews && ( -
- {reviewsLoading ? ( - - ) : pendingReviews.length > 0 ? ( - - ) : ( - - No pending reviews for this execution - - )} +
+ + {reviewsLoading ? ( + + ) : pendingReviews.length > 0 ? ( + + ) : ( + + No pending reviews for this execution + + )} +
)} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/SelectedRunActions.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/SelectedRunActions.tsx index cb821b2ecd..83c836def4 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/SelectedRunActions.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/SelectedRunActions.tsx @@ -15,7 +15,6 @@ import { SelectedActionsWrap } from "../../../SelectedActionsWrap"; import { ShareRunButton } from "../../../ShareRunButton/ShareRunButton"; import { CreateTemplateModal } from "../CreateTemplateModal/CreateTemplateModal"; import { useSelectedRunActions } from "./useSelectedRunActions"; -import { SafeModeToggle } from "../SafeModeToggle"; type Props = { agent: LibraryAgent; @@ -113,7 +112,6 @@ export function SelectedRunActions({ shareToken={run.share_token} /> )} - {canRunManually && ( <> + )} +
+ ) : ( +
+ Version {version} +
+ )}
diff --git a/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainAgentPage/MainAgentPage.tsx b/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainAgentPage/MainAgentPage.tsx index 5eb3984cbc..4e5b9de6c4 100644 --- a/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainAgentPage/MainAgentPage.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainAgentPage/MainAgentPage.tsx @@ -2,6 +2,7 @@ import { Separator } from "@/components/__legacy__/ui/separator"; import { Breadcrumbs } from "@/components/molecules/Breadcrumbs/Breadcrumbs"; import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard"; +import { okData } from "@/app/api/helpers"; import { MarketplaceAgentPageParams } from "../../agent/[creator]/[slug]/page"; import { AgentImages } from "../AgentImages/AgentImage"; import { AgentInfo } from "../AgentInfo/AgentInfo"; @@ -10,24 +11,33 @@ import { AgentsSection } from "../AgentsSection/AgentsSection"; import { BecomeACreator } from "../BecomeACreator/BecomeACreator"; import { useMainAgentPage } from "./useMainAgentPage"; -type MainAgentPageProps = { +interface Props { params: MarketplaceAgentPageParams; -}; +} -export const MainAgentPage = ({ params }: MainAgentPageProps) => { +export function MainAgentPage({ params }: Props) { const { agent, - otherAgents, - similarAgents, - libraryAgent, + user, isLoading, hasError, - user, + similarAgents, + otherAgents, + libraryAgent, } = useMainAgentPage({ params }); if (isLoading) { - return ; + return ( +
+
+
+ +
+
+
+ ); } + if (hasError) { return (
@@ -46,7 +56,8 @@ export const MainAgentPage = ({ params }: MainAgentPageProps) => { ); } - if (!agent) { + const agentData = okData(agent); + if (!agentData) { return (
@@ -55,8 +66,6 @@ export const MainAgentPage = ({ params }: MainAgentPageProps) => { isSuccess={false} responseError={{ message: "Agent not found" }} context="agent page" - onRetry={() => window.location.reload()} - className="w-full max-w-md" />
@@ -67,10 +76,10 @@ export const MainAgentPage = ({ params }: MainAgentPageProps) => { const breadcrumbs = [ { name: "Marketplace", link: "/marketplace" }, { - name: agent.creator, - link: `/marketplace/creator/${encodeURIComponent(agent.creator)}`, + name: agentData.creator ?? "", + link: `/marketplace/creator/${encodeURIComponent(agentData.creator ?? "")}`, }, - { name: agent.agent_name, link: "#" }, + { name: agentData.agent_name ?? "", link: "#" }, ]; return ( @@ -82,18 +91,29 @@ export const MainAgentPage = ({ params }: MainAgentPageProps) => {
parseInt(v, 10)), + ).toString() + : "1" + } + storeListingVersionId={agentData.store_listing_version_id ?? ""} isAgentAddedToLibrary={Boolean(libraryAgent)} + creatorSlug={params.creator} + agentSlug={params.slug} />
{ const orderedImages: string[] = []; // 1. YouTube/Overview video (if it exists) - if (agent.agent_video) { - orderedImages.push(agent.agent_video); + if (agentData.agent_video) { + orderedImages.push(agentData.agent_video); } // 2. First image (hero) - if (agent.agent_image.length > 0) { - orderedImages.push(agent.agent_image[0]); + if (agentData.agent_image?.length > 0) { + orderedImages.push(agentData.agent_image[0]); } // 3. Agent Output Demo (if it exists) - if ((agent as any).agent_output_demo) { - orderedImages.push((agent as any).agent_output_demo); + if (agentData.agent_output_demo) { + orderedImages.push(agentData.agent_output_demo); } // 4. Additional images - if (agent.agent_image.length > 1) { - orderedImages.push(...agent.agent_image.slice(1)); + if (agentData.agent_image && agentData.agent_image.length > 1) { + orderedImages.push(...agentData.agent_image.slice(1)); } return orderedImages; @@ -129,7 +149,7 @@ export const MainAgentPage = ({ params }: MainAgentPageProps) => { )} @@ -140,13 +160,8 @@ export const MainAgentPage = ({ params }: MainAgentPageProps) => { sectionTitle="Similar agents" /> )} - - +
); -}; +} diff --git a/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainAgentPage/useMainAgentPage.ts b/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainAgentPage/useMainAgentPage.ts index ef38f336d0..674955545e 100644 --- a/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainAgentPage/useMainAgentPage.ts +++ b/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainAgentPage/useMainAgentPage.ts @@ -5,8 +5,8 @@ import { import { MarketplaceAgentPageParams } from "../../agent/[creator]/[slug]/page"; import { useGetV2GetAgentByStoreId } from "@/app/api/__generated__/endpoints/library/library"; import { StoreAgentsResponse } from "@/app/api/__generated__/models/storeAgentsResponse"; -import { StoreAgentDetails } from "@/app/api/__generated__/models/storeAgentDetails"; import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; +import { okData } from "@/app/api/helpers"; import { useSupabase } from "@/lib/supabase/hooks/useSupabase"; export const useMainAgentPage = ({ @@ -20,13 +20,7 @@ export const useMainAgentPage = ({ data: agent, isLoading: isAgentLoading, isError: isAgentError, - } = useGetV2GetSpecificAgent(creator_lower, params.slug, { - query: { - select: (x) => { - return x.data as StoreAgentDetails; - }, - }, - }); + } = useGetV2GetSpecificAgent(creator_lower, params.slug); const { data: otherAgents, isLoading: isOtherAgentsLoading, @@ -59,12 +53,12 @@ export const useMainAgentPage = ({ data: libraryAgent, isLoading: isLibraryAgentLoading, isError: isLibraryAgentError, - } = useGetV2GetAgentByStoreId(agent?.active_version_id ?? "", { + } = useGetV2GetAgentByStoreId(okData(agent)?.active_version_id ?? "", { query: { select: (x) => { return x.data as LibraryAgent; }, - enabled: !!user && !!agent?.active_version_id, + enabled: !!user && !!okData(agent)?.active_version_id, }, }); diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/AgentTableRow/AgentTableRow.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/AgentTableRow/AgentTableRow.tsx index bdc735ea80..5b85ade1ae 100644 --- a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/AgentTableRow/AgentTableRow.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/AgentTableRow/AgentTableRow.tsx @@ -33,6 +33,7 @@ export interface AgentTableRowProps { video_url?: string; categories?: string[]; store_listing_version_id?: string; + changes_summary?: string; onViewSubmission: (submission: StoreSubmission) => void; onDeleteSubmission: (submission_id: string) => void; onEditSubmission: ( @@ -58,6 +59,7 @@ export const AgentTableRow = ({ video_url, categories, store_listing_version_id, + changes_summary, onViewSubmission, onDeleteSubmission, onEditSubmission, @@ -80,6 +82,7 @@ export const AgentTableRow = ({ video_url, categories, store_listing_version_id, + changes_summary, }); // Determine if we should show Edit or View button diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/AgentTableRow/useAgentTableRow.ts b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/AgentTableRow/useAgentTableRow.ts index 7014eec198..14fbac4336 100644 --- a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/AgentTableRow/useAgentTableRow.ts +++ 
b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/AgentTableRow/useAgentTableRow.ts @@ -25,6 +25,7 @@ interface useAgentTableRowProps { video_url?: string; categories?: string[]; store_listing_version_id?: string; + changes_summary?: string; } export const useAgentTableRow = ({ @@ -44,6 +45,7 @@ export const useAgentTableRow = ({ video_url, categories, store_listing_version_id, + changes_summary, }: useAgentTableRowProps) => { const handleView = () => { onViewSubmission({ @@ -72,7 +74,7 @@ export const useAgentTableRow = ({ image_urls: imageSrc, video_url, categories, - changes_summary: "Update Submission", + changes_summary: changes_summary || "Update Submission", store_listing_version_id, agent_id, }); diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/MainDashboardPage/MainDashboardPage.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/MainDashboardPage/MainDashboardPage.tsx index 71968d08c9..e53244db77 100644 --- a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/MainDashboardPage/MainDashboardPage.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/MainDashboardPage/MainDashboardPage.tsx @@ -98,6 +98,7 @@ export const MainDashboardPage = () => { slug: submission.slug, store_listing_version_id: submission.store_listing_version_id || undefined, + changes_summary: submission.changes_summary || undefined, }))} onViewSubmission={onViewSubmission} onDeleteSubmission={onDeleteSubmission} diff --git a/autogpt_platform/frontend/src/app/api/openapi.json b/autogpt_platform/frontend/src/app/api/openapi.json index 2ead2189ed..ea3bbcc5d8 100644 --- a/autogpt_platform/frontend/src/app/api/openapi.json +++ b/autogpt_platform/frontend/src/app/api/openapi.json @@ -5113,6 +5113,16 @@ "in": "path", "required": true, "schema": { "type": "string", "title": "Agent Name" } + }, + { + "name": "include_changelog", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "default": false, + "title": "Include Changelog" + } } ], "responses": { @@ -6510,6 +6520,16 @@ "required": ["file"], "title": "Body_postV2Upload submission media" }, + "ChangelogEntry": { + "properties": { + "version": { "type": "string", "title": "Version" }, + "changes_summary": { "type": "string", "title": "Changes Summary" }, + "date": { "type": "string", "format": "date-time", "title": "Date" } + }, + "type": "object", + "required": ["version", "changes_summary", "date"], + "title": "ChangelogEntry" + }, "ChatRequest": { "properties": { "query": { "type": "string", "title": "Query" }, @@ -7953,6 +7973,11 @@ "title": "Auto Update Version", "description": "Auto-update the agent version" }, + "graph_version": { + "anyOf": [{ "type": "integer" }, { "type": "null" }], + "title": "Graph Version", + "description": "Specific graph version to update to" + }, "is_favorite": { "anyOf": [{ "type": "boolean" }, { "type": "null" }], "title": "Is Favorite", @@ -9508,6 +9533,12 @@ "type": "array", "title": "Versions" }, + "agentGraphVersions": { + "items": { "type": "string" }, + "type": "array", + "title": "Agentgraphversions" + }, + "agentGraphId": { "type": "string", "title": "Agentgraphid" }, "last_updated": { "type": "string", "format": "date-time", @@ -9525,6 +9556,16 @@ "type": "boolean", "title": "Has Approved Version", "default": false + }, + "changelog": { + "anyOf": [ + { + "items": { "$ref": "#/components/schemas/ChangelogEntry" }, + "type": 
"array" + }, + { "type": "null" } + ], + "title": "Changelog" } }, "type": "object", @@ -9543,6 +9584,8 @@ "runs", "rating", "versions", + "agentGraphVersions", + "agentGraphId", "last_updated" ], "title": "StoreAgentDetails" diff --git a/autogpt_platform/frontend/src/components/contextual/MarketplaceBanners/MarketplaceBanners.tsx b/autogpt_platform/frontend/src/components/contextual/MarketplaceBanners/MarketplaceBanners.tsx new file mode 100644 index 0000000000..4f826f6e85 --- /dev/null +++ b/autogpt_platform/frontend/src/components/contextual/MarketplaceBanners/MarketplaceBanners.tsx @@ -0,0 +1,102 @@ +"use client"; + +import { Button } from "@/components/atoms/Button/Button"; +import { Text } from "@/components/atoms/Text/Text"; + +interface MarketplaceBannersProps { + hasUpdate?: boolean; + latestVersion?: number; + hasUnpublishedChanges?: boolean; + currentVersion?: number; + isUpdating?: boolean; + onUpdate?: () => void; + onPublish?: () => void; + onViewChanges?: () => void; +} + +export function MarketplaceBanners({ + hasUpdate, + latestVersion, + hasUnpublishedChanges, + isUpdating, + onUpdate, + onPublish, +}: MarketplaceBannersProps) { + const renderUpdateBanner = () => { + if (hasUpdate && latestVersion) { + return ( +
+
+
+ + Update available + + + You should update your agent in order to get the latest / best + results + +
+ {onUpdate && ( +
+ +
+ )} +
+
+ ); + } + return null; + }; + + const renderUnpublishedChangesBanner = () => { + if (hasUnpublishedChanges) { + return ( +
+
+
+ + Unpublished changes + + + You've made changes to this agent that aren't + published yet. Would you like to publish the latest version? + +
+ {onPublish && ( +
+ +
+ )} +
+
+ ); + } + return null; + }; + + return ( + <> + {renderUpdateBanner()} + {renderUnpublishedChangesBanner()} + + ); +} diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/PublishAgentModal.tsx b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/PublishAgentModal.tsx index 2f5f9aeacf..dd91094f9c 100644 --- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/PublishAgentModal.tsx +++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/PublishAgentModal.tsx @@ -18,6 +18,8 @@ export function PublishAgentModal({ trigger, targetState, onStateChange, + preSelectedAgentId, + preSelectedAgentVersion, }: Props) { const { // State @@ -34,7 +36,12 @@ export function PublishAgentModal({ handleGoToBuilder, handleSuccessFromInfo, handleBack, - } = usePublishAgentModal({ targetState, onStateChange }); + } = usePublishAgentModal({ + targetState, + onStateChange, + preSelectedAgentId, + preSelectedAgentVersion, + }); const { user, isUserLoading } = useSupabase(); @@ -65,6 +72,7 @@ export function PublishAgentModal({ selectedAgentId={selectedAgentId} selectedAgentVersion={selectedAgentVersion} initialData={initialData} + isMarketplaceUpdate={!!currentState.submissionData} /> ); case "review": diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/AgentInfoStep.tsx b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/AgentInfoStep.tsx index 3ec680ca95..7cd6b25d91 100644 --- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/AgentInfoStep.tsx +++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/AgentInfoStep.tsx @@ -19,6 +19,7 @@ export function AgentInfoStep({ selectedAgentId, selectedAgentVersion, initialData, + isMarketplaceUpdate, }: Props) { const { form, @@ -34,6 +35,7 @@ export function AgentInfoStep({ selectedAgentId, selectedAgentVersion, initialData, + isMarketplaceUpdate, }); const [cronScheduleDialogOpen, setCronScheduleDialogOpen] = @@ -65,6 +67,41 @@ export function AgentInfoStep({ + {/* Changes summary field - only shown for updates */} + {isMarketplaceUpdate && ( + ( +
+ + + This is required to help users understand what's + different in this update. + +
+ )} + /> + )} + + {/* Optional section label for updates */} + {isMarketplaceUpdate && ( +
+ + Optional: Update any of the following details (or leave them + as-is) + +
+ )} + (null); const { toast } = useToast(); + // Memoize the stringified version to detect actual changes + const initialImagesKey = JSON.stringify(initialImages); + + // Update images when initialImages prop changes (by value, not reference) + useEffect(() => { + if (initialImages.length > 0) { + setImages(initialImages); + setSelectedImage(initialSelectedImage || initialImages[0]); + } + }, [initialImagesKey, initialSelectedImage]); // Use stringified key instead of array reference + // Notify parent when images change useEffect(() => { onImagesChange(images); diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/helpers.ts b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/helpers.ts index bf7ed17219..86e6c0ce30 100644 --- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/helpers.ts +++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/helpers.ts @@ -1,45 +1,113 @@ import z from "zod"; import { validateYouTubeUrl } from "@/lib/utils"; -export const publishAgentSchema = z.object({ - title: z - .string() - .min(1, "Title is required") - .max(100, "Title must be less than 100 characters"), - subheader: z - .string() - .min(1, "Subheader is required") - .max(200, "Subheader must be less than 200 characters"), - slug: z - .string() - .min(1, "Slug is required") - .max(50, "Slug must be less than 50 characters") - .regex( - /^[a-z0-9-]+$/, - "Slug can only contain lowercase letters, numbers, and hyphens", - ), - youtubeLink: z - .string() - .refine(validateYouTubeUrl, "Please enter a valid YouTube URL"), - category: z.string().min(1, "Category is required"), - description: z - .string() - .min(1, "Description is required") - .max(1000, "Description must be less than 1000 characters"), - recommendedScheduleCron: z.string().optional(), - instructions: z - .string() - .optional() - .refine( - (val) => !val || val.length <= 2000, - "Instructions must be less than 2000 characters", - ), - agentOutputDemo: z - .string() - .refine(validateYouTubeUrl, "Please enter a valid YouTube URL"), -}); +// Create conditional schema that changes based on whether it's a marketplace update +export const publishAgentSchemaFactory = ( + isMarketplaceUpdate: boolean = false, +) => { + const baseSchema = { + changesSummary: isMarketplaceUpdate + ? z + .string() + .min(1, "Changes summary is required for updates") + .max(500, "Changes summary must be less than 500 characters") + : z.string().optional(), + title: isMarketplaceUpdate + ? z + .string() + .optional() + .refine( + (val) => !val || val.length <= 100, + "Title must be less than 100 characters", + ) + : z + .string() + .min(1, "Title is required") + .max(100, "Title must be less than 100 characters"), + subheader: isMarketplaceUpdate + ? z + .string() + .optional() + .refine( + (val) => !val || val.length <= 200, + "Subheader must be less than 200 characters", + ) + : z + .string() + .min(1, "Subheader is required") + .max(200, "Subheader must be less than 200 characters"), + slug: isMarketplaceUpdate + ? 
z + .string() + .optional() + .refine( + (val) => !val || (val.length <= 50 && /^[a-z0-9-]+$/.test(val)), + "Slug can only contain lowercase letters, numbers, and hyphens", + ) + : z + .string() + .min(1, "Slug is required") + .max(50, "Slug must be less than 50 characters") + .regex( + /^[a-z0-9-]+$/, + "Slug can only contain lowercase letters, numbers, and hyphens", + ), + youtubeLink: isMarketplaceUpdate + ? z + .string() + .optional() + .refine( + (val) => !val || validateYouTubeUrl(val), + "Please enter a valid YouTube URL", + ) + : z + .string() + .refine(validateYouTubeUrl, "Please enter a valid YouTube URL"), + category: isMarketplaceUpdate + ? z.string().optional() + : z.string().min(1, "Category is required"), + description: isMarketplaceUpdate + ? z + .string() + .optional() + .refine( + (val) => !val || val.length <= 1000, + "Description must be less than 1000 characters", + ) + : z + .string() + .min(1, "Description is required") + .max(1000, "Description must be less than 1000 characters"), + recommendedScheduleCron: z.string().optional(), + instructions: z + .string() + .optional() + .refine( + (val) => !val || val.length <= 2000, + "Instructions must be less than 2000 characters", + ), + agentOutputDemo: isMarketplaceUpdate + ? z + .string() + .optional() + .refine( + (val) => !val || validateYouTubeUrl(val), + "Please enter a valid YouTube URL", + ) + : z + .string() + .refine(validateYouTubeUrl, "Please enter a valid YouTube URL"), + }; -export type PublishAgentFormData = z.infer; + return z.object(baseSchema); +}; + +// Default schema for backwards compatibility +export const publishAgentSchema = publishAgentSchemaFactory(false); + +export type PublishAgentFormData = z.infer< + ReturnType +>; export interface PublishAgentInfoInitialData { agent_id: string; @@ -54,4 +122,5 @@ export interface PublishAgentInfoInitialData { recommendedScheduleCron?: string; instructions?: string; agentOutputDemo?: string; + changesSummary?: string; } diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/useAgentInfoStep.ts b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/useAgentInfoStep.ts index 6bec8dd355..f3dcfa1f21 100644 --- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/useAgentInfoStep.ts +++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/useAgentInfoStep.ts @@ -9,7 +9,7 @@ import * as Sentry from "@sentry/nextjs"; import { PublishAgentFormData, PublishAgentInfoInitialData, - publishAgentSchema, + publishAgentSchemaFactory, } from "./helpers"; export interface Props { @@ -18,6 +18,7 @@ export interface Props { selectedAgentId: string | null; selectedAgentVersion: number | null; initialData?: PublishAgentInfoInitialData; + isMarketplaceUpdate?: boolean; } export function useAgentInfoStep({ @@ -26,6 +27,7 @@ export function useAgentInfoStep({ selectedAgentId, selectedAgentVersion, initialData, + isMarketplaceUpdate = false, }: Props) { const [agentId, setAgentId] = useState(null); const [images, setImages] = useState([]); @@ -36,8 +38,9 @@ export function useAgentInfoStep({ const api = useBackendAPI(); const form = useForm({ - resolver: zodResolver(publishAgentSchema), + resolver: zodResolver(publishAgentSchemaFactory(isMarketplaceUpdate)), defaultValues: { + changesSummary: "", title: "", subheader: "", slug: "", @@ -61,6 +64,7 @@ export function useAgentInfoStep({ // Update form with 
initial data form.reset({ + changesSummary: initialData.changesSummary || "", title: initialData.title, subheader: initialData.subheader, slug: initialData.slug.toLocaleLowerCase().trim(), @@ -104,9 +108,10 @@ export function useAgentInfoStep({ agent_output_demo_url: data.agentOutputDemo || "", agent_id: selectedAgentId || "", agent_version: selectedAgentVersion || 0, - slug: data.slug.replace(/\s+/g, "-"), + slug: (data.slug || "").replace(/\s+/g, "-"), categories: filteredCategories, recommended_schedule_cron: data.recommendedScheduleCron || null, + changes_summary: data.changesSummary || null, } as any); await queryClient.invalidateQueries({ diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentReviewStep.tsx b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentReviewStep.tsx index ba7456aa0d..58caa334dd 100644 --- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentReviewStep.tsx +++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentReviewStep.tsx @@ -52,7 +52,7 @@ export function AgentReviewStep({ {subheader} @@ -80,7 +80,7 @@ export function AgentReviewStep({ {description ? ( {description} diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/helpers.ts b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/helpers.ts index a7175736b4..358c4da260 100644 --- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/helpers.ts +++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/helpers.ts @@ -8,4 +8,8 @@ export const emptyModalState = { category: "", description: "", recommendedScheduleCron: "", + instructions: "", + agentOutputDemo: "", + changesSummary: "", + additionalImages: [], }; diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/usePublishAgentModal.ts b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/usePublishAgentModal.ts index 8face2c6b8..f83698d8e7 100644 --- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/usePublishAgentModal.ts +++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/usePublishAgentModal.ts @@ -3,6 +3,12 @@ import { useCallback, useEffect, useState } from "react"; import { PublishAgentInfoInitialData } from "./components/AgentInfoStep/helpers"; import { useRouter } from "next/navigation"; import { emptyModalState } from "./helpers"; +import { + useGetV2GetMyAgents, + useGetV2ListMySubmissions, +} from "@/app/api/__generated__/endpoints/store/store"; +import { okData } from "@/app/api/helpers"; +import type { MyAgent } from "@/app/api/__generated__/models/myAgent"; const defaultTargetState: PublishState = { isOpen: false, @@ -22,9 +28,16 @@ export interface Props { trigger?: React.ReactNode; targetState?: PublishState; onStateChange?: (state: PublishState) => void; + preSelectedAgentId?: string; + preSelectedAgentVersion?: number; } -export function usePublishAgentModal({ targetState, onStateChange }: Props) { +export function usePublishAgentModal({ + targetState, + onStateChange, + preSelectedAgentId, + preSelectedAgentVersion, +}: Props) { const [currentState, setCurrentState] = useState( targetState || defaultTargetState, ); @@ -42,14 +55,20 @@ export function usePublishAgentModal({ targetState, onStateChange }: Props) { const [_, setSelectedAgent] = useState(null); - const [selectedAgentId, setSelectedAgentId] = 
useState(null); + const [selectedAgentId, setSelectedAgentId] = useState( + preSelectedAgentId || null, + ); const [selectedAgentVersion, setSelectedAgentVersion] = useState< number | null - >(null); + >(preSelectedAgentVersion || null); const router = useRouter(); + // Fetch agent data for pre-populating form when agent is pre-selected + const { data: myAgents } = useGetV2GetMyAgents(); + const { data: mySubmissions } = useGetV2ListMySubmissions(); + // Sync currentState with targetState when it changes from outside useEffect(() => { if (targetState) { @@ -60,13 +79,90 @@ export function usePublishAgentModal({ targetState, onStateChange }: Props) { // Reset internal state when modal opens useEffect(() => { if (!targetState) return; - if (targetState.isOpen && targetState.step === "select") { + if (targetState.isOpen) { setSelectedAgent(null); - setSelectedAgentId(null); - setSelectedAgentVersion(null); + setSelectedAgentId(preSelectedAgentId || null); + setSelectedAgentVersion(preSelectedAgentVersion || null); setInitialData(emptyModalState); } - }, [targetState]); + }, [targetState, preSelectedAgentId, preSelectedAgentVersion]); + + // Pre-populate form data when modal opens with info step and pre-selected agent + useEffect(() => { + if ( + !targetState?.isOpen || + targetState.step !== "info" || + !preSelectedAgentId || + !preSelectedAgentVersion + ) + return; + const agentsData = okData(myAgents) as any; + const submissionsData = okData(mySubmissions) as any; + + if (!agentsData || !submissionsData) return; + + // Find the agent data + const agent = agentsData.agents?.find( + (a: MyAgent) => a.agent_id === preSelectedAgentId, + ); + if (!agent) return; + + // Find published submission data for this agent (for updates) + const publishedSubmissionData = submissionsData.submissions + ?.filter( + (s: StoreSubmission) => + s.status === "APPROVED" && s.agent_id === preSelectedAgentId, + ) + .sort( + (a: StoreSubmission, b: StoreSubmission) => + b.agent_version - a.agent_version, + )[0]; + + // Populate initial data (same logic as handleNextFromSelect) + const initialFormData: PublishAgentInfoInitialData = publishedSubmissionData + ? 
{ + agent_id: preSelectedAgentId, + title: publishedSubmissionData.name, + subheader: publishedSubmissionData.sub_heading || "", + description: publishedSubmissionData.description, + instructions: publishedSubmissionData.instructions || "", + youtubeLink: publishedSubmissionData.video_url || "", + agentOutputDemo: publishedSubmissionData.agent_output_demo_url || "", + additionalImages: [ + ...new Set(publishedSubmissionData.image_urls || []), + ].filter(Boolean) as string[], + category: publishedSubmissionData.categories?.[0] || "", + thumbnailSrc: agent.agent_image || "https://picsum.photos/300/200", + slug: publishedSubmissionData.slug, + recommendedScheduleCron: agent.recommended_schedule_cron || "", + changesSummary: publishedSubmissionData.changes_summary || "", + } + : { + ...emptyModalState, + agent_id: preSelectedAgentId, + title: agent.agent_name, + description: agent.description || "", + thumbnailSrc: agent.agent_image || "https://picsum.photos/300/200", + slug: agent.agent_name.replace(/ /g, "-"), + recommendedScheduleCron: agent.recommended_schedule_cron || "", + }; + + setInitialData(initialFormData); + + // Update the state with the submission data if this is an update + if (publishedSubmissionData) { + setCurrentState((prevState) => ({ + ...prevState, + submissionData: publishedSubmissionData, + })); + } + }, [ + targetState, + preSelectedAgentId, + preSelectedAgentVersion, + myAgents, + mySubmissions, + ]); function handleClose() { // Reset all internal state @@ -97,20 +193,43 @@ export function usePublishAgentModal({ targetState, onStateChange }: Props) { imageSrc: string; recommendedScheduleCron: string | null; }, + publishedSubmissionData?: StoreSubmission | null, ) { - setInitialData({ - ...emptyModalState, - agent_id: agentId, - title: agentData.name, - description: agentData.description, - thumbnailSrc: agentData.imageSrc, - slug: agentData.name.replace(/ /g, "-"), - recommendedScheduleCron: agentData.recommendedScheduleCron || "", - }); + // Pre-populate with published data if this is an update, otherwise use agent data + const initialFormData: PublishAgentInfoInitialData = publishedSubmissionData + ? 
{ + agent_id: agentId, + title: publishedSubmissionData.name, + subheader: publishedSubmissionData.sub_heading || "", + description: publishedSubmissionData.description, + instructions: publishedSubmissionData.instructions || "", + youtubeLink: publishedSubmissionData.video_url || "", + agentOutputDemo: publishedSubmissionData.agent_output_demo_url || "", + additionalImages: [ + ...new Set(publishedSubmissionData.image_urls || []), + ].filter(Boolean) as string[], + category: publishedSubmissionData.categories?.[0] || "", // Take first category + thumbnailSrc: agentData.imageSrc, // Use current agent image + slug: publishedSubmissionData.slug, + recommendedScheduleCron: agentData.recommendedScheduleCron || "", + changesSummary: publishedSubmissionData.changes_summary || "", // Pre-populate with existing changes summary + } + : { + ...emptyModalState, + agent_id: agentId, + title: agentData.name, + description: agentData.description, + thumbnailSrc: agentData.imageSrc, + slug: agentData.name.replace(/ /g, "-"), + recommendedScheduleCron: agentData.recommendedScheduleCron || "", + }; + + setInitialData(initialFormData); updateState({ ...currentState, step: "info", + submissionData: publishedSubmissionData || null, }); setSelectedAgentId(agentId); diff --git a/autogpt_platform/frontend/src/components/contextual/marketplaceHelpers.ts b/autogpt_platform/frontend/src/components/contextual/marketplaceHelpers.ts new file mode 100644 index 0000000000..a080eca57f --- /dev/null +++ b/autogpt_platform/frontend/src/components/contextual/marketplaceHelpers.ts @@ -0,0 +1,57 @@ +/** + * Marketplace-specific helper functions that can be reused across different marketplace screens + */ + +/** + * Calculate the latest marketplace version from agent graph versions + */ +export function getLatestMarketplaceVersion( + agentGraphVersions?: string[], +): number | undefined { + if (!agentGraphVersions?.length) return undefined; + + return Math.max(...agentGraphVersions.map((v: string) => parseInt(v, 10))); +} + +/** + * Check if the current user is the creator of the agent + * Uses ID-based comparison for accurate matching + */ +export function isUserCreator( + creatorId: string | undefined, + currentUserId: string | undefined, +): boolean { + if (!creatorId || !currentUserId) return false; + return creatorId === currentUserId; +} + +/** + * Calculate update status for an agent + */ +export function calculateUpdateStatus({ + latestMarketplaceVersion, + currentVersion, + isUserCreator, + isAgentAddedToLibrary, +}: { + latestMarketplaceVersion?: number; + currentVersion: number; + isUserCreator: boolean; + isAgentAddedToLibrary: boolean; +}) { + if (!latestMarketplaceVersion) { + return { hasUpdate: false, hasUnpublishedChanges: false }; + } + + const hasUnpublishedChanges = + isUserCreator && + isAgentAddedToLibrary && + currentVersion > latestMarketplaceVersion; + + const hasUpdate = + isAgentAddedToLibrary && + !isUserCreator && + latestMarketplaceVersion > currentVersion; + + return { hasUpdate, hasUnpublishedChanges }; +} From e26822998ff03c7a5e6b960d780619656759af6f Mon Sep 17 00:00:00 2001 From: "seer-by-sentry[bot]" <157164994+seer-by-sentry[bot]@users.noreply.github.com> Date: Fri, 26 Dec 2025 16:17:24 +0000 Subject: [PATCH 18/25] fix: Handle missing or null 'items' key in DataForSEO Related Keywords block (#10989) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Changes 🏗️ - Modified the DataForSEO Related Keywords block to handle cases where the 'items' key is 
missing or has a null value in the API response. - Ensures that the code gracefully handles these scenarios by defaulting to an empty list, preventing potential errors. Fixes [AUTOGPT-SERVER-66D](https://sentry.io/organizations/significant-gravitas/issues/6902944636/). ### Checklist 📋 #### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] The DataForSEO API now returns an empty list when there are no results, preventing the code from attempting to iterate on a null value. --- > [!NOTE] > Strengthens parsing of DataForSEO Labs response to avoid errors when `items` is missing or null. > > - In `backend/blocks/dataforseo/related_keywords.py` `run()`, sets `items = first_result.get("items") or []` when `first_result` is a `dict`, otherwise `[]`, ensuring safe iteration > - Prevents exceptions and yields empty results when no items are returned > > Written by [Cursor Bugbot](https://cursor.com/dashboard?tab=bugbot) for commit cc465ddbf21489f51e81df8bc6e82f0ca7827d3b. This will update automatically on new commits. Configure [here](https://cursor.com/dashboard?tab=bugbot). Co-authored-by: seer-by-sentry[bot] <157164994+seer-by-sentry[bot]@users.noreply.github.com> Co-authored-by: Toran Bruce Richards Co-authored-by: claude[bot] <41898282+claude[bot]@users.noreply.github.com> Co-authored-by: Nicholas Tindle Co-authored-by: Nicholas Tindle --- .../backend/blocks/dataforseo/related_keywords.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/autogpt_platform/backend/backend/blocks/dataforseo/related_keywords.py b/autogpt_platform/backend/backend/blocks/dataforseo/related_keywords.py index 7a7fbdd11a..0757cb6507 100644 --- a/autogpt_platform/backend/backend/blocks/dataforseo/related_keywords.py +++ b/autogpt_platform/backend/backend/blocks/dataforseo/related_keywords.py @@ -182,13 +182,10 @@ class DataForSeoRelatedKeywordsBlock(Block): if results and len(results) > 0: # results is a list, get the first element first_result = results[0] if isinstance(results, list) else results - items = ( - first_result.get("items", []) - if isinstance(first_result, dict) - else [] - ) - # Ensure items is never None - if items is None: + # Handle missing key, null value, or valid list value + if isinstance(first_result, dict): + items = first_result.get("items") or [] + else: items = [] for item in items: # Extract keyword_data from the item From dff8efa35d31d88001294b69c52f05085f98ef28 Mon Sep 17 00:00:00 2001 From: Ubbe Date: Tue, 30 Dec 2025 20:22:32 +0700 Subject: [PATCH 19/25] fix(frontend): favico colour override issue (#11681) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Changes 🏗️ Sometimes, on Dev, when navigating between pages, the Favico colour would revert from Green 🟢 (Dev) to Purple 🟣(Default). That's because the `/marketplace` page had custom code overriding it that I didn't notice earlier... I also made it use the Next.js metadata API, so it handles the favicon correctly across navigations. 
## Checklist 📋 ### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] Run locally and test the above --- .../src/app/(platform)/marketplace/page.tsx | 9 ++----- autogpt_platform/frontend/src/app/layout.tsx | 24 +++++++++---------- 2 files changed, 14 insertions(+), 19 deletions(-) diff --git a/autogpt_platform/frontend/src/app/(platform)/marketplace/page.tsx b/autogpt_platform/frontend/src/app/(platform)/marketplace/page.tsx index e95e230377..9b7e6ae93b 100644 --- a/autogpt_platform/frontend/src/app/(platform)/marketplace/page.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/marketplace/page.tsx @@ -1,11 +1,11 @@ -import { Metadata } from "next"; -import { Suspense } from "react"; import { prefetchGetV2ListStoreAgentsQuery, prefetchGetV2ListStoreCreatorsQuery, } from "@/app/api/__generated__/endpoints/store/store"; import { getQueryClient } from "@/lib/react-query/queryClient"; import { dehydrate, HydrationBoundary } from "@tanstack/react-query"; +import { Metadata } from "next"; +import { Suspense } from "react"; import { MainMarkeplacePage } from "./components/MainMarketplacePage/MainMarketplacePage"; import { MainMarketplacePageLoading } from "./components/MainMarketplacePageLoading"; @@ -48,11 +48,6 @@ export const metadata: Metadata = { description: "Find and use AI Agents created by our community", images: ["/images/store-twitter.png"], }, - icons: { - icon: "/favicon.ico", - shortcut: "/favicon-16x16.png", - apple: "/apple-touch-icon.png", - }, }; export default async function MarketplacePage(): Promise { diff --git a/autogpt_platform/frontend/src/app/layout.tsx b/autogpt_platform/frontend/src/app/layout.tsx index ff2590dc61..453f816dee 100644 --- a/autogpt_platform/frontend/src/app/layout.tsx +++ b/autogpt_platform/frontend/src/app/layout.tsx @@ -15,9 +15,21 @@ import { environment } from "@/services/environment"; import { ReactQueryDevtools } from "@tanstack/react-query-devtools"; import { headers } from "next/headers"; +const isDev = environment.isDev(); +const isLocal = environment.isLocal(); + +const faviconPath = isDev + ? "/favicon-dev.ico" + : isLocal + ? "/favicon-local.ico" + : "/favicon.ico"; + export const metadata: Metadata = { title: "AutoGPT Platform", description: "Your one stop shop to creating AI Agents", + icons: { + icon: faviconPath, + }, }; export default async function RootLayout({ @@ -27,8 +39,6 @@ export default async function RootLayout({ }>) { const headersList = await headers(); const host = headersList.get("host") || ""; - const isDev = environment.isDev(); - const isLocal = environment.isLocal(); return ( - Date: Tue, 30 Dec 2025 20:22:57 +0700 Subject: [PATCH 20/25] fix(frontend): use DS Dialog on old builder (#11643) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Changes 🏗️ Use the Design System `` on the old builder, which supports long content scrolling ( the current one does not, causing issues in graphs with many run inputs )... 
## Checklist 📋 ### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] Run locally and test the above ## Summary by CodeRabbit * **New Features** * Added Enhanced Rendering toggle for improved output handling and display (controlled via feature flag) * **Improvements** * Refined dialog layouts and user interactions * Enhanced copy-to-clipboard functionality with toast notifications upon copying ✏️ Tip: You can customize this high-level summary in your review settings. --- .../legacy-builder/ExpandableOutputDialog.tsx | 276 +++++++++--------- .../legacy-builder/RunnerInputUI.tsx | 54 ++-- 2 files changed, 173 insertions(+), 157 deletions(-) diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/ExpandableOutputDialog.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/ExpandableOutputDialog.tsx index 0050c6cf64..98edbca2fb 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/ExpandableOutputDialog.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/ExpandableOutputDialog.tsx @@ -4,19 +4,12 @@ import { OutputActions, OutputItem, } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/OutputRenderers"; +import { Dialog } from "@/components/molecules/Dialog/Dialog"; import { beautifyString } from "@/lib/utils"; import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag"; import { Clipboard, Maximize2 } from "lucide-react"; import React, { FC, useMemo, useState } from "react"; import { Button } from "../../../../../components/__legacy__/ui/button"; -import { - Dialog, - DialogContent, - DialogDescription, - DialogFooter, - DialogHeader, - DialogTitle, -} from "../../../../../components/__legacy__/ui/dialog"; import { ContentRenderer } from "../../../../../components/__legacy__/ui/render"; import { ScrollArea } from "../../../../../components/__legacy__/ui/scroll-area"; import { Separator } from "../../../../../components/__legacy__/ui/separator"; @@ -120,138 +113,155 @@ const ExpandableOutputDialog: FC = ({ }; return ( - - - - -
- - Full Output Preview -
- {enableEnhancedOutputHandling && ( -
- - -
- )} -
- - Execution ID: {execId} -
- Pin:{" "} - {beautifyString(pinName)} -
-
- -
- {useEnhancedRenderer && outputItems.length > 0 && ( -
- ({ - value: item.value, - metadata: item.metadata, - renderer: item.renderer, - }))} + +
+ + Full Output Preview +
+ {enableEnhancedOutputHandling && ( +
+ +
)} - -
- {data.length > 0 ? ( - useEnhancedRenderer ? ( -
- {outputItems.map((item) => ( - - ))} -
- ) : ( -
- {data.map((item, index) => ( -
-
- - Item {index + 1} of {data.length} - - -
- -
- -
-
- ))} -
- ) - ) : ( -
- No data available -
- )} -
-
+ } + controlled={{ + isOpen, + set: (open) => { + if (!open) onClose(); + }, + }} + onClose={onClose} + styling={{ + maxWidth: "56rem", + width: "90vw", + height: "90vh", + }} + > + +
+
+

+ Execution ID: {execId} +
+ Pin:{" "} + {beautifyString(pinName)} +

+
- -
- {data.length} item{data.length !== 1 ? "s" : ""} total -
-
- {!useEnhancedRenderer && ( - +
+ {useEnhancedRenderer && outputItems.length > 0 && ( +
+ ({ + value: item.value, + metadata: item.metadata, + renderer: item.renderer, + }))} + /> +
)} - + +
+ {data.length > 0 ? ( + useEnhancedRenderer ? ( +
+ {outputItems.map((item) => ( + + ))} +
+ ) : ( +
+ {data.map((item, index) => ( +
+
+ + Item {index + 1} of {data.length} + + +
+ +
+ +
+
+ ))} +
+ ) + ) : ( +
+ No data available +
+ )} +
+
- - + + +
+ {data.length} item{data.length !== 1 ? "s" : ""} total +
+
+ {!useEnhancedRenderer && ( + + )} + +
+
+
+
); }; diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/RunnerInputUI.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/RunnerInputUI.tsx index bff21c46f2..15983be9f5 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/RunnerInputUI.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/RunnerInputUI.tsx @@ -1,17 +1,11 @@ -import React, { useCallback } from "react"; +import { useCallback } from "react"; +import { AgentRunDraftView } from "@/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-run-draft-view"; +import { Dialog } from "@/components/molecules/Dialog/Dialog"; import type { CredentialsMetaInput, GraphMeta, } from "@/lib/autogpt-server-api/types"; -import { - Dialog, - DialogContent, - DialogHeader, - DialogTitle, - DialogDescription, -} from "@/components/__legacy__/ui/dialog"; -import { AgentRunDraftView } from "@/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-run-draft-view"; interface RunInputDialogProps { isOpen: boolean; @@ -70,21 +64,33 @@ export function RunnerInputDialog({ ); return ( - - - - Run your agent - {graph.name} - - - + { + if (!open) doClose(); + }, + }} + onClose={doClose} + styling={{ + maxWidth: "56rem", + width: "90vw", + }} + > + +
+

{graph.name}

+ +
+
); } From 66f0d97ca2de0578f029a0a1cf6fe685ba17f695 Mon Sep 17 00:00:00 2001 From: Ubbe Date: Tue, 30 Dec 2025 20:21:53 +0700 Subject: [PATCH 21/25] fix(frontend): hide better chat link if not enabled (#11648) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Changes 🏗️ - Make `` a client component so its rendering is more predictable - Remove the `useMemo()` for the chat link to prevent the flash... - Make sure chat is added to the navbar links only after checking the flag is enabled - Improve logout with `useTransition` - Simplify feature flags setup ## Checklist 📋 ### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] Run locally and test the above --- > [!NOTE] > Ensures the `Chat` nav item is hidden when the feature flag is off across desktop and mobile nav. > > - Inline-filters `loggedInLinks` to skip `Chat` when `Flag.CHAT` is false for both `NavbarLink` rendering and `MobileNavBar` menu items > - Removes `useMemo`/`linksWithChat` helper; maps directly over `loggedInLinks` and filters nulls in mobile, keeping icon mapping intact > - Cleans up unused `useMemo` import > > Written by [Cursor Bugbot](https://cursor.com/dashboard?tab=bugbot) for commit 79c42d87b4adb05155be684e1d0576073e872680. This will update automatically on new commits. Configure [here](https://cursor.com/dashboard?tab=bugbot). --- autogpt_platform/frontend/pnpm-lock.yaml | 2 +- .../src/components/layout/Navbar/Navbar.tsx | 156 +++++++++++++++++- .../components/AccountLogoutOption.tsx | 41 +++-- .../Navbar/components/NavbarLoading.tsx | 9 +- .../layout/Navbar/components/NavbarView.tsx | 144 ---------------- .../src/components/layout/Navbar/data.ts | 25 --- .../services/feature-flags/use-get-flag.ts | 14 +- 7 files changed, 182 insertions(+), 209 deletions(-) delete mode 100644 autogpt_platform/frontend/src/components/layout/Navbar/components/NavbarView.tsx delete mode 100644 autogpt_platform/frontend/src/components/layout/Navbar/data.ts diff --git a/autogpt_platform/frontend/pnpm-lock.yaml b/autogpt_platform/frontend/pnpm-lock.yaml index 54843fc589..7d39b68468 100644 --- a/autogpt_platform/frontend/pnpm-lock.yaml +++ b/autogpt_platform/frontend/pnpm-lock.yaml @@ -13011,7 +13011,7 @@ snapshots: minimatch: 3.1.2 node-abort-controller: 3.1.1 schema-utils: 3.3.0 - semver: 7.7.2 + semver: 7.7.3 tapable: 2.2.3 typescript: 5.9.3 webpack: 5.101.3(esbuild@0.25.9) diff --git a/autogpt_platform/frontend/src/components/layout/Navbar/Navbar.tsx b/autogpt_platform/frontend/src/components/layout/Navbar/Navbar.tsx index 1441cbfb65..c5e9cabd63 100644 --- a/autogpt_platform/frontend/src/components/layout/Navbar/Navbar.tsx +++ b/autogpt_platform/frontend/src/components/layout/Navbar/Navbar.tsx @@ -1,13 +1,157 @@ +"use client"; + +import { useGetV2GetUserProfile } from "@/app/api/__generated__/endpoints/store/store"; +import { okData } from "@/app/api/helpers"; +import { IconAutoGPTLogo, IconType } from "@/components/__legacy__/ui/icons"; +import { PreviewBanner } from "@/components/layout/Navbar/components/PreviewBanner/PreviewBanner"; +import { useBreakpoint } from "@/lib/hooks/useBreakpoint"; +import { useSupabase } from "@/lib/supabase/hooks/useSupabase"; import { environment } from "@/services/environment"; +import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag"; +import { AccountMenu } from "./components/AccountMenu/AccountMenu"; +import { 
AgentActivityDropdown } from "./components/AgentActivityDropdown/AgentActivityDropdown"; +import { LoginButton } from "./components/LoginButton"; +import { MobileNavBar } from "./components/MobileNavbar/MobileNavBar"; +import { NavbarLink } from "./components/NavbarLink"; +import { NavbarLoading } from "./components/NavbarLoading"; +import { Wallet } from "./components/Wallet/Wallet"; +import { getAccountMenuItems, loggedInLinks, loggedOutLinks } from "./helpers"; -import { NavbarView } from "./components/NavbarView"; -import { getNavbarAccountData } from "./data"; - -export async function Navbar() { - const { isLoggedIn } = await getNavbarAccountData(); +export function Navbar() { + const { user, isLoggedIn, isUserLoading } = useSupabase(); + const breakpoint = useBreakpoint(); + const isSmallScreen = breakpoint === "sm" || breakpoint === "base"; + const dynamicMenuItems = getAccountMenuItems(user?.role); + const isChatEnabled = useGetFlag(Flag.CHAT); const previewBranchName = environment.getPreviewStealingDev(); + const { data: profile, isLoading: isProfileLoading } = useGetV2GetUserProfile( + { + query: { + select: okData, + enabled: isLoggedIn && !!user, + // Include user ID in query key to ensure cache invalidation when user changes + queryKey: ["/api/store/profile", user?.id], + }, + }, + ); + + const isLoadingProfile = isProfileLoading || isUserLoading; + + const shouldShowPreviewBanner = Boolean(isLoggedIn && previewBranchName); + + const actualLoggedInLinks = + isChatEnabled === true + ? loggedInLinks.concat([{ name: "Chat", href: "/chat" }]) + : loggedInLinks; + + if (isUserLoading) { + return ; + } + return ( - + <> +
+ {shouldShowPreviewBanner && previewBranchName ? ( + + ) : null} + +
+ {/* Mobile Navbar - Adjust positioning */} + <> + {isLoggedIn && isSmallScreen ? ( +
+ + { + if (link.name === "Chat" && !isChatEnabled) { + return null; + } + + return { + icon: + link.name === "Marketplace" + ? IconType.Marketplace + : link.name === "Library" + ? IconType.Library + : link.name === "Build" + ? IconType.Builder + : link.name === "Chat" + ? IconType.Chat + : link.name === "Monitor" + ? IconType.Library + : IconType.LayoutDashboard, + text: link.name, + href: link.href, + }; + }) + .filter((item) => item !== null) as Array<{ + icon: IconType; + text: string; + href: string; + }>, + }, + ...dynamicMenuItems, + ]} + userEmail={profile?.name} + avatarSrc={profile?.avatar_url ?? ""} + /> +
+ ) : null} + + ); } diff --git a/autogpt_platform/frontend/src/components/layout/Navbar/components/AccountMenu/components/AccountLogoutOption.tsx b/autogpt_platform/frontend/src/components/layout/Navbar/components/AccountMenu/components/AccountLogoutOption.tsx index b0061ec2c9..570f05ca89 100644 --- a/autogpt_platform/frontend/src/components/layout/Navbar/components/AccountMenu/components/AccountLogoutOption.tsx +++ b/autogpt_platform/frontend/src/components/layout/Navbar/components/AccountMenu/components/AccountLogoutOption.tsx @@ -6,45 +6,42 @@ import { useSupabase } from "@/lib/supabase/hooks/useSupabase"; import { cn } from "@/lib/utils"; import * as Sentry from "@sentry/nextjs"; import { useRouter } from "next/navigation"; -import { useState } from "react"; +import { useTransition } from "react"; export function AccountLogoutOption() { - const [isLoggingOut, setIsLoggingOut] = useState(false); + const [isPending, startTransition] = useTransition(); const supabase = useSupabase(); const router = useRouter(); const { toast } = useToast(); - async function handleLogout() { - setIsLoggingOut(true); - try { - await supabase.logOut(); - router.push("/login"); - } catch (e) { - Sentry.captureException(e); - toast({ - title: "Error logging out", - description: - "Something went wrong when logging out. Please try again. If the problem persists, please contact support.", - variant: "destructive", - }); - } finally { - setTimeout(() => { - setIsLoggingOut(false); - }, 3000); - } + function handleLogout() { + startTransition(async () => { + try { + await supabase.logOut(); + router.replace("/login"); + } catch (e) { + Sentry.captureException(e); + toast({ + title: "Error logging out", + description: + "Something went wrong when logging out. Please try again. If the problem persists, please contact support.", + variant: "destructive", + }); + } + }); } return (
- {isLoggingOut ? ( + {isPending ? ( ) : ( <> diff --git a/autogpt_platform/frontend/src/components/layout/Navbar/components/NavbarLoading.tsx b/autogpt_platform/frontend/src/components/layout/Navbar/components/NavbarLoading.tsx index 42362d24d4..322574fdb0 100644 --- a/autogpt_platform/frontend/src/components/layout/Navbar/components/NavbarLoading.tsx +++ b/autogpt_platform/frontend/src/components/layout/Navbar/components/NavbarLoading.tsx @@ -5,16 +5,15 @@ export function NavbarLoading() { return ( ); diff --git a/autogpt_platform/frontend/src/components/layout/Navbar/components/NavbarView.tsx b/autogpt_platform/frontend/src/components/layout/Navbar/components/NavbarView.tsx deleted file mode 100644 index 863b9f601f..0000000000 --- a/autogpt_platform/frontend/src/components/layout/Navbar/components/NavbarView.tsx +++ /dev/null @@ -1,144 +0,0 @@ -"use client"; - -import { useGetV2GetUserProfile } from "@/app/api/__generated__/endpoints/store/store"; -import { IconAutoGPTLogo, IconType } from "@/components/__legacy__/ui/icons"; -import { PreviewBanner } from "@/components/layout/Navbar/components/PreviewBanner/PreviewBanner"; -import { useBreakpoint } from "@/lib/hooks/useBreakpoint"; -import { useSupabase } from "@/lib/supabase/hooks/useSupabase"; -import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag"; -import { useMemo } from "react"; -import { okData } from "@/app/api/helpers"; -import { getAccountMenuItems, loggedInLinks, loggedOutLinks } from "../helpers"; -import { AccountMenu } from "./AccountMenu/AccountMenu"; -import { AgentActivityDropdown } from "./AgentActivityDropdown/AgentActivityDropdown"; -import { LoginButton } from "./LoginButton"; -import { MobileNavBar } from "./MobileNavbar/MobileNavBar"; -import { NavbarLink } from "./NavbarLink"; -import { Wallet } from "./Wallet/Wallet"; -interface NavbarViewProps { - isLoggedIn: boolean; - previewBranchName?: string | null; -} - -export function NavbarView({ isLoggedIn, previewBranchName }: NavbarViewProps) { - const { user } = useSupabase(); - const breakpoint = useBreakpoint(); - const isSmallScreen = breakpoint === "sm" || breakpoint === "base"; - const dynamicMenuItems = getAccountMenuItems(user?.role); - const isChatEnabled = useGetFlag(Flag.CHAT); - - const { data: profile, isLoading: isProfileLoading } = useGetV2GetUserProfile( - { - query: { - select: okData, - enabled: isLoggedIn && !!user, - // Include user ID in query key to ensure cache invalidation when user changes - queryKey: ["/api/store/profile", user?.id], - }, - }, - ); - - const { isUserLoading } = useSupabase(); - const isLoadingProfile = isProfileLoading || isUserLoading; - - const linksWithChat = useMemo(() => { - const chatLink = { name: "Chat", href: "/chat" }; - return isChatEnabled ? [...loggedInLinks, chatLink] : loggedInLinks; - }, [isChatEnabled]); - - const shouldShowPreviewBanner = Boolean(isLoggedIn && previewBranchName); - - return ( - <> -
- {shouldShowPreviewBanner && previewBranchName ? ( - - ) : null} - -
- {/* Mobile Navbar - Adjust positioning */} - <> - {isLoggedIn && isSmallScreen ? ( -
- - ({ - icon: - link.name === "Marketplace" - ? IconType.Marketplace - : link.name === "Library" - ? IconType.Library - : link.name === "Build" - ? IconType.Builder - : link.name === "Chat" - ? IconType.Chat - : link.name === "Monitor" - ? IconType.Library - : IconType.LayoutDashboard, - text: link.name, - href: link.href, - })), - }, - ...dynamicMenuItems, - ]} - userEmail={profile?.name} - avatarSrc={profile?.avatar_url ?? ""} - /> -
- ) : null} - - - ); -} diff --git a/autogpt_platform/frontend/src/components/layout/Navbar/data.ts b/autogpt_platform/frontend/src/components/layout/Navbar/data.ts deleted file mode 100644 index 0d07cef78b..0000000000 --- a/autogpt_platform/frontend/src/components/layout/Navbar/data.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { prefetchGetV2GetUserProfileQuery } from "@/app/api/__generated__/endpoints/store/store"; -import { getQueryClient } from "@/lib/react-query/queryClient"; -import { getServerUser } from "@/lib/supabase/server/getServerUser"; - -export async function getNavbarAccountData() { - const { user } = await getServerUser(); - const isLoggedIn = Boolean(user); - const queryClient = getQueryClient(); - - if (!isLoggedIn) { - return { - profile: null, - isLoggedIn, - }; - } - try { - await prefetchGetV2GetUserProfileQuery(queryClient); - } catch (error) { - console.error("Error fetching profile:", error); - } - - return { - isLoggedIn, - }; -} diff --git a/autogpt_platform/frontend/src/services/feature-flags/use-get-flag.ts b/autogpt_platform/frontend/src/services/feature-flags/use-get-flag.ts index e80adeb7b5..f05d7c68a4 100644 --- a/autogpt_platform/frontend/src/services/feature-flags/use-get-flag.ts +++ b/autogpt_platform/frontend/src/services/feature-flags/use-get-flag.ts @@ -2,7 +2,6 @@ import { DEFAULT_SEARCH_TERMS } from "@/app/(platform)/marketplace/components/HeroSection/helpers"; import { useFlags } from "launchdarkly-react-client-sdk"; -import { environment } from "../environment"; export enum Flag { BETA_BLOCKS = "beta-blocks", @@ -40,7 +39,7 @@ const mockFlags = { [Flag.BETA_BLOCKS]: [], [Flag.NEW_BLOCK_MENU]: false, [Flag.NEW_AGENT_RUNS]: false, - [Flag.GRAPH_SEARCH]: true, + [Flag.GRAPH_SEARCH]: false, [Flag.ENABLE_ENHANCED_OUTPUT_HANDLING]: false, [Flag.NEW_FLOW_EDITOR]: false, [Flag.BUILDER_VIEW_SWITCH]: false, @@ -48,17 +47,20 @@ const mockFlags = { [Flag.AGENT_FAVORITING]: false, [Flag.MARKETPLACE_SEARCH_TERMS]: DEFAULT_SEARCH_TERMS, [Flag.ENABLE_PLATFORM_PAYMENT]: false, - [Flag.CHAT]: true, + [Flag.CHAT]: false, }; export function useGetFlag(flag: T): FlagValues[T] | null { const currentFlags = useFlags(); const flagValue = currentFlags[flag]; - const isCloud = environment.isCloud(); - if ((isPwMockEnabled && !isCloud) || flagValue === undefined) { + const envEnabled = process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true"; + const clientId = process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID; + const isLaunchDarklyConfigured = envEnabled && clientId; + + if (!isLaunchDarklyConfigured || isPwMockEnabled) { return mockFlags[flag]; } - return flagValue; + return flagValue ?? mockFlags[flag]; } From 79d45a15d09f13f695082ef6b180ee589228b6fb Mon Sep 17 00:00:00 2001 From: Nicholas Tindle Date: Tue, 30 Dec 2025 12:10:30 -0600 Subject: [PATCH 22/25] feat(platform): Deduplicate insufficient funds Discord + email notifications (#11672) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add Redis-based deduplication for insufficient funds notifications (both Discord alerts and user emails) when users run out of credits. This prevents spamming users and the PRODUCT Discord channel with repeated alerts for the same user+agent combination. 
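The core of the gate is an atomic Redis `SET NX` with a TTL. A minimal sketch of the idea, using a plain synchronous `redis-py` client and an illustrative `should_notify` helper rather than the actual executor wiring:

```python
import redis

NOTIFIED_PREFIX = "insufficient_funds_discord_notified"
NOTIFIED_TTL_SECONDS = 30 * 24 * 60 * 60  # 30-day fallback cleanup

def should_notify(r: redis.Redis, user_id: str, graph_id: str) -> bool:
    """Return True only the first time this user+agent combination is seen.

    SET NX atomically creates the flag; if the key already exists the call
    returns None and both the email and the Discord alert are skipped.
    """
    key = f"{NOTIFIED_PREFIX}:{user_id}:{graph_id}"
    try:
        return bool(r.set(key, "1", nx=True, ex=NOTIFIED_TTL_SECONDS))
    except Exception:
        # If Redis is unreachable, prefer an occasional duplicate alert
        # over never notifying at all.
        return True
```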
### Changes 🏗️ - **Redis-based deduplication** (`backend/executor/manager.py`): - Add `INSUFFICIENT_FUNDS_NOTIFIED_PREFIX` constant for Redis key prefix - Add `INSUFFICIENT_FUNDS_NOTIFIED_TTL_SECONDS` (30 days) as fallback cleanup - Implement deduplication in `_handle_insufficient_funds_notif` using Redis `SET NX` - Skip both email (`ZERO_BALANCE`) and Discord notifications for duplicate alerts per user+agent - Add `clear_insufficient_funds_notifications(user_id)` function to remove all notification flags for a user - **Clear flags on credit top-up** (`backend/data/credit.py`): - Call `clear_insufficient_funds_notifications` in `_top_up_credits` after successful auto-charge - Call `clear_insufficient_funds_notifications` in `fulfill_checkout` after successful manual top-up - This allows users to receive notifications again if they run out of funds in the future - **Comprehensive test coverage** (`backend/executor/manager_insufficient_funds_test.py`): - Test first-time notification sends both email and Discord alert - Test duplicate notifications are skipped for same user+agent - Test different agents for same user get separate alerts - Test clearing notifications removes all keys for a user - Test handling when no notification keys exist - Test notifications still sent when Redis fails (graceful degradation) ### Checklist 📋 #### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] First insufficient funds alert sends both email and Discord notification - [x] Duplicate alerts for same user+agent are skipped - [x] Different agents for same user each get their own notification - [x] Topping up credits clears notification flags - [x] Redis failure gracefully falls back to sending notifications - [x] 30-day TTL provides automatic cleanup as fallback - [x] Manually test this works with scheduled agents --- > [!NOTE] > Introduces Redis-backed deduplication for insufficient-funds alerts and resets flags on successful credit additions. > > - **Dedup insufficient-funds alerts** in `executor/manager.py` using Redis `SET NX` with `INSUFFICIENT_FUNDS_NOTIFIED_PREFIX` and 30‑day TTL; skips duplicate ZERO_BALANCE email + Discord alerts per `user_id`+`graph_id`, with graceful fallback if Redis fails. > - **Reset notification flags on credit increases** by adding `clear_insufficient_funds_notifications(user_id)` and invoking it when enabling/adding positive `GRANT`/`TOP_UP` transactions in `data/credit.py`. > - **Tests** (`executor/manager_insufficient_funds_test.py`): first-time vs duplicate behavior, per-agent separation, clearing keys (including no-key and Redis-error cases), and clearing on `_add_transaction`/`_enable_transaction`. > > Written by [Cursor Bugbot](https://cursor.com/dashboard?tab=bugbot) for commit 1a4413b3a1d3951d875b6fc4566619cbe511f15f. This will update automatically on new commits. Configure [here](https://cursor.com/dashboard?tab=bugbot). 
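The flag-clearing on top-up described in the changes above is the mirror image: after a successful `GRANT`/`TOP_UP`, scan for the user's flag keys and delete them so a future shortfall alerts again. A sketch under the same assumptions (synchronous client, illustrative helper name):

```python
import redis

NOTIFIED_PREFIX = "insufficient_funds_discord_notified"

def clear_notified_flags(r: redis.Redis, user_id: str) -> int:
    """Delete every insufficient-funds flag for this user; returns the count."""
    pattern = f"{NOTIFIED_PREFIX}:{user_id}:*"
    try:
        keys = list(r.scan_iter(match=pattern))
        return r.delete(*keys) if keys else 0
    except Exception:
        # Failing to clear flags only delays the next alert by up to the
        # 30-day TTL, so degrade gracefully rather than raise.
        return 0
```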
--------- Co-authored-by: Ubbe Co-authored-by: Claude --- .../backend/backend/data/credit.py | 29 + .../backend/backend/executor/manager.py | 63 ++ .../manager_insufficient_funds_test.py | 560 ++++++++++++++++++ 3 files changed, 652 insertions(+) create mode 100644 autogpt_platform/backend/backend/executor/manager_insufficient_funds_test.py diff --git a/autogpt_platform/backend/backend/data/credit.py b/autogpt_platform/backend/backend/data/credit.py index 95f0b158e1..f3c5365446 100644 --- a/autogpt_platform/backend/backend/data/credit.py +++ b/autogpt_platform/backend/backend/data/credit.py @@ -341,6 +341,19 @@ class UserCreditBase(ABC): if result: # UserBalance is already updated by the CTE + + # Clear insufficient funds notification flags when credits are added + # so user can receive alerts again if they run out in the future. + if transaction.amount > 0 and transaction.type in [ + CreditTransactionType.GRANT, + CreditTransactionType.TOP_UP, + ]: + from backend.executor.manager import ( + clear_insufficient_funds_notifications, + ) + + await clear_insufficient_funds_notifications(user_id) + return result[0]["balance"] async def _add_transaction( @@ -530,6 +543,22 @@ class UserCreditBase(ABC): if result: new_balance, tx_key = result[0]["balance"], result[0]["transactionKey"] # UserBalance is already updated by the CTE + + # Clear insufficient funds notification flags when credits are added + # so user can receive alerts again if they run out in the future. + if ( + amount > 0 + and is_active + and transaction_type + in [CreditTransactionType.GRANT, CreditTransactionType.TOP_UP] + ): + # Lazy import to avoid circular dependency with executor.manager + from backend.executor.manager import ( + clear_insufficient_funds_notifications, + ) + + await clear_insufficient_funds_notifications(user_id) + return new_balance, tx_key # If no result, either user doesn't exist or insufficient balance diff --git a/autogpt_platform/backend/backend/executor/manager.py b/autogpt_platform/backend/backend/executor/manager.py index 161e68b0d6..75459c5a2a 100644 --- a/autogpt_platform/backend/backend/executor/manager.py +++ b/autogpt_platform/backend/backend/executor/manager.py @@ -114,6 +114,40 @@ utilization_gauge = Gauge( "Ratio of active graph runs to max graph workers", ) +# Redis key prefix for tracking insufficient funds Discord notifications. +# We only send one notification per user per agent until they top up credits. +INSUFFICIENT_FUNDS_NOTIFIED_PREFIX = "insufficient_funds_discord_notified" +# TTL for the notification flag (30 days) - acts as a fallback cleanup +INSUFFICIENT_FUNDS_NOTIFIED_TTL_SECONDS = 30 * 24 * 60 * 60 + + +async def clear_insufficient_funds_notifications(user_id: str) -> int: + """ + Clear all insufficient funds notification flags for a user. + + This should be called when a user tops up their credits, allowing + Discord notifications to be sent again if they run out of funds. + + Args: + user_id: The user ID to clear notifications for. + + Returns: + The number of keys that were deleted. 
+ """ + try: + redis_client = await redis.get_redis_async() + pattern = f"{INSUFFICIENT_FUNDS_NOTIFIED_PREFIX}:{user_id}:*" + keys = [key async for key in redis_client.scan_iter(match=pattern)] + if keys: + return await redis_client.delete(*keys) + return 0 + except Exception as e: + logger.warning( + f"Failed to clear insufficient funds notification flags for user " + f"{user_id}: {e}" + ) + return 0 + # Thread-local storage for ExecutionProcessor instances _tls = threading.local() @@ -1261,12 +1295,40 @@ class ExecutionProcessor: graph_id: str, e: InsufficientBalanceError, ): + # Check if we've already sent a notification for this user+agent combo. + # We only send one notification per user per agent until they top up credits. + redis_key = f"{INSUFFICIENT_FUNDS_NOTIFIED_PREFIX}:{user_id}:{graph_id}" + try: + redis_client = redis.get_redis() + # SET NX returns True only if the key was newly set (didn't exist) + is_new_notification = redis_client.set( + redis_key, + "1", + nx=True, + ex=INSUFFICIENT_FUNDS_NOTIFIED_TTL_SECONDS, + ) + if not is_new_notification: + # Already notified for this user+agent, skip all notifications + logger.debug( + f"Skipping duplicate insufficient funds notification for " + f"user={user_id}, graph={graph_id}" + ) + return + except Exception as redis_error: + # If Redis fails, log and continue to send the notification + # (better to occasionally duplicate than to never notify) + logger.warning( + f"Failed to check/set insufficient funds notification flag in Redis: " + f"{redis_error}" + ) + shortfall = abs(e.amount) - e.balance metadata = db_client.get_graph_metadata(graph_id) base_url = ( settings.config.frontend_base_url or settings.config.platform_base_url ) + # Queue user email notification queue_notification( NotificationEventModel( user_id=user_id, @@ -1280,6 +1342,7 @@ class ExecutionProcessor: ) ) + # Send Discord system alert try: user_email = db_client.get_user_email_by_id(user_id) diff --git a/autogpt_platform/backend/backend/executor/manager_insufficient_funds_test.py b/autogpt_platform/backend/backend/executor/manager_insufficient_funds_test.py new file mode 100644 index 0000000000..276c9f4f7a --- /dev/null +++ b/autogpt_platform/backend/backend/executor/manager_insufficient_funds_test.py @@ -0,0 +1,560 @@ +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from prisma.enums import NotificationType + +from backend.data.notifications import ZeroBalanceData +from backend.executor.manager import ( + INSUFFICIENT_FUNDS_NOTIFIED_PREFIX, + ExecutionProcessor, + clear_insufficient_funds_notifications, +) +from backend.util.exceptions import InsufficientBalanceError +from backend.util.test import SpinTestServer + + +async def async_iter(items): + """Helper to create an async iterator from a list.""" + for item in items: + yield item + + +@pytest.mark.asyncio(loop_scope="session") +async def test_handle_insufficient_funds_sends_discord_alert_first_time( + server: SpinTestServer, +): + """Test that the first insufficient funds notification sends a Discord alert.""" + + execution_processor = ExecutionProcessor() + user_id = "test-user-123" + graph_id = "test-graph-456" + error = InsufficientBalanceError( + message="Insufficient balance", + user_id=user_id, + balance=72, # $0.72 + amount=-714, # Attempting to spend $7.14 + ) + + with patch( + "backend.executor.manager.queue_notification" + ) as mock_queue_notif, patch( + "backend.executor.manager.get_notification_manager_client" + ) as mock_get_client, patch( + 
"backend.executor.manager.settings" + ) as mock_settings, patch( + "backend.executor.manager.redis" + ) as mock_redis_module: + + # Setup mocks + mock_client = MagicMock() + mock_get_client.return_value = mock_client + mock_settings.config.frontend_base_url = "https://test.com" + + # Mock Redis to simulate first-time notification (set returns True) + mock_redis_client = MagicMock() + mock_redis_module.get_redis.return_value = mock_redis_client + mock_redis_client.set.return_value = True # Key was newly set + + # Create mock database client + mock_db_client = MagicMock() + mock_graph_metadata = MagicMock() + mock_graph_metadata.name = "Test Agent" + mock_db_client.get_graph_metadata.return_value = mock_graph_metadata + mock_db_client.get_user_email_by_id.return_value = "test@example.com" + + # Test the insufficient funds handler + execution_processor._handle_insufficient_funds_notif( + db_client=mock_db_client, + user_id=user_id, + graph_id=graph_id, + e=error, + ) + + # Verify notification was queued + mock_queue_notif.assert_called_once() + notification_call = mock_queue_notif.call_args[0][0] + assert notification_call.type == NotificationType.ZERO_BALANCE + assert notification_call.user_id == user_id + assert isinstance(notification_call.data, ZeroBalanceData) + assert notification_call.data.current_balance == 72 + + # Verify Redis was checked with correct key pattern + expected_key = f"{INSUFFICIENT_FUNDS_NOTIFIED_PREFIX}:{user_id}:{graph_id}" + mock_redis_client.set.assert_called_once() + call_args = mock_redis_client.set.call_args + assert call_args[0][0] == expected_key + assert call_args[1]["nx"] is True + + # Verify Discord alert was sent + mock_client.discord_system_alert.assert_called_once() + discord_message = mock_client.discord_system_alert.call_args[0][0] + assert "Insufficient Funds Alert" in discord_message + assert "test@example.com" in discord_message + assert "Test Agent" in discord_message + + +@pytest.mark.asyncio(loop_scope="session") +async def test_handle_insufficient_funds_skips_duplicate_notifications( + server: SpinTestServer, +): + """Test that duplicate insufficient funds notifications skip both email and Discord.""" + + execution_processor = ExecutionProcessor() + user_id = "test-user-123" + graph_id = "test-graph-456" + error = InsufficientBalanceError( + message="Insufficient balance", + user_id=user_id, + balance=72, + amount=-714, + ) + + with patch( + "backend.executor.manager.queue_notification" + ) as mock_queue_notif, patch( + "backend.executor.manager.get_notification_manager_client" + ) as mock_get_client, patch( + "backend.executor.manager.settings" + ) as mock_settings, patch( + "backend.executor.manager.redis" + ) as mock_redis_module: + + # Setup mocks + mock_client = MagicMock() + mock_get_client.return_value = mock_client + mock_settings.config.frontend_base_url = "https://test.com" + + # Mock Redis to simulate duplicate notification (set returns False/None) + mock_redis_client = MagicMock() + mock_redis_module.get_redis.return_value = mock_redis_client + mock_redis_client.set.return_value = None # Key already existed + + # Create mock database client + mock_db_client = MagicMock() + mock_db_client.get_graph_metadata.return_value = MagicMock(name="Test Agent") + + # Test the insufficient funds handler + execution_processor._handle_insufficient_funds_notif( + db_client=mock_db_client, + user_id=user_id, + graph_id=graph_id, + e=error, + ) + + # Verify email notification was NOT queued (deduplication worked) + mock_queue_notif.assert_not_called() 
+ + # Verify Discord alert was NOT sent (deduplication worked) + mock_client.discord_system_alert.assert_not_called() + + +@pytest.mark.asyncio(loop_scope="session") +async def test_handle_insufficient_funds_different_agents_get_separate_alerts( + server: SpinTestServer, +): + """Test that different agents for the same user get separate Discord alerts.""" + + execution_processor = ExecutionProcessor() + user_id = "test-user-123" + graph_id_1 = "test-graph-111" + graph_id_2 = "test-graph-222" + + error = InsufficientBalanceError( + message="Insufficient balance", + user_id=user_id, + balance=72, + amount=-714, + ) + + with patch("backend.executor.manager.queue_notification"), patch( + "backend.executor.manager.get_notification_manager_client" + ) as mock_get_client, patch( + "backend.executor.manager.settings" + ) as mock_settings, patch( + "backend.executor.manager.redis" + ) as mock_redis_module: + + mock_client = MagicMock() + mock_get_client.return_value = mock_client + mock_settings.config.frontend_base_url = "https://test.com" + + mock_redis_client = MagicMock() + mock_redis_module.get_redis.return_value = mock_redis_client + # Both calls return True (first time for each agent) + mock_redis_client.set.return_value = True + + mock_db_client = MagicMock() + mock_graph_metadata = MagicMock() + mock_graph_metadata.name = "Test Agent" + mock_db_client.get_graph_metadata.return_value = mock_graph_metadata + mock_db_client.get_user_email_by_id.return_value = "test@example.com" + + # First agent notification + execution_processor._handle_insufficient_funds_notif( + db_client=mock_db_client, + user_id=user_id, + graph_id=graph_id_1, + e=error, + ) + + # Second agent notification + execution_processor._handle_insufficient_funds_notif( + db_client=mock_db_client, + user_id=user_id, + graph_id=graph_id_2, + e=error, + ) + + # Verify Discord alerts were sent for both agents + assert mock_client.discord_system_alert.call_count == 2 + + # Verify Redis was called with different keys + assert mock_redis_client.set.call_count == 2 + calls = mock_redis_client.set.call_args_list + assert ( + calls[0][0][0] + == f"{INSUFFICIENT_FUNDS_NOTIFIED_PREFIX}:{user_id}:{graph_id_1}" + ) + assert ( + calls[1][0][0] + == f"{INSUFFICIENT_FUNDS_NOTIFIED_PREFIX}:{user_id}:{graph_id_2}" + ) + + +@pytest.mark.asyncio(loop_scope="session") +async def test_clear_insufficient_funds_notifications(server: SpinTestServer): + """Test that clearing notifications removes all keys for a user.""" + + user_id = "test-user-123" + + with patch("backend.executor.manager.redis") as mock_redis_module: + + mock_redis_client = MagicMock() + # get_redis_async is an async function, so we need AsyncMock for it + mock_redis_module.get_redis_async = AsyncMock(return_value=mock_redis_client) + + # Mock scan_iter to return some keys as an async iterator + mock_keys = [ + f"{INSUFFICIENT_FUNDS_NOTIFIED_PREFIX}:{user_id}:graph-1", + f"{INSUFFICIENT_FUNDS_NOTIFIED_PREFIX}:{user_id}:graph-2", + f"{INSUFFICIENT_FUNDS_NOTIFIED_PREFIX}:{user_id}:graph-3", + ] + mock_redis_client.scan_iter.return_value = async_iter(mock_keys) + # delete is awaited, so use AsyncMock + mock_redis_client.delete = AsyncMock(return_value=3) + + # Clear notifications + result = await clear_insufficient_funds_notifications(user_id) + + # Verify correct pattern was used + expected_pattern = f"{INSUFFICIENT_FUNDS_NOTIFIED_PREFIX}:{user_id}:*" + mock_redis_client.scan_iter.assert_called_once_with(match=expected_pattern) + + # Verify delete was called with all keys + 
mock_redis_client.delete.assert_called_once_with(*mock_keys) + + # Verify return value + assert result == 3 + + +@pytest.mark.asyncio(loop_scope="session") +async def test_clear_insufficient_funds_notifications_no_keys(server: SpinTestServer): + """Test clearing notifications when there are no keys to clear.""" + + user_id = "test-user-no-notifications" + + with patch("backend.executor.manager.redis") as mock_redis_module: + + mock_redis_client = MagicMock() + # get_redis_async is an async function, so we need AsyncMock for it + mock_redis_module.get_redis_async = AsyncMock(return_value=mock_redis_client) + + # Mock scan_iter to return no keys as an async iterator + mock_redis_client.scan_iter.return_value = async_iter([]) + + # Clear notifications + result = await clear_insufficient_funds_notifications(user_id) + + # Verify delete was not called + mock_redis_client.delete.assert_not_called() + + # Verify return value + assert result == 0 + + +@pytest.mark.asyncio(loop_scope="session") +async def test_clear_insufficient_funds_notifications_handles_redis_error( + server: SpinTestServer, +): + """Test that clearing notifications handles Redis errors gracefully.""" + + user_id = "test-user-redis-error" + + with patch("backend.executor.manager.redis") as mock_redis_module: + + # Mock get_redis_async to raise an error + mock_redis_module.get_redis_async = AsyncMock( + side_effect=Exception("Redis connection failed") + ) + + # Clear notifications should not raise, just return 0 + result = await clear_insufficient_funds_notifications(user_id) + + # Verify it returned 0 (graceful failure) + assert result == 0 + + +@pytest.mark.asyncio(loop_scope="session") +async def test_handle_insufficient_funds_continues_on_redis_error( + server: SpinTestServer, +): + """Test that both email and Discord notifications are still sent when Redis fails.""" + + execution_processor = ExecutionProcessor() + user_id = "test-user-123" + graph_id = "test-graph-456" + error = InsufficientBalanceError( + message="Insufficient balance", + user_id=user_id, + balance=72, + amount=-714, + ) + + with patch( + "backend.executor.manager.queue_notification" + ) as mock_queue_notif, patch( + "backend.executor.manager.get_notification_manager_client" + ) as mock_get_client, patch( + "backend.executor.manager.settings" + ) as mock_settings, patch( + "backend.executor.manager.redis" + ) as mock_redis_module: + + mock_client = MagicMock() + mock_get_client.return_value = mock_client + mock_settings.config.frontend_base_url = "https://test.com" + + # Mock Redis to raise an error + mock_redis_client = MagicMock() + mock_redis_module.get_redis.return_value = mock_redis_client + mock_redis_client.set.side_effect = Exception("Redis connection error") + + mock_db_client = MagicMock() + mock_graph_metadata = MagicMock() + mock_graph_metadata.name = "Test Agent" + mock_db_client.get_graph_metadata.return_value = mock_graph_metadata + mock_db_client.get_user_email_by_id.return_value = "test@example.com" + + # Test the insufficient funds handler + execution_processor._handle_insufficient_funds_notif( + db_client=mock_db_client, + user_id=user_id, + graph_id=graph_id, + e=error, + ) + + # Verify email notification was still queued despite Redis error + mock_queue_notif.assert_called_once() + + # Verify Discord alert was still sent despite Redis error + mock_client.discord_system_alert.assert_called_once() + + +@pytest.mark.asyncio(loop_scope="session") +async def test_add_transaction_clears_notifications_on_grant(server: SpinTestServer): + 
"""Test that _add_transaction clears notification flags when adding GRANT credits.""" + from prisma.enums import CreditTransactionType + + from backend.data.credit import UserCredit + + user_id = "test-user-grant-clear" + + with patch("backend.data.credit.query_raw_with_schema") as mock_query, patch( + "backend.executor.manager.redis" + ) as mock_redis_module: + + # Mock the query to return a successful transaction + mock_query.return_value = [{"balance": 1000, "transactionKey": "test-tx-key"}] + + # Mock async Redis for notification clearing + mock_redis_client = MagicMock() + mock_redis_module.get_redis_async = AsyncMock(return_value=mock_redis_client) + mock_redis_client.scan_iter.return_value = async_iter( + [f"{INSUFFICIENT_FUNDS_NOTIFIED_PREFIX}:{user_id}:graph-1"] + ) + mock_redis_client.delete = AsyncMock(return_value=1) + + # Create a concrete instance + credit_model = UserCredit() + + # Call _add_transaction with GRANT type (should clear notifications) + await credit_model._add_transaction( + user_id=user_id, + amount=500, # Positive amount + transaction_type=CreditTransactionType.GRANT, + is_active=True, # Active transaction + ) + + # Verify notification clearing was called + mock_redis_module.get_redis_async.assert_called_once() + mock_redis_client.scan_iter.assert_called_once_with( + match=f"{INSUFFICIENT_FUNDS_NOTIFIED_PREFIX}:{user_id}:*" + ) + + +@pytest.mark.asyncio(loop_scope="session") +async def test_add_transaction_clears_notifications_on_top_up(server: SpinTestServer): + """Test that _add_transaction clears notification flags when adding TOP_UP credits.""" + from prisma.enums import CreditTransactionType + + from backend.data.credit import UserCredit + + user_id = "test-user-topup-clear" + + with patch("backend.data.credit.query_raw_with_schema") as mock_query, patch( + "backend.executor.manager.redis" + ) as mock_redis_module: + + # Mock the query to return a successful transaction + mock_query.return_value = [{"balance": 2000, "transactionKey": "test-tx-key-2"}] + + # Mock async Redis for notification clearing + mock_redis_client = MagicMock() + mock_redis_module.get_redis_async = AsyncMock(return_value=mock_redis_client) + mock_redis_client.scan_iter.return_value = async_iter([]) + mock_redis_client.delete = AsyncMock(return_value=0) + + credit_model = UserCredit() + + # Call _add_transaction with TOP_UP type (should clear notifications) + await credit_model._add_transaction( + user_id=user_id, + amount=1000, # Positive amount + transaction_type=CreditTransactionType.TOP_UP, + is_active=True, + ) + + # Verify notification clearing was attempted + mock_redis_module.get_redis_async.assert_called_once() + + +@pytest.mark.asyncio(loop_scope="session") +async def test_add_transaction_skips_clearing_for_inactive_transaction( + server: SpinTestServer, +): + """Test that _add_transaction does NOT clear notifications for inactive transactions.""" + from prisma.enums import CreditTransactionType + + from backend.data.credit import UserCredit + + user_id = "test-user-inactive" + + with patch("backend.data.credit.query_raw_with_schema") as mock_query, patch( + "backend.executor.manager.redis" + ) as mock_redis_module: + + # Mock the query to return a successful transaction + mock_query.return_value = [{"balance": 500, "transactionKey": "test-tx-key-3"}] + + # Mock async Redis + mock_redis_client = MagicMock() + mock_redis_module.get_redis_async = AsyncMock(return_value=mock_redis_client) + + credit_model = UserCredit() + + # Call _add_transaction with is_active=False (should 
NOT clear notifications) + await credit_model._add_transaction( + user_id=user_id, + amount=500, + transaction_type=CreditTransactionType.TOP_UP, + is_active=False, # Inactive - pending Stripe payment + ) + + # Verify notification clearing was NOT called + mock_redis_module.get_redis_async.assert_not_called() + + +@pytest.mark.asyncio(loop_scope="session") +async def test_add_transaction_skips_clearing_for_usage_transaction( + server: SpinTestServer, +): + """Test that _add_transaction does NOT clear notifications for USAGE transactions.""" + from prisma.enums import CreditTransactionType + + from backend.data.credit import UserCredit + + user_id = "test-user-usage" + + with patch("backend.data.credit.query_raw_with_schema") as mock_query, patch( + "backend.executor.manager.redis" + ) as mock_redis_module: + + # Mock the query to return a successful transaction + mock_query.return_value = [{"balance": 400, "transactionKey": "test-tx-key-4"}] + + # Mock async Redis + mock_redis_client = MagicMock() + mock_redis_module.get_redis_async = AsyncMock(return_value=mock_redis_client) + + credit_model = UserCredit() + + # Call _add_transaction with USAGE type (spending, should NOT clear) + await credit_model._add_transaction( + user_id=user_id, + amount=-100, # Negative - spending credits + transaction_type=CreditTransactionType.USAGE, + is_active=True, + ) + + # Verify notification clearing was NOT called + mock_redis_module.get_redis_async.assert_not_called() + + +@pytest.mark.asyncio(loop_scope="session") +async def test_enable_transaction_clears_notifications(server: SpinTestServer): + """Test that _enable_transaction clears notification flags when enabling a TOP_UP.""" + from prisma.enums import CreditTransactionType + + from backend.data.credit import UserCredit + + user_id = "test-user-enable" + + with patch("backend.data.credit.CreditTransaction") as mock_credit_tx, patch( + "backend.data.credit.query_raw_with_schema" + ) as mock_query, patch("backend.executor.manager.redis") as mock_redis_module: + + # Mock finding the pending transaction + mock_transaction = MagicMock() + mock_transaction.amount = 1000 + mock_transaction.type = CreditTransactionType.TOP_UP + mock_credit_tx.prisma.return_value.find_first = AsyncMock( + return_value=mock_transaction + ) + + # Mock the query to return updated balance + mock_query.return_value = [{"balance": 1500}] + + # Mock async Redis for notification clearing + mock_redis_client = MagicMock() + mock_redis_module.get_redis_async = AsyncMock(return_value=mock_redis_client) + mock_redis_client.scan_iter.return_value = async_iter( + [f"{INSUFFICIENT_FUNDS_NOTIFIED_PREFIX}:{user_id}:graph-1"] + ) + mock_redis_client.delete = AsyncMock(return_value=1) + + credit_model = UserCredit() + + # Call _enable_transaction (simulates Stripe checkout completion) + from backend.util.json import SafeJson + + await credit_model._enable_transaction( + transaction_key="cs_test_123", + user_id=user_id, + metadata=SafeJson({"payment": "completed"}), + ) + + # Verify notification clearing was called + mock_redis_module.get_redis_async.assert_called_once() + mock_redis_client.scan_iter.assert_called_once_with( + match=f"{INSUFFICIENT_FUNDS_NOTIFIED_PREFIX}:{user_id}:*" + ) From fba61c72ed64bc5dc527392ffbd667e8e278cea2 Mon Sep 17 00:00:00 2001 From: Abhimanyu Yadav <122007096+Abhi1992002@users.noreply.github.com> Date: Wed, 31 Dec 2025 15:16:12 +0530 Subject: [PATCH 23/25] feat(frontend): fix duplicate publish button and improve BuilderActionButton styling (#11669) MIME-Version: 
1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes duplicate "Publish to Marketplace" buttons in the builder by adding a `showTrigger` prop to control modal trigger visibility. Screenshot 2025-12-23 at 8 18 58 AM ### Changes 🏗️ **BuilderActionButton.tsx** - Removed borders on hover and active states for a cleaner visual appearance - Added `hover:border-none` and `active:border-none` to maintain consistent styling during interactions **PublishToMarketplace.tsx** - Pass `showTrigger={false}` to `PublishAgentModal` to hide the default trigger button - This prevents duplicate buttons when a custom trigger is already rendered **PublishAgentModal.tsx** - Added `showTrigger` prop (defaults to `true`) to conditionally render the modal trigger - Allows parent components to control whether the built-in trigger button should be displayed - Maintains backward compatibility with existing usage ### Checklist 📋 #### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] Verify only one "Publish to Marketplace" button appears in the builder - [x] Confirm button hover/active states display correctly without border artifacts - [x] Verify modal can still be triggered programmatically without the trigger button --- .../BuilderActions/components/BuilderActionButton.tsx | 4 ++-- .../PublishToMarketplace/PublishToMarketplace.tsx | 1 + .../contextual/PublishAgentModal/PublishAgentModal.tsx | 9 ++++++--- .../contextual/PublishAgentModal/usePublishAgentModal.ts | 1 + 4 files changed, 10 insertions(+), 5 deletions(-) diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/BuilderActionButton.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/BuilderActionButton.tsx index f8b3f1051e..549b432a38 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/BuilderActionButton.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/BuilderActionButton.tsx @@ -19,8 +19,8 @@ export const BuilderActionButton = ({ "border border-zinc-200", "shadow-[inset_0_3px_0_0_rgba(255,255,255,0.5),0_2px_4px_0_rgba(0,0,0,0.2)]", "dark:shadow-[inset_0_1px_0_0_rgba(255,255,255,0.1),0_2px_4px_0_rgba(0,0,0,0.4)]", - "hover:shadow-[inset_0_1px_0_0_rgba(255,255,255,0.5),0_1px_2px_0_rgba(0,0,0,0.2)]", - "active:shadow-[inset_0_2px_4px_0_rgba(0,0,0,0.2)]", + "hover:border-none hover:shadow-[inset_0_1px_0_0_rgba(255,255,255,0.5),0_1px_2px_0_rgba(0,0,0,0.2)]", + "active:border-none active:shadow-[inset_0_2px_4px_0_rgba(0,0,0,0.2)]", "transition-all duration-150", "disabled:cursor-not-allowed disabled:opacity-50", className, diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/PublishToMarketplace.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/PublishToMarketplace.tsx index 1e6545dfbd..500b8f0b47 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/PublishToMarketplace.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/PublishToMarketplace.tsx @@ -30,6 +30,7 @@ export const PublishToMarketplace = ({ flowID }: { flowID: string | null }) => { 
targetState={publishState} onStateChange={handleStateChange} preSelectedAgentId={flowID || undefined} + showTrigger={false} /> ); diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/PublishAgentModal.tsx b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/PublishAgentModal.tsx index dd91094f9c..da3324f600 100644 --- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/PublishAgentModal.tsx +++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/PublishAgentModal.tsx @@ -20,6 +20,7 @@ export function PublishAgentModal({ onStateChange, preSelectedAgentId, preSelectedAgentVersion, + showTrigger = true, }: Props) { const { // State @@ -121,9 +122,11 @@ export function PublishAgentModal({ }, }} > - - {trigger || } - + {showTrigger && ( + + {trigger || } + + )}
{renderContent()}
diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/usePublishAgentModal.ts b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/usePublishAgentModal.ts index f83698d8e7..0f8a819c6e 100644 --- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/usePublishAgentModal.ts +++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/usePublishAgentModal.ts @@ -30,6 +30,7 @@ export interface Props { onStateChange?: (state: PublishState) => void; preSelectedAgentId?: string; preSelectedAgentVersion?: number; + showTrigger?: boolean; } export function usePublishAgentModal({ From 290d0d9a9beaccb74221540bd32363bf73743172 Mon Sep 17 00:00:00 2001 From: Abhimanyu Yadav <122007096+Abhi1992002@users.noreply.github.com> Date: Wed, 31 Dec 2025 20:19:53 +0530 Subject: [PATCH 24/25] feat(frontend): add auto-save Draft Recovery feature with IndexedDB persistence (#11658) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Implements an auto-save draft recovery system that persists unsaved flow builder state across browser sessions, tab closures, and refreshes. When users return to a flow with unsaved changes, they can choose to restore or discard the draft via an intuitive recovery popup. https://github.com/user-attachments/assets/0f77173b-7834-48d2-b7aa-73c6cd2eaff6 ## Changes 🏗️ ### Core Features - **Draft Recovery Popup** (`DraftRecoveryPopup.tsx`) - Displays amber-themed notification with unsaved changes metadata - Shows node count, edge count, and relative time since last save - Provides restore and discard actions with tooltips - Auto-dismisses on click outside or ESC key - **Auto-Save System** (`useDraftManager.ts`) - Automatically saves draft state every 15 seconds - Saves on browser tab close/refresh via `beforeunload` - Tracks nodes, edges, graph schemas, node counter, and flow version - Smart dirty checking - only saves when actual changes detected - Cleans up expired drafts (24-hour TTL) - **IndexedDB Persistence** (`db.ts`, `draft-service.ts`) - Uses Dexie library for reliable client-side storage - Handles both existing flows (by flowID) and new flows (via temp session IDs) - Compares draft state with current state to determine if recovery needed - Automatically clears drafts after successful save ### Integration Changes - **Flow Editor** (`Flow.tsx`) - Integrated `DraftRecoveryPopup` component - Passes `isInitialLoadComplete` state for proper timing - **useFlow Hook** (`useFlow.ts`) - Added `isInitialLoadComplete` state to track when flow is ready - Ensures draft check happens after initial graph load - Resets state on flow/version changes - **useCopyPaste Hook** (`useCopyPaste.ts`) - Refactored to manage keyboard event listeners internally - Simplified integration by removing external event handler setup - **useSaveGraph Hook** (`useSaveGraph.ts`) - Clears draft after successful save (both create and update) - Removes temp flow ID from session storage on first save ### Dependencies - Added `dexie@4.2.1` - Modern IndexedDB wrapper for reliable client-side storage ## Technical Details **Auto-Save Flow:** 1. User makes changes to nodes/edges 2. Change triggers 15-second debounced save 3. Draft saved to IndexedDB with timestamp 4. On save, current state compared with last saved state 5. Only saves if meaningful changes detected **Recovery Flow:** 1. User loads flow/refreshes page 2. After initial load completes, check for existing draft 3. 
Compare draft with current state 4. If different and non-empty, show recovery popup 5. User chooses to restore or discard 6. Draft cleared after either action **Session Management:** - Existing flows: Use actual flowID for draft key ### Test Plan 🧪 - [x] Create a new flow with 3+ blocks and connections, wait 15+ seconds, then refresh the page - verify recovery popup appears with correct counts and restoring works - [x] Create a flow with blocks, refresh, then click "Discard" button on recovery popup - verify popup disappears and draft is deleted - [x] Add blocks to a flow, save successfully - verify draft is cleared from IndexedDB (check DevTools > Application > IndexedDB) - [x] Make changes to an existing flow, refresh page - verify recovery popup shows and restoring preserves all changes correctly - [x] Verify empty flows (0 nodes) don't trigger recovery popup or save drafts --- autogpt_platform/frontend/package.json | 1 + autogpt_platform/frontend/pnpm-lock.yaml | 8 + .../DraftRecoveryPopup.tsx | 118 +++++++ .../useDraftRecoveryPopup.tsx | 61 ++++ .../build/components/FlowEditor/Flow/Flow.tsx | 28 +- .../FlowEditor/Flow/useCopyPaste.ts | 13 +- .../FlowEditor/Flow/useDraftManager.ts | 300 ++++++++++++++++++ .../components/FlowEditor/Flow/useFlow.ts | 15 + .../(platform)/build/hooks/useSaveGraph.ts | 22 +- autogpt_platform/frontend/src/lib/dexie/db.ts | 46 +++ .../frontend/src/lib/dexie/draft-utils.ts | 33 ++ .../services/builder-draft/draft-service.ts | 118 +++++++ 12 files changed, 745 insertions(+), 18 deletions(-) create mode 100644 autogpt_platform/frontend/src/app/(platform)/build/components/DraftRecoveryDialog/DraftRecoveryPopup.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/build/components/DraftRecoveryDialog/useDraftRecoveryPopup.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/useDraftManager.ts create mode 100644 autogpt_platform/frontend/src/lib/dexie/db.ts create mode 100644 autogpt_platform/frontend/src/lib/dexie/draft-utils.ts create mode 100644 autogpt_platform/frontend/src/services/builder-draft/draft-service.ts diff --git a/autogpt_platform/frontend/package.json b/autogpt_platform/frontend/package.json index 4cbd867cd8..1708ac9053 100644 --- a/autogpt_platform/frontend/package.json +++ b/autogpt_platform/frontend/package.json @@ -69,6 +69,7 @@ "cmdk": "1.1.1", "cookie": "1.0.2", "date-fns": "4.1.0", + "dexie": "4.2.1", "dotenv": "17.2.3", "elliptic": "6.6.1", "embla-carousel-react": "8.6.0", diff --git a/autogpt_platform/frontend/pnpm-lock.yaml b/autogpt_platform/frontend/pnpm-lock.yaml index 7d39b68468..355ffff129 100644 --- a/autogpt_platform/frontend/pnpm-lock.yaml +++ b/autogpt_platform/frontend/pnpm-lock.yaml @@ -131,6 +131,9 @@ importers: date-fns: specifier: 4.1.0 version: 4.1.0 + dexie: + specifier: 4.2.1 + version: 4.2.1 dotenv: specifier: 17.2.3 version: 17.2.3 @@ -4428,6 +4431,9 @@ packages: devlop@1.1.0: resolution: {integrity: sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==} + dexie@4.2.1: + resolution: {integrity: sha512-Ckej0NS6jxQ4Po3OrSQBFddayRhTCic2DoCAG5zacOfOVB9P2Q5Xc5uL/nVa7ZVs+HdMnvUPzLFCB/JwpB6Csg==} + didyoumean@1.2.2: resolution: {integrity: sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==} @@ -12323,6 +12329,8 @@ snapshots: dependencies: dequal: 2.0.3 + dexie@4.2.1: {} + didyoumean@1.2.2: {} diffie-hellman@5.0.3: diff --git 
a/autogpt_platform/frontend/src/app/(platform)/build/components/DraftRecoveryDialog/DraftRecoveryPopup.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/DraftRecoveryDialog/DraftRecoveryPopup.tsx new file mode 100644 index 0000000000..520addd50f --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/DraftRecoveryDialog/DraftRecoveryPopup.tsx @@ -0,0 +1,118 @@ +"use client"; + +import { Button } from "@/components/atoms/Button/Button"; +import { ClockCounterClockwiseIcon, XIcon } from "@phosphor-icons/react"; +import { cn } from "@/lib/utils"; +import { formatTimeAgo } from "@/lib/utils/time"; +import { + Tooltip, + TooltipContent, + TooltipTrigger, +} from "@/components/atoms/Tooltip/BaseTooltip"; +import { useDraftRecoveryPopup } from "./useDraftRecoveryPopup"; +import { Text } from "@/components/atoms/Text/Text"; +import { AnimatePresence, motion } from "framer-motion"; + +interface DraftRecoveryPopupProps { + isInitialLoadComplete: boolean; +} + +export function DraftRecoveryPopup({ + isInitialLoadComplete, +}: DraftRecoveryPopupProps) { + const { isOpen, popupRef, nodeCount, edgeCount, savedAt, onLoad, onDiscard } = + useDraftRecoveryPopup(isInitialLoadComplete); + + return ( + + {isOpen && ( + +
+
+ +
+ +
+ + Unsaved changes found + + + {nodeCount} block{nodeCount !== 1 ? "s" : ""}, {edgeCount}{" "} + connection + {edgeCount !== 1 ? "s" : ""} •{" "} + {formatTimeAgo(new Date(savedAt).toISOString())} + +
+ +
+ + + + + Restore changes + + + + + + Discard changes + +
+
+
+ )} +
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/DraftRecoveryDialog/useDraftRecoveryPopup.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/DraftRecoveryDialog/useDraftRecoveryPopup.tsx new file mode 100644 index 0000000000..0914b04952 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/DraftRecoveryDialog/useDraftRecoveryPopup.tsx @@ -0,0 +1,61 @@ +import { useEffect, useRef } from "react"; +import { useDraftManager } from "../FlowEditor/Flow/useDraftManager"; + +export const useDraftRecoveryPopup = (isInitialLoadComplete: boolean) => { + const popupRef = useRef(null); + + const { + isRecoveryOpen: isOpen, + savedAt, + nodeCount, + edgeCount, + loadDraft: onLoad, + discardDraft: onDiscard, + } = useDraftManager(isInitialLoadComplete); + + useEffect(() => { + if (!isOpen) return; + + const handleClickOutside = (event: MouseEvent) => { + if ( + popupRef.current && + !popupRef.current.contains(event.target as Node) + ) { + onDiscard(); + } + }; + + const timeoutId = setTimeout(() => { + document.addEventListener("mousedown", handleClickOutside); + }, 100); + + return () => { + clearTimeout(timeoutId); + document.removeEventListener("mousedown", handleClickOutside); + }; + }, [isOpen, onDiscard]); + + useEffect(() => { + if (!isOpen) return; + + const handleKeyDown = (event: KeyboardEvent) => { + if (event.key === "Escape") { + onDiscard(); + } + }; + + document.addEventListener("keydown", handleKeyDown); + return () => { + document.removeEventListener("keydown", handleKeyDown); + }; + }, [isOpen, onDiscard]); + return { + popupRef, + isOpen, + nodeCount, + edgeCount, + savedAt, + onLoad, + onDiscard, + }; +}; diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx index c9cf5296c6..4c6796d746 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx @@ -4,7 +4,7 @@ import CustomEdge from "../edges/CustomEdge"; import { useFlow } from "./useFlow"; import { useShallow } from "zustand/react/shallow"; import { useNodeStore } from "../../../stores/nodeStore"; -import { useMemo, useEffect, useCallback } from "react"; +import { useMemo, useCallback } from "react"; import { CustomNode } from "../nodes/CustomNode/CustomNode"; import { useCustomEdge } from "../edges/useCustomEdge"; import { useFlowRealtime } from "./useFlowRealtime"; @@ -21,6 +21,7 @@ import { okData } from "@/app/api/helpers"; import { TriggerAgentBanner } from "./components/TriggerAgentBanner"; import { resolveCollisions } from "./helpers/resolve-collision"; import { FloatingSafeModeToggle } from "../../FloatingSafeModeToogle"; +import { DraftRecoveryPopup } from "../../DraftRecoveryDialog/DraftRecoveryPopup"; export const Flow = () => { const [{ flowID, flowExecutionID }] = useQueryStates({ @@ -60,26 +61,22 @@ export const Flow = () => { }, [setNodes, nodes]); const { edges, onConnect, onEdgesChange } = useCustomEdge(); - // We use this hook to load the graph and convert them into custom nodes and edges. 
- const { onDragOver, onDrop, isFlowContentLoading, isLocked, setIsLocked } = - useFlow(); + // for loading purpose + const { + onDragOver, + onDrop, + isFlowContentLoading, + isInitialLoadComplete, + isLocked, + setIsLocked, + } = useFlow(); // This hook is used for websocket realtime updates. useFlowRealtime(); // Copy/paste functionality - const handleCopyPaste = useCopyPaste(); + useCopyPaste(); - useEffect(() => { - const handleKeyDown = (event: KeyboardEvent) => { - handleCopyPaste(event); - }; - - window.addEventListener("keydown", handleKeyDown); - return () => { - window.removeEventListener("keydown", handleKeyDown); - }; - }, [handleCopyPaste]); const isGraphRunning = useGraphStore( useShallow((state) => state.isGraphRunning), ); @@ -115,6 +112,7 @@ export const Flow = () => { className="right-2 top-32 p-2" /> )} +
{/* TODO: Need to update it in future - also do not send executionId as prop - rather use useQueryState inside the component */} diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/useCopyPaste.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/useCopyPaste.ts index 7a8213da22..c6c54006d4 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/useCopyPaste.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/useCopyPaste.ts @@ -1,4 +1,4 @@ -import { useCallback } from "react"; +import { useCallback, useEffect } from "react"; import { useReactFlow } from "@xyflow/react"; import { v4 as uuidv4 } from "uuid"; import { useNodeStore } from "../../../stores/nodeStore"; @@ -151,5 +151,16 @@ export function useCopyPaste() { [getViewport, toast], ); + useEffect(() => { + const handleKeyDown = (event: KeyboardEvent) => { + handleCopyPaste(event); + }; + + window.addEventListener("keydown", handleKeyDown); + return () => { + window.removeEventListener("keydown", handleKeyDown); + }; + }, [handleCopyPaste]); + return handleCopyPaste; } diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/useDraftManager.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/useDraftManager.ts new file mode 100644 index 0000000000..f6d03923bd --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/useDraftManager.ts @@ -0,0 +1,300 @@ +import { useState, useCallback, useEffect, useRef } from "react"; +import { parseAsString, parseAsInteger, useQueryStates } from "nuqs"; +import { + draftService, + getTempFlowId, + getOrCreateTempFlowId, + DraftData, +} from "@/services/builder-draft/draft-service"; +import { BuilderDraft } from "@/lib/dexie/db"; +import { cleanNodes, cleanEdges } from "@/lib/dexie/draft-utils"; +import { useNodeStore } from "../../../stores/nodeStore"; +import { useEdgeStore } from "../../../stores/edgeStore"; +import { useGraphStore } from "../../../stores/graphStore"; +import { useHistoryStore } from "../../../stores/historyStore"; +import isEqual from "lodash/isEqual"; + +const AUTO_SAVE_INTERVAL_MS = 15000; // 15 seconds + +interface DraftRecoveryState { + isOpen: boolean; + draft: BuilderDraft | null; +} + +/** + * Consolidated hook for draft persistence and recovery + * - Auto-saves builder state every 15 seconds + * - Saves on beforeunload event + * - Checks for and manages unsaved drafts on load + */ +export function useDraftManager(isInitialLoadComplete: boolean) { + const [state, setState] = useState({ + isOpen: false, + draft: null, + }); + + const [{ flowID, flowVersion }] = useQueryStates({ + flowID: parseAsString, + flowVersion: parseAsInteger, + }); + + const lastSavedStateRef = useRef(null); + const saveTimeoutRef = useRef(null); + const isDirtyRef = useRef(false); + const hasCheckedForDraft = useRef(false); + + const getEffectiveFlowId = useCallback((): string => { + return flowID || getOrCreateTempFlowId(); + }, [flowID]); + + const getCurrentState = useCallback((): DraftData => { + const nodes = useNodeStore.getState().nodes; + const edges = useEdgeStore.getState().edges; + const nodeCounter = useNodeStore.getState().nodeCounter; + const graphStore = useGraphStore.getState(); + + return { + nodes, + edges, + graphSchemas: { + input: graphStore.inputSchema, + credentials: graphStore.credentialsInputSchema, + output: graphStore.outputSchema, + 
}, + nodeCounter, + flowVersion: flowVersion ?? undefined, + }; + }, [flowVersion]); + + const cleanStateForComparison = useCallback((stateData: DraftData) => { + return { + nodes: cleanNodes(stateData.nodes), + edges: cleanEdges(stateData.edges), + }; + }, []); + + const hasChanges = useCallback((): boolean => { + const currentState = getCurrentState(); + + if (!lastSavedStateRef.current) { + return currentState.nodes.length > 0; + } + + const currentClean = cleanStateForComparison(currentState); + const lastClean = cleanStateForComparison(lastSavedStateRef.current); + + return !isEqual(currentClean, lastClean); + }, [getCurrentState, cleanStateForComparison]); + + const saveDraft = useCallback(async () => { + const effectiveFlowId = getEffectiveFlowId(); + const currentState = getCurrentState(); + + if (currentState.nodes.length === 0 && currentState.edges.length === 0) { + return; + } + + if (!hasChanges()) { + return; + } + + try { + await draftService.saveDraft(effectiveFlowId, currentState); + lastSavedStateRef.current = currentState; + isDirtyRef.current = false; + } catch (error) { + console.error("[DraftPersistence] Failed to save draft:", error); + } + }, [getEffectiveFlowId, getCurrentState, hasChanges]); + + const scheduleSave = useCallback(() => { + isDirtyRef.current = true; + + if (saveTimeoutRef.current) { + clearTimeout(saveTimeoutRef.current); + } + + saveTimeoutRef.current = setTimeout(() => { + saveDraft(); + }, AUTO_SAVE_INTERVAL_MS); + }, [saveDraft]); + + useEffect(() => { + const unsubscribeNodes = useNodeStore.subscribe((storeState, prevState) => { + if (storeState.nodes !== prevState.nodes) { + scheduleSave(); + } + }); + + const unsubscribeEdges = useEdgeStore.subscribe((storeState, prevState) => { + if (storeState.edges !== prevState.edges) { + scheduleSave(); + } + }); + + return () => { + unsubscribeNodes(); + unsubscribeEdges(); + }; + }, [scheduleSave]); + + useEffect(() => { + const handleBeforeUnload = () => { + if (isDirtyRef.current) { + const effectiveFlowId = getEffectiveFlowId(); + const currentState = getCurrentState(); + + if ( + currentState.nodes.length === 0 && + currentState.edges.length === 0 + ) { + return; + } + + draftService.saveDraft(effectiveFlowId, currentState).catch(() => { + // Ignore errors on unload + }); + } + }; + + window.addEventListener("beforeunload", handleBeforeUnload); + return () => { + window.removeEventListener("beforeunload", handleBeforeUnload); + }; + }, [getEffectiveFlowId, getCurrentState]); + + useEffect(() => { + return () => { + if (saveTimeoutRef.current) { + clearTimeout(saveTimeoutRef.current); + } + if (isDirtyRef.current) { + saveDraft(); + } + }; + }, [saveDraft]); + + useEffect(() => { + draftService.cleanupExpired().catch((error) => { + console.error( + "[DraftPersistence] Failed to cleanup expired drafts:", + error, + ); + }); + }, []); + + const checkForDraft = useCallback(async () => { + const effectiveFlowId = flowID || getTempFlowId(); + + if (!effectiveFlowId) { + return; + } + + try { + const draft = await draftService.loadDraft(effectiveFlowId); + + if (!draft) { + return; + } + + const currentNodes = useNodeStore.getState().nodes; + const currentEdges = useEdgeStore.getState().edges; + + const isDifferent = draftService.isDraftDifferent( + draft, + currentNodes, + currentEdges, + ); + + if (isDifferent && (draft.nodes.length > 0 || draft.edges.length > 0)) { + setState({ + isOpen: true, + draft, + }); + } else { + await draftService.deleteDraft(effectiveFlowId); + } + } catch (error) { + 
console.error("[DraftRecovery] Failed to check for draft:", error); + } + }, [flowID]); + + useEffect(() => { + if (isInitialLoadComplete && !hasCheckedForDraft.current) { + hasCheckedForDraft.current = true; + checkForDraft(); + } + }, [isInitialLoadComplete, checkForDraft]); + + useEffect(() => { + hasCheckedForDraft.current = false; + setState({ + isOpen: false, + draft: null, + }); + }, [flowID]); + + const loadDraft = useCallback(async () => { + if (!state.draft) return; + + const { draft } = state; + + try { + useNodeStore.getState().setNodes(draft.nodes); + useEdgeStore.getState().setEdges(draft.edges); + + // Restore nodeCounter to prevent ID conflicts when adding new nodes + if (draft.nodeCounter !== undefined) { + useNodeStore.setState({ nodeCounter: draft.nodeCounter }); + } + + if (draft.graphSchemas) { + useGraphStore + .getState() + .setGraphSchemas( + draft.graphSchemas.input as Record | null, + draft.graphSchemas.credentials as Record | null, + draft.graphSchemas.output as Record | null, + ); + } + + setTimeout(() => { + useHistoryStore.getState().initializeHistory(); + }, 100); + + await draftService.deleteDraft(draft.id); + + setState({ + isOpen: false, + draft: null, + }); + } catch (error) { + console.error("[DraftRecovery] Failed to load draft:", error); + } + }, [state.draft]); + + const discardDraft = useCallback(async () => { + if (!state.draft) { + setState({ isOpen: false, draft: null }); + return; + } + + try { + await draftService.deleteDraft(state.draft.id); + } catch (error) { + console.error("[DraftRecovery] Failed to discard draft:", error); + } + + setState({ isOpen: false, draft: null }); + }, [state.draft]); + + return { + // Recovery popup props + isRecoveryOpen: state.isOpen, + savedAt: state.draft?.savedAt ?? 0, + nodeCount: state.draft?.nodes.length ?? 0, + edgeCount: state.draft?.edges.length ?? 
0, + loadDraft, + discardDraft, + }; +} diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/useFlow.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/useFlow.ts index be76c4ec2b..7514611f08 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/useFlow.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/useFlow.ts @@ -21,6 +21,7 @@ import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecut export const useFlow = () => { const [isLocked, setIsLocked] = useState(false); const [hasAutoFramed, setHasAutoFramed] = useState(false); + const [isInitialLoadComplete, setIsInitialLoadComplete] = useState(false); const addNodes = useNodeStore(useShallow((state) => state.addNodes)); const addLinks = useEdgeStore(useShallow((state) => state.addLinks)); const updateNodeStatus = useNodeStore( @@ -174,11 +175,23 @@ export const useFlow = () => { if (customNodes.length > 0 && graph?.links) { const timer = setTimeout(() => { useHistoryStore.getState().initializeHistory(); + // Mark initial load as complete after history is initialized + setIsInitialLoadComplete(true); }, 100); return () => clearTimeout(timer); } }, [customNodes, graph?.links]); + // Also mark as complete for new flows (no flowID) after a short delay + useEffect(() => { + if (!flowID && !isGraphLoading && !isBlocksLoading) { + const timer = setTimeout(() => { + setIsInitialLoadComplete(true); + }, 200); + return () => clearTimeout(timer); + } + }, [flowID, isGraphLoading, isBlocksLoading]); + useEffect(() => { return () => { useNodeStore.getState().setNodes([]); @@ -217,6 +230,7 @@ export const useFlow = () => { useEffect(() => { setHasAutoFramed(false); + setIsInitialLoadComplete(false); }, [flowID, flowVersion]); // Drag and drop block from block menu @@ -253,6 +267,7 @@ export const useFlow = () => { return { isFlowContentLoading: isGraphLoading || isBlocksLoading, + isInitialLoadComplete, onDragOver, onDrop, isLocked, diff --git a/autogpt_platform/frontend/src/app/(platform)/build/hooks/useSaveGraph.ts b/autogpt_platform/frontend/src/app/(platform)/build/hooks/useSaveGraph.ts index d0b488f26c..505303cc1e 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/hooks/useSaveGraph.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/hooks/useSaveGraph.ts @@ -15,6 +15,11 @@ import { useEdgeStore } from "../stores/edgeStore"; import { graphsEquivalent } from "../components/NewControlPanel/NewSaveControl/helpers"; import { useGraphStore } from "../stores/graphStore"; import { useShallow } from "zustand/react/shallow"; +import { + draftService, + clearTempFlowId, + getTempFlowId, +} from "@/services/builder-draft/draft-service"; export type SaveGraphOptions = { showToast?: boolean; @@ -52,12 +57,19 @@ export const useSaveGraph = ({ const { mutateAsync: createNewGraph, isPending: isCreating } = usePostV1CreateNewGraph({ mutation: { - onSuccess: (response) => { + onSuccess: async (response) => { const data = response.data as GraphModel; setQueryStates({ flowID: data.id, flowVersion: data.version, }); + + const tempFlowId = getTempFlowId(); + if (tempFlowId) { + await draftService.deleteDraft(tempFlowId); + clearTempFlowId(); + } + onSuccess?.(data); if (showToast) { toast({ @@ -82,12 +94,18 @@ export const useSaveGraph = ({ const { mutateAsync: updateGraph, isPending: isUpdating } = usePutV1UpdateGraphVersion({ mutation: { - onSuccess: (response) => { + onSuccess: async 
(response) => { const data = response.data as GraphModel; setQueryStates({ flowID: data.id, flowVersion: data.version, }); + + // Clear the draft for this flow after successful save + if (data.id) { + await draftService.deleteDraft(data.id); + } + onSuccess?.(data); if (showToast) { toast({ diff --git a/autogpt_platform/frontend/src/lib/dexie/db.ts b/autogpt_platform/frontend/src/lib/dexie/db.ts new file mode 100644 index 0000000000..05e749ca4b --- /dev/null +++ b/autogpt_platform/frontend/src/lib/dexie/db.ts @@ -0,0 +1,46 @@ +import Dexie, { type EntityTable } from "dexie"; +import type { CustomNode } from "@/app/(platform)/build/components/FlowEditor/nodes/CustomNode/CustomNode"; +import type { CustomEdge } from "@/app/(platform)/build/components/FlowEditor/edges/CustomEdge"; + +// 24 hrs expiry +export const DRAFT_EXPIRY_MS = 24 * 60 * 60 * 1000; + +export interface BuilderDraft { + id: string; + nodes: CustomNode[]; + edges: CustomEdge[]; + graphSchemas: { + input: Record | null; + credentials: Record | null; + output: Record | null; + }; + nodeCounter: number; + savedAt: number; + flowVersion?: number; +} + +class BuilderDatabase extends Dexie { + drafts!: EntityTable; + + constructor() { + super("AutoGPTBuilderDB"); + + this.version(1).stores({ + drafts: "id, savedAt", + }); + } +} + +// Singleton database instance +export const db = new BuilderDatabase(); + +export async function cleanupExpiredDrafts(): Promise { + const expiryThreshold = Date.now() - DRAFT_EXPIRY_MS; + + const deletedCount = await db.drafts + .where("savedAt") + .below(expiryThreshold) + .delete(); + + return deletedCount; +} diff --git a/autogpt_platform/frontend/src/lib/dexie/draft-utils.ts b/autogpt_platform/frontend/src/lib/dexie/draft-utils.ts new file mode 100644 index 0000000000..185ebf92b4 --- /dev/null +++ b/autogpt_platform/frontend/src/lib/dexie/draft-utils.ts @@ -0,0 +1,33 @@ +import type { CustomNode } from "@/app/(platform)/build/components/FlowEditor/nodes/CustomNode/CustomNode"; +import type { CustomEdge } from "@/app/(platform)/build/components/FlowEditor/edges/CustomEdge"; + +export function cleanNode(node: CustomNode) { + return { + id: node.id, + position: node.position, + data: { + hardcodedValues: node.data.hardcodedValues, + title: node.data.title, + block_id: node.data.block_id, + metadata: node.data.metadata, + }, + }; +} + +export function cleanEdge(edge: CustomEdge) { + return { + id: edge.id, + source: edge.source, + target: edge.target, + sourceHandle: edge.sourceHandle, + targetHandle: edge.targetHandle, + }; +} + +export function cleanNodes(nodes: CustomNode[]) { + return nodes.map(cleanNode); +} + +export function cleanEdges(edges: CustomEdge[]) { + return edges.map(cleanEdge); +} diff --git a/autogpt_platform/frontend/src/services/builder-draft/draft-service.ts b/autogpt_platform/frontend/src/services/builder-draft/draft-service.ts new file mode 100644 index 0000000000..6d35d23bf4 --- /dev/null +++ b/autogpt_platform/frontend/src/services/builder-draft/draft-service.ts @@ -0,0 +1,118 @@ +import { + db, + BuilderDraft, + DRAFT_EXPIRY_MS, + cleanupExpiredDrafts, +} from "../../lib/dexie/db"; +import type { CustomNode } from "@/app/(platform)/build/components/FlowEditor/nodes/CustomNode/CustomNode"; +import type { CustomEdge } from "@/app/(platform)/build/components/FlowEditor/edges/CustomEdge"; +import { cleanNodes, cleanEdges } from "../../lib/dexie/draft-utils"; +import isEqual from "lodash/isEqual"; +import { environment } from "@/services/environment"; + +const SESSION_TEMP_ID_KEY 
= "builder_temp_flow_id"; + +export function getOrCreateTempFlowId(): string { + if (environment.isServerSide()) { + return `temp_${crypto.randomUUID()}`; + } + + let tempId = sessionStorage.getItem(SESSION_TEMP_ID_KEY); + if (!tempId) { + tempId = `temp_${crypto.randomUUID()}`; + sessionStorage.setItem(SESSION_TEMP_ID_KEY, tempId); + } + return tempId; +} + +export function clearTempFlowId(): void { + if (environment.isClientSide()) { + sessionStorage.removeItem(SESSION_TEMP_ID_KEY); + } +} + +export function getTempFlowId(): string | null { + if (environment.isServerSide()) { + return null; + } + return sessionStorage.getItem(SESSION_TEMP_ID_KEY); +} + +export interface DraftData { + nodes: CustomNode[]; + edges: CustomEdge[]; + graphSchemas: { + input: Record | null; + credentials: Record | null; + output: Record | null; + }; + nodeCounter: number; + flowVersion?: number; +} + +export const draftService = { + async saveDraft(flowId: string, data: DraftData): Promise { + const draft: BuilderDraft = { + id: flowId, + nodes: data.nodes, + edges: data.edges, + graphSchemas: data.graphSchemas, + nodeCounter: data.nodeCounter, + savedAt: Date.now(), + flowVersion: data.flowVersion, + }; + + await db.drafts.put(draft); + }, + + async loadDraft(flowId: string): Promise { + const draft = await db.drafts.get(flowId); + + if (!draft) { + return null; + } + const age = Date.now() - draft.savedAt; + if (age > DRAFT_EXPIRY_MS) { + await this.deleteDraft(flowId); + return null; + } + + return draft; + }, + + async deleteDraft(flowId: string): Promise { + await db.drafts.delete(flowId); + }, + + async hasDraft(flowId: string): Promise { + const draft = await db.drafts.get(flowId); + if (!draft) return false; + + // Check expiry + const age = Date.now() - draft.savedAt; + if (age > DRAFT_EXPIRY_MS) { + await this.deleteDraft(flowId); + return false; + } + + return true; + }, + + isDraftDifferent( + draft: BuilderDraft, + currentNodes: CustomNode[], + currentEdges: CustomEdge[], + ): boolean { + const draftNodesClean = cleanNodes(draft.nodes); + const currentNodesClean = cleanNodes(currentNodes); + const draftEdgesClean = cleanEdges(draft.edges); + const currentEdgesClean = cleanEdges(currentEdges); + + const nodesDifferent = !isEqual(draftNodesClean, currentNodesClean); + const edgesDifferent = !isEqual(draftEdgesClean, currentEdgesClean); + + return nodesDifferent || edgesDifferent; + }, + + cleanupExpired: cleanupExpiredDrafts, +}; From 003affca43a2fdfb16f7abadc009c7ea08bcdc86 Mon Sep 17 00:00:00 2001 From: Ubbe Date: Mon, 5 Jan 2026 16:09:47 +0700 Subject: [PATCH 25/25] refactor(frontend): fix new builder buttons (#11696) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Changes 🏗️ Screenshot 2026-01-05 at 15 26 21 - Adjust the new builder to use the Design System components - Re-structure imports to match formatting rules - Small improvement on `use-get-flag` - Move file which is the main hook ## Checklist 📋 ### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] Run locally and check the new buttons look good --- .../components/AgentOutputs/AgentOutputs.tsx | 12 ++++-- .../components/BuilderActionButton.tsx | 37 ------------------- .../PublishToMarketplace.tsx | 14 ++++--- .../components/RunGraph/RunGraph.tsx | 27 ++++++-------- .../ScheduleGraph/ScheduleGraph.tsx | 16 ++++---- .../NewControlPanel/ControlPanelButton.tsx | 2 +- 
.../NewBlockMenu/BlockMenu/BlockMenu.tsx | 11 +++--- .../NewControlPanel/NewControlPanel.tsx | 6 +-- .../NewSaveControl/NewSaveControl.tsx | 17 ++++----- .../NewSearchGraph/GraphMenu/GraphMenu.tsx | 8 ++-- .../NewControlPanel/UndoRedoButtons.tsx | 8 ++-- .../src/app/(platform)/build/page.tsx | 2 +- ...seBuilderViewTabs.ts => useBuilderView.ts} | 2 +- .../services/feature-flags/use-get-flag.ts | 2 +- 14 files changed, 65 insertions(+), 99 deletions(-) delete mode 100644 autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/BuilderActionButton.tsx rename autogpt_platform/frontend/src/app/(platform)/build/{components/BuilderViewTabs/useBuilderViewTabs.ts => useBuilderView.ts} (95%) diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/AgentOutputs/AgentOutputs.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/AgentOutputs/AgentOutputs.tsx index 237bea2ab0..de56bb46b8 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/AgentOutputs/AgentOutputs.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/AgentOutputs/AgentOutputs.tsx @@ -16,6 +16,7 @@ import { SheetTitle, SheetTrigger, } from "@/components/__legacy__/ui/sheet"; +import { Button } from "@/components/atoms/Button/Button"; import { Tooltip, TooltipContent, @@ -25,7 +26,6 @@ import { import { BookOpenIcon } from "@phosphor-icons/react"; import { useMemo } from "react"; import { useShallow } from "zustand/react/shallow"; -import { BuilderActionButton } from "../BuilderActionButton"; export const AgentOutputs = ({ flowID }: { flowID: string | null }) => { const hasOutputs = useGraphStore(useShallow((state) => state.hasOutputs)); @@ -76,9 +76,13 @@ export const AgentOutputs = ({ flowID }: { flowID: string | null }) => { - - - + diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/BuilderActionButton.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/BuilderActionButton.tsx deleted file mode 100644 index 549b432a38..0000000000 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/BuilderActionButton.tsx +++ /dev/null @@ -1,37 +0,0 @@ -import { Button } from "@/components/atoms/Button/Button"; -import { ButtonProps } from "@/components/atoms/Button/helpers"; -import { cn } from "@/lib/utils"; -import { CircleNotchIcon } from "@phosphor-icons/react"; - -export const BuilderActionButton = ({ - children, - className, - isLoading, - ...props -}: ButtonProps & { isLoading?: boolean }) => { - return ( - - ); -}; diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/PublishToMarketplace.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/PublishToMarketplace.tsx index 500b8f0b47..e7381b7d52 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/PublishToMarketplace.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/PublishToMarketplace.tsx @@ -1,12 +1,12 @@ -import { ShareIcon } from "@phosphor-icons/react"; -import { BuilderActionButton } from "../BuilderActionButton"; +import { Button } from "@/components/atoms/Button/Button"; import { Tooltip, 
TooltipContent, TooltipTrigger, } from "@/components/atoms/Tooltip/BaseTooltip"; -import { usePublishToMarketplace } from "./usePublishToMarketplace"; import { PublishAgentModal } from "@/components/contextual/PublishAgentModal/PublishAgentModal"; +import { ShareIcon } from "@phosphor-icons/react"; +import { usePublishToMarketplace } from "./usePublishToMarketplace"; export const PublishToMarketplace = ({ flowID }: { flowID: string | null }) => { const { handlePublishToMarketplace, publishState, handleStateChange } = @@ -16,12 +16,14 @@ export const PublishToMarketplace = ({ flowID }: { flowID: string | null }) => { <> - - - + + Publish to Marketplace diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/RunGraph/RunGraph.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/RunGraph/RunGraph.tsx index f4c1a7331f..7ee00ec285 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/RunGraph/RunGraph.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/RunGraph/RunGraph.tsx @@ -1,15 +1,14 @@ -import { useRunGraph } from "./useRunGraph"; import { useGraphStore } from "@/app/(platform)/build/stores/graphStore"; -import { useShallow } from "zustand/react/shallow"; -import { PlayIcon, StopIcon } from "@phosphor-icons/react"; -import { cn } from "@/lib/utils"; -import { RunInputDialog } from "../RunInputDialog/RunInputDialog"; +import { Button } from "@/components/atoms/Button/Button"; import { Tooltip, TooltipContent, TooltipTrigger, } from "@/components/atoms/Tooltip/BaseTooltip"; -import { BuilderActionButton } from "../BuilderActionButton"; +import { PlayIcon, StopIcon } from "@phosphor-icons/react"; +import { useShallow } from "zustand/react/shallow"; +import { RunInputDialog } from "../RunInputDialog/RunInputDialog"; +import { useRunGraph } from "./useRunGraph"; export const RunGraph = ({ flowID }: { flowID: string | null }) => { const { @@ -29,21 +28,19 @@ export const RunGraph = ({ flowID }: { flowID: string | null }) => { <> - {!isGraphRunning ? ( - + ) : ( - + )} - + {isGraphRunning ? 
"Stop agent" : "Run agent"} diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/ScheduleGraph/ScheduleGraph.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/ScheduleGraph/ScheduleGraph.tsx index be588fa9e7..5cc8538de1 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/ScheduleGraph/ScheduleGraph.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/ScheduleGraph/ScheduleGraph.tsx @@ -1,14 +1,14 @@ -import { ClockIcon } from "@phosphor-icons/react"; -import { RunInputDialog } from "../RunInputDialog/RunInputDialog"; -import { useScheduleGraph } from "./useScheduleGraph"; +import { Button } from "@/components/atoms/Button/Button"; import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger, } from "@/components/atoms/Tooltip/BaseTooltip"; +import { ClockIcon } from "@phosphor-icons/react"; import { CronSchedulerDialog } from "../CronSchedulerDialog/CronSchedulerDialog"; -import { BuilderActionButton } from "../BuilderActionButton"; +import { RunInputDialog } from "../RunInputDialog/RunInputDialog"; +import { useScheduleGraph } from "./useScheduleGraph"; export const ScheduleGraph = ({ flowID }: { flowID: string | null }) => { const { @@ -23,12 +23,14 @@ export const ScheduleGraph = ({ flowID }: { flowID: string | null }) => { - - - + +

Schedule Graph
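For reference, RunGraph, ScheduleGraph, and PublishToMarketplace now all follow the same shape: a design-system `Button` wrapped in the shared Tooltip primitives, instead of the deleted `BuilderActionButton`. A minimal sketch of that shape is below; it is illustrative only, and the exact `Button` props (`variant`, `size`, `loading`) and the tooltip wiring are assumptions, not copied from the diff.

```tsx
// Illustrative sketch, not part of the patch: the shared Tooltip + Button
// pattern used by the builder action components after this refactor.
// The Button prop names below are assumed, not taken from the Button atom's API.
import { Button } from "@/components/atoms/Button/Button";
import {
  Tooltip,
  TooltipContent,
  TooltipTrigger,
} from "@/components/atoms/Tooltip/BaseTooltip";
import { ClockIcon } from "@phosphor-icons/react";

export function ScheduleActionSketch({
  onClick,
  isLoading,
}: {
  onClick: () => void;
  isLoading: boolean;
}) {
  return (
    <Tooltip>
      <TooltipTrigger asChild>
        {/* The design-system Button replaces the bespoke BuilderActionButton */}
        <Button variant="secondary" size="small" loading={isLoading} onClick={onClick}>
          <ClockIcon size={20} />
        </Button>
      </TooltipTrigger>
      <TooltipContent>Schedule Graph</TooltipContent>
    </Tooltip>
  );
}
```

The same structure applies to the run and publish buttons; only the icon, click handler, and tooltip copy change.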

diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/ControlPanelButton.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/ControlPanelButton.tsx index b176a002a7..36834becf6 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/ControlPanelButton.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/ControlPanelButton.tsx @@ -24,7 +24,7 @@ export const ControlPanelButton: React.FC = ({ role={as === "div" ? "button" : undefined} disabled={as === "button" ? disabled : undefined} className={cn( - "flex h-[4.25rem] w-[4.25rem] items-center justify-center whitespace-normal bg-white p-[1.38rem] text-zinc-800 shadow-none hover:cursor-pointer hover:bg-zinc-100 hover:text-zinc-950 focus:ring-0", + "flex w-auto items-center justify-center whitespace-normal bg-white px-4 py-4 text-zinc-800 shadow-none hover:cursor-pointer hover:bg-zinc-100 hover:text-zinc-950 focus:ring-0", selected && "bg-violet-50 text-violet-700 hover:cursor-default hover:bg-violet-50 hover:text-violet-700 active:bg-violet-50 active:text-violet-700", disabled && "cursor-not-allowed opacity-50 hover:cursor-not-allowed", diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenu/BlockMenu.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenu/BlockMenu.tsx index bbaf410296..7533f501df 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenu/BlockMenu.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenu/BlockMenu.tsx @@ -1,18 +1,17 @@ -import React from "react"; +import { useControlPanelStore } from "@/app/(platform)/build/stores/controlPanelStore"; import { Popover, PopoverContent, PopoverTrigger, } from "@/components/__legacy__/ui/popover"; -import { BlockMenuContent } from "../BlockMenuContent/BlockMenuContent"; -import { ControlPanelButton } from "../../ControlPanelButton"; -import { LegoIcon } from "@phosphor-icons/react"; -import { useControlPanelStore } from "@/app/(platform)/build/stores/controlPanelStore"; import { Tooltip, TooltipContent, TooltipTrigger, } from "@/components/atoms/Tooltip/BaseTooltip"; +import { LegoIcon } from "@phosphor-icons/react"; +import { ControlPanelButton } from "../../ControlPanelButton"; +import { BlockMenuContent } from "../BlockMenuContent/BlockMenuContent"; export const BlockMenu = () => { const { blockMenuOpen, setBlockMenuOpen } = useControlPanelStore(); @@ -28,7 +27,7 @@ export const BlockMenu = () => { selected={blockMenuOpen} className="rounded-none" > - + diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewControlPanel.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewControlPanel.tsx index 4828a73af4..8c8bbf1842 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewControlPanel.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewControlPanel.tsx @@ -7,10 +7,10 @@ import { useNewControlPanel } from "./useNewControlPanel"; import { GraphExecutionID } from "@/lib/autogpt-server-api"; // import { ControlPanelButton } from "../ControlPanelButton"; // import { GraphSearchMenu } from "../GraphMenu/GraphMenu"; -import { Flag, useGetFlag } from 
"@/services/feature-flags/use-get-flag"; import { Separator } from "@/components/__legacy__/ui/separator"; -import { NewSaveControl } from "./NewSaveControl/NewSaveControl"; +import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag"; import { CustomNode } from "../FlowEditor/nodes/CustomNode/CustomNode"; +import { NewSaveControl } from "./NewSaveControl/NewSaveControl"; import { UndoRedoButtons } from "./UndoRedoButtons"; export type Control = { @@ -56,7 +56,7 @@ export const NewControlPanel = memo( return (
diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewSaveControl/NewSaveControl.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewSaveControl/NewSaveControl.tsx index beae5c1705..cbf06af1c6 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewSaveControl/NewSaveControl.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewSaveControl/NewSaveControl.tsx @@ -1,22 +1,21 @@ -import React from "react"; +import { Card, CardContent, CardFooter } from "@/components/__legacy__/ui/card"; +import { Form, FormField } from "@/components/__legacy__/ui/form"; import { Popover, PopoverContent, PopoverTrigger, } from "@/components/__legacy__/ui/popover"; -import { Card, CardContent, CardFooter } from "@/components/__legacy__/ui/card"; +import { Button } from "@/components/atoms/Button/Button"; +import { Input } from "@/components/atoms/Input/Input"; import { Tooltip, TooltipContent, TooltipTrigger, } from "@/components/atoms/Tooltip/BaseTooltip"; -import { useNewSaveControl } from "./useNewSaveControl"; -import { Form, FormField } from "@/components/__legacy__/ui/form"; -import { ControlPanelButton } from "../ControlPanelButton"; -import { useControlPanelStore } from "../../../stores/controlPanelStore"; import { FloppyDiskIcon } from "@phosphor-icons/react"; -import { Input } from "@/components/atoms/Input/Input"; -import { Button } from "@/components/atoms/Button/Button"; +import { useControlPanelStore } from "../../../stores/controlPanelStore"; +import { ControlPanelButton } from "../ControlPanelButton"; +import { useNewSaveControl } from "./useNewSaveControl"; export const NewSaveControl = () => { const { form, isSaving, graphVersion, handleSave } = useNewSaveControl(); @@ -33,7 +32,7 @@ export const NewSaveControl = () => { selected={saveControlOpen} className="rounded-none" > - + diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewSearchGraph/GraphMenu/GraphMenu.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewSearchGraph/GraphMenu/GraphMenu.tsx index c886919642..8ff96a598b 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewSearchGraph/GraphMenu/GraphMenu.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewSearchGraph/GraphMenu/GraphMenu.tsx @@ -1,13 +1,13 @@ -import React from "react"; +import { CustomNode } from "@/app/(platform)/build/components/legacy-builder/CustomNode/CustomNode"; import { Popover, PopoverContent, PopoverTrigger, } from "@/components/__legacy__/ui/popover"; import { MagnifyingGlassIcon } from "@phosphor-icons/react"; -import { GraphSearchContent } from "../GraphMenuContent/GraphContent"; +import React from "react"; import { ControlPanelButton } from "../../ControlPanelButton"; -import { CustomNode } from "@/app/(platform)/build/components/legacy-builder/CustomNode/CustomNode"; +import { GraphSearchContent } from "../GraphMenuContent/GraphContent"; import { useGraphMenu } from "./useGraphMenu"; interface GraphSearchMenuProps { @@ -50,7 +50,7 @@ export const GraphSearchMenu: React.FC = ({ selected={blockMenuSelected === "search"} className="rounded-none" > - + diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/UndoRedoButtons.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/UndoRedoButtons.tsx 
index 6f134056c8..5510335104 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/UndoRedoButtons.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/UndoRedoButtons.tsx @@ -1,12 +1,12 @@ import { Separator } from "@/components/__legacy__/ui/separator"; -import { ControlPanelButton } from "./ControlPanelButton"; -import { ArrowUUpLeftIcon, ArrowUUpRightIcon } from "@phosphor-icons/react"; import { Tooltip, TooltipContent, TooltipTrigger, } from "@/components/atoms/Tooltip/BaseTooltip"; +import { ArrowUUpLeftIcon, ArrowUUpRightIcon } from "@phosphor-icons/react"; import { useHistoryStore } from "../../stores/historyStore"; +import { ControlPanelButton } from "./ControlPanelButton"; import { useEffect } from "react"; @@ -43,7 +43,7 @@ export const UndoRedoButtons = () => { - + Undo @@ -52,7 +52,7 @@ export const UndoRedoButtons = () => { - + Redo diff --git a/autogpt_platform/frontend/src/app/(platform)/build/page.tsx b/autogpt_platform/frontend/src/app/(platform)/build/page.tsx index f60c863657..f1d62ee5fb 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/page.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/page.tsx @@ -8,8 +8,8 @@ import { ReactFlowProvider } from "@xyflow/react"; import { useSearchParams } from "next/navigation"; import { useEffect } from "react"; import { BuilderViewTabs } from "./components/BuilderViewTabs/BuilderViewTabs"; -import { useBuilderView } from "./components/BuilderViewTabs/useBuilderViewTabs"; import { Flow } from "./components/FlowEditor/Flow/Flow"; +import { useBuilderView } from "./useBuilderView"; function BuilderContent() { const query = useSearchParams(); diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderViewTabs/useBuilderViewTabs.ts b/autogpt_platform/frontend/src/app/(platform)/build/useBuilderView.ts similarity index 95% rename from autogpt_platform/frontend/src/app/(platform)/build/components/BuilderViewTabs/useBuilderViewTabs.ts rename to autogpt_platform/frontend/src/app/(platform)/build/useBuilderView.ts index ac02becca5..e0e524ddf8 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderViewTabs/useBuilderViewTabs.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/useBuilderView.ts @@ -1,7 +1,7 @@ import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag"; import { usePathname, useRouter, useSearchParams } from "next/navigation"; import { useEffect, useMemo } from "react"; -import { BuilderView } from "./BuilderViewTabs"; +import { BuilderView } from "./components/BuilderViewTabs/BuilderViewTabs"; export function useBuilderView() { const isNewFlowEditorEnabled = useGetFlag(Flag.NEW_FLOW_EDITOR); diff --git a/autogpt_platform/frontend/src/services/feature-flags/use-get-flag.ts b/autogpt_platform/frontend/src/services/feature-flags/use-get-flag.ts index f05d7c68a4..64b69895f3 100644 --- a/autogpt_platform/frontend/src/services/feature-flags/use-get-flag.ts +++ b/autogpt_platform/frontend/src/services/feature-flags/use-get-flag.ts @@ -56,7 +56,7 @@ export function useGetFlag(flag: T): FlagValues[T] | null { const envEnabled = process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true"; const clientId = process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID; - const isLaunchDarklyConfigured = envEnabled && clientId; + const isLaunchDarklyConfigured = envEnabled && Boolean(clientId); if (!isLaunchDarklyConfigured || isPwMockEnabled) { return mockFlags[flag];
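One note on the `use-get-flag` change above: wrapping the client ID in `Boolean()` is a type-level fix rather than a behavioral one. A minimal standalone sketch of the difference, using hypothetical variable names and the same env vars as the hook:

```ts
// Hypothetical illustration of the isLaunchDarklyConfigured change.
const envEnabled = process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true"; // boolean
const clientId = process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID; // string | undefined

const loose = envEnabled && clientId; // inferred as string | false | undefined
const strict = envEnabled && Boolean(clientId); // inferred as boolean

// Both are falsy when the client ID is missing; only `strict` can be used
// directly wherever a plain boolean is expected, without extra narrowing.
```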