From 794aee25ab130723ad1ef8fdbb6067286cf44e99 Mon Sep 17 00:00:00 2001 From: Swifty Date: Thu, 16 Oct 2025 11:49:36 +0200 Subject: [PATCH 001/260] add full text search --- .../backend/backend/data/partial_types.py | 5 + .../backend/backend/server/v2/store/db.py | 223 +++++++++++++----- .../migration.sql | 93 ++++++++ autogpt_platform/backend/schema.prisma | 14 +- 4 files changed, 275 insertions(+), 60 deletions(-) create mode 100644 autogpt_platform/backend/backend/data/partial_types.py create mode 100644 autogpt_platform/backend/migrations/20251016093049_add_full_text_search/migration.sql diff --git a/autogpt_platform/backend/backend/data/partial_types.py b/autogpt_platform/backend/backend/data/partial_types.py new file mode 100644 index 0000000000..befa32219f --- /dev/null +++ b/autogpt_platform/backend/backend/data/partial_types.py @@ -0,0 +1,5 @@ +import prisma.models + + +class StoreAgentWithRank(prisma.models.StoreAgent): + rank: float diff --git a/autogpt_platform/backend/backend/server/v2/store/db.py b/autogpt_platform/backend/backend/server/v2/store/db.py index 84e63b36ac..4ad0b72d29 100644 --- a/autogpt_platform/backend/backend/server/v2/store/db.py +++ b/autogpt_platform/backend/backend/server/v2/store/db.py @@ -1,5 +1,6 @@ import asyncio import logging +import typing from datetime import datetime, timezone import fastapi @@ -70,64 +71,176 @@ async def get_store_agents( logger.debug( f"Getting store agents. featured={featured}, creators={creators}, sorted_by={sorted_by}, search={search_query}, category={category}, page={page}" ) - search_term = sanitize_query(search_query) - where_clause: prisma.types.StoreAgentWhereInput = {"is_available": True} - if featured: - where_clause["featured"] = featured - if creators: - where_clause["creator_username"] = {"in": creators} - if category: - where_clause["categories"] = {"has": category} - - if search_term: - where_clause["OR"] = [ - {"agent_name": {"contains": search_term, "mode": "insensitive"}}, - {"description": {"contains": search_term, "mode": "insensitive"}}, - ] - - order_by = [] - if sorted_by == "rating": - order_by.append({"rating": "desc"}) - elif sorted_by == "runs": - order_by.append({"runs": "desc"}) - elif sorted_by == "name": - order_by.append({"agent_name": "asc"}) try: - agents = await prisma.models.StoreAgent.prisma().find_many( - where=where_clause, - order=order_by, - skip=(page - 1) * page_size, - take=page_size, - ) - - total = await prisma.models.StoreAgent.prisma().count(where=where_clause) - total_pages = (total + page_size - 1) // page_size - - store_agents: list[backend.server.v2.store.model.StoreAgent] = [] - for agent in agents: - try: - # Create the StoreAgent object safely - store_agent = backend.server.v2.store.model.StoreAgent( - slug=agent.slug, - agent_name=agent.agent_name, - agent_image=agent.agent_image[0] if agent.agent_image else "", - creator=agent.creator_username or "Needs Profile", - creator_avatar=agent.creator_avatar or "", - sub_heading=agent.sub_heading, - description=agent.description, - runs=agent.runs, - rating=agent.rating, + # If search_query is provided, use full-text search + if search_query: + search_term = sanitize_query(search_query) + if not search_term: + # Return empty results for invalid search query + return backend.server.v2.store.model.StoreAgentsResponse( + agents=[], + pagination=backend.server.v2.store.model.Pagination( + current_page=page, + total_items=0, + total_pages=0, + page_size=page_size, + ), ) - # Add to the list only if creation was successful - 
store_agents.append(store_agent) - except Exception as e: - # Skip this agent if there was an error - # You could log the error here if needed - logger.error( - f"Error parsing Store agent when getting store agents from db: {e}" - ) - continue + + offset = (page - 1) * page_size + + # Build filter conditions + filter_conditions = [] + filter_conditions.append("is_available = true") + + if featured: + filter_conditions.append("featured = true") + if creators: + creator_list = "','".join(creators) + filter_conditions.append(f"creator_username IN ('{creator_list}')") + if category: + filter_conditions.append(f"'{category}' = ANY(categories)") + + where_filter = ( + " AND ".join(filter_conditions) if filter_conditions else "1=1" + ) + + # Build ORDER BY clause + if sorted_by == "rating": + order_by_clause = "rating DESC, rank DESC" + elif sorted_by == "runs": + order_by_clause = "runs DESC, rank DESC" + elif sorted_by == "name": + order_by_clause = "agent_name ASC, rank DESC" + else: + order_by_clause = "rank DESC, updated_at DESC" + + # Execute full-text search query + sql_query = f""" + WITH query AS ( + SELECT to_tsquery(string_agg(lexeme || ':*', ' & ' ORDER BY positions)) AS q + FROM unnest(to_tsvector('{search_term}')) + ) + SELECT + slug, + agent_name, + agent_image, + creator_username, + creator_avatar, + sub_heading, + description, + runs, + rating, + categories, + featured, + is_available, + updated_at, + ts_rank(CAST(search AS tsvector), query.q) AS rank + FROM "StoreAgent", query + WHERE {where_filter} AND search @@ query.q + ORDER BY {order_by_clause} + LIMIT {page_size} + OFFSET {offset}; + """ + + # Count query for pagination + count_query = f""" + WITH query AS ( + SELECT to_tsquery(string_agg(lexeme || ':*', ' & ' ORDER BY positions)) AS q + FROM unnest(to_tsvector('{search_term}')) + ) + SELECT COUNT(*) as count + FROM "StoreAgent", query + WHERE {where_filter} AND search @@ query.q; + """ + + # Execute both queries + agents = await prisma.client.get_client().query_raw( + query=typing.cast(typing.LiteralString, sql_query) + ) + + count_result = await prisma.client.get_client().query_raw( + query=typing.cast(typing.LiteralString, count_query) + ) + + total = count_result[0]["count"] if count_result else 0 + total_pages = (total + page_size - 1) // page_size + + # Convert raw results to StoreAgent models + store_agents: list[backend.server.v2.store.model.StoreAgent] = [] + for agent in agents: + try: + store_agent = backend.server.v2.store.model.StoreAgent( + slug=agent["slug"], + agent_name=agent["agent_name"], + agent_image=( + agent["agent_image"][0] if agent["agent_image"] else "" + ), + creator=agent["creator_username"] or "Needs Profile", + creator_avatar=agent["creator_avatar"] or "", + sub_heading=agent["sub_heading"], + description=agent["description"], + runs=agent["runs"], + rating=agent["rating"], + ) + store_agents.append(store_agent) + except Exception as e: + logger.error(f"Error parsing Store agent from search results: {e}") + continue + + else: + # Non-search query path (original logic) + where_clause: prisma.types.StoreAgentWhereInput = {"is_available": True} + if featured: + where_clause["featured"] = featured + if creators: + where_clause["creator_username"] = {"in": creators} + if category: + where_clause["categories"] = {"has": category} + + order_by = [] + if sorted_by == "rating": + order_by.append({"rating": "desc"}) + elif sorted_by == "runs": + order_by.append({"runs": "desc"}) + elif sorted_by == "name": + order_by.append({"agent_name": "asc"}) + + 
agents = await prisma.models.StoreAgent.prisma().find_many( + where=where_clause, + order=order_by, + skip=(page - 1) * page_size, + take=page_size, + ) + + total = await prisma.models.StoreAgent.prisma().count(where=where_clause) + total_pages = (total + page_size - 1) // page_size + + store_agents: list[backend.server.v2.store.model.StoreAgent] = [] + for agent in agents: + try: + # Create the StoreAgent object safely + store_agent = backend.server.v2.store.model.StoreAgent( + slug=agent.slug, + agent_name=agent.agent_name, + agent_image=agent.agent_image[0] if agent.agent_image else "", + creator=agent.creator_username or "Needs Profile", + creator_avatar=agent.creator_avatar or "", + sub_heading=agent.sub_heading, + description=agent.description, + runs=agent.runs, + rating=agent.rating, + ) + # Add to the list only if creation was successful + store_agents.append(store_agent) + except Exception as e: + # Skip this agent if there was an error + # You could log the error here if needed + logger.error( + f"Error parsing Store agent when getting store agents from db: {e}" + ) + continue logger.debug(f"Found {len(store_agents)} agents") return backend.server.v2.store.model.StoreAgentsResponse( diff --git a/autogpt_platform/backend/migrations/20251016093049_add_full_text_search/migration.sql b/autogpt_platform/backend/migrations/20251016093049_add_full_text_search/migration.sql new file mode 100644 index 0000000000..5c35ca7a65 --- /dev/null +++ b/autogpt_platform/backend/migrations/20251016093049_add_full_text_search/migration.sql @@ -0,0 +1,93 @@ +-- AlterTable +ALTER TABLE "StoreListingVersion" ADD COLUMN "search" tsvector DEFAULT ''::tsvector; + +-- Add trigger to update the search column with the tsvector of the agent +-- Function to be invoked by trigger + +CREATE OR REPLACE FUNCTION update_tsvector_column() RETURNS TRIGGER AS $$ + +BEGIN + +NEW.search := to_tsvector('english', COALESCE(NEW.description, '')|| ' ' ||COALESCE(NEW.name, '')|| ' ' ||COALESCE(NEW.subHeading, '')|| ' ' ||COALESCE(NEW.description, '')); + +RETURN NEW; + +END; + +$$ LANGUAGE plpgsql SECURITY definer SET search_path = public, pg_temp; + +-- Trigger that keeps the TSVECTOR up to date + +DROP TRIGGER IF EXISTS "update_tsvector" ON "StoreListingVersion"; + +CREATE TRIGGER "update_tsvector" + +BEFORE INSERT OR UPDATE ON "StoreListingVersion" + +FOR EACH ROW + +EXECUTE FUNCTION update_tsvector_column (); + +BEGIN; + +-- Drop and recreate the StoreAgent view with isAvailable field +DROP VIEW IF EXISTS "StoreAgent"; + +CREATE OR REPLACE VIEW "StoreAgent" AS +WITH latest_versions AS ( + SELECT + "storeListingId", + MAX(version) AS max_version + FROM "StoreListingVersion" + WHERE "submissionStatus" = 'APPROVED' + GROUP BY "storeListingId" +), +agent_versions AS ( + SELECT + "storeListingId", + array_agg(DISTINCT version::text ORDER BY version::text) AS versions + FROM "StoreListingVersion" + WHERE "submissionStatus" = 'APPROVED' + GROUP BY "storeListingId" +) +SELECT + sl.id AS listing_id, + slv.id AS "storeListingVersionId", + slv."createdAt" AS updated_at, + sl.slug, + COALESCE(slv.name, '') AS agent_name, + slv."videoUrl" AS agent_video, + COALESCE(slv."imageUrls", ARRAY[]::text[]) AS agent_image, + slv."isFeatured" AS featured, + p.username AS creator_username, -- Allow NULL for malformed sub-agents + p."avatarUrl" AS creator_avatar, -- Allow NULL for malformed sub-agents + slv."subHeading" AS sub_heading, + slv.description, + slv.categories, + slv.search, + COALESCE(ar.run_count, 0::bigint) AS runs, + 
COALESCE(rs.avg_rating, 0.0)::double precision AS rating, + COALESCE(av.versions, ARRAY[slv.version::text]) AS versions, + slv."isAvailable" AS is_available -- Add isAvailable field to filter sub-agents +FROM "StoreListing" sl +JOIN latest_versions lv + ON sl.id = lv."storeListingId" +JOIN "StoreListingVersion" slv + ON slv."storeListingId" = lv."storeListingId" + AND slv.version = lv.max_version + AND slv."submissionStatus" = 'APPROVED' +JOIN "AgentGraph" a + ON slv."agentGraphId" = a.id + AND slv."agentGraphVersion" = a.version +LEFT JOIN "Profile" p + ON sl."owningUserId" = p."userId" +LEFT JOIN "mv_review_stats" rs + ON sl.id = rs."storeListingId" +LEFT JOIN "mv_agent_run_counts" ar + ON a.id = ar."agentGraphId" +LEFT JOIN agent_versions av + ON sl.id = av."storeListingId" +WHERE sl."isDeleted" = false + AND sl."hasApprovedVersion" = true; + +COMMIT; \ No newline at end of file diff --git a/autogpt_platform/backend/schema.prisma b/autogpt_platform/backend/schema.prisma index 7556c45918..d1b878f6fe 100644 --- a/autogpt_platform/backend/schema.prisma +++ b/autogpt_platform/backend/schema.prisma @@ -5,10 +5,11 @@ datasource db { } generator client { - provider = "prisma-client-py" - recursive_type_depth = -1 - interface = "asyncio" - previewFeatures = ["views"] + provider = "prisma-client-py" + recursive_type_depth = -1 + interface = "asyncio" + previewFeatures = ["views", "fullTextSearch"] + partial_type_generator = "backend/data/partial_types.py" } // User model to mirror Auth provider users @@ -663,6 +664,7 @@ view StoreAgent { sub_heading String description String categories String[] + search Unsupported("tsvector")? @default(dbgenerated("''::tsvector")) runs Int rating Float versions String[] @@ -746,7 +748,7 @@ model StoreListing { slug String // Allow this agent to be used during onboarding - useForOnboarding Boolean @default(false) + useForOnboarding Boolean @default(false) // The currently active version that should be shown to users activeVersionId String? @unique @@ -797,6 +799,8 @@ model StoreListingVersion { // Old versions can be made unavailable by the author if desired isAvailable Boolean @default(true) + search Unsupported("tsvector")? @default(dbgenerated("''::tsvector")) + // Version workflow state submissionStatus SubmissionStatus @default(DRAFT) submittedAt DateTime? 
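Note on the search queries above: the patch builds its SQL by interpolating search_term, the creators list, and category directly into the query string. A minimal sketch (not part of the patch) of the same tsvector lookup written with bind parameters, assuming Prisma Client Python's query_raw(query, *args) maps positional arguments to PostgreSQL placeholders ($1, $2, ...), is shown below; plainto_tsquery parses the raw text itself and ignores tsquery operators, so the term needs no escaping for the query syntax.

    # Sketch only: a parameterized variant of the StoreAgent full-text search.
    # Assumes query_raw(query, *args) binds positional args to $1, $2, ... on PostgreSQL.
    sql_query = """
        SELECT
            slug,
            agent_name,
            ts_rank_cd(search, query) AS rank
        FROM "StoreAgent",
             plainto_tsquery('english', $1) AS query
        WHERE is_available = true
          AND search @@ query
        ORDER BY rank DESC
        LIMIT $2 OFFSET $3
    """
    offset = (page - 1) * page_size
    rows = await prisma.client.get_client().query_raw(
        sql_query, search_term, page_size, offset
    )

The filter conditions (featured, creators, category) would be appended the same way, each contributing its own numbered placeholder rather than an inlined literal.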
From cb532e1c4d596b4f9da06a6a36f292e21d0623de Mon Sep 17 00:00:00 2001 From: Swifty Date: Thu, 16 Oct 2025 12:08:04 +0200 Subject: [PATCH 002/260] update docker file to include partial types --- autogpt_platform/backend/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/autogpt_platform/backend/Dockerfile b/autogpt_platform/backend/Dockerfile index 70b31e554d..3d49ff1d3a 100644 --- a/autogpt_platform/backend/Dockerfile +++ b/autogpt_platform/backend/Dockerfile @@ -92,6 +92,7 @@ FROM server_dependencies AS migrate # Migration stage only needs schema and migrations - much lighter than full backend COPY autogpt_platform/backend/schema.prisma /app/autogpt_platform/backend/ +COPY autogpt_platform/backend/data/partial_types.py /app/autogpt_platform/backend/backend/data/partial_types.py COPY autogpt_platform/backend/migrations /app/autogpt_platform/backend/migrations FROM server_dependencies AS server From ebb4ebb025b4c0b277f8726ec22268de2b985c89 Mon Sep 17 00:00:00 2001 From: Swifty Date: Thu, 16 Oct 2025 12:10:38 +0200 Subject: [PATCH 003/260] include parital types in second place --- autogpt_platform/backend/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/autogpt_platform/backend/Dockerfile b/autogpt_platform/backend/Dockerfile index 3d49ff1d3a..73274136a4 100644 --- a/autogpt_platform/backend/Dockerfile +++ b/autogpt_platform/backend/Dockerfile @@ -47,6 +47,7 @@ RUN poetry install --no-ansi --no-root # Generate Prisma client COPY autogpt_platform/backend/schema.prisma ./ +COPY autogpt_platform/backend/data/partial_types.py ./backend/data/partial_types.py RUN poetry run prisma generate FROM debian:13-slim AS server_dependencies From 8610118ddc22e1ec14e20ed1614727cf5fc1a710 Mon Sep 17 00:00:00 2001 From: Swifty Date: Thu, 16 Oct 2025 12:14:26 +0200 Subject: [PATCH 004/260] ai sucks - fixing --- autogpt_platform/backend/Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/autogpt_platform/backend/Dockerfile b/autogpt_platform/backend/Dockerfile index 73274136a4..7f51bad3a1 100644 --- a/autogpt_platform/backend/Dockerfile +++ b/autogpt_platform/backend/Dockerfile @@ -47,7 +47,7 @@ RUN poetry install --no-ansi --no-root # Generate Prisma client COPY autogpt_platform/backend/schema.prisma ./ -COPY autogpt_platform/backend/data/partial_types.py ./backend/data/partial_types.py +COPY autogpt_platform/backend/backend/data/partial_types.py ./backend/data/partial_types.py RUN poetry run prisma generate FROM debian:13-slim AS server_dependencies @@ -93,7 +93,7 @@ FROM server_dependencies AS migrate # Migration stage only needs schema and migrations - much lighter than full backend COPY autogpt_platform/backend/schema.prisma /app/autogpt_platform/backend/ -COPY autogpt_platform/backend/data/partial_types.py /app/autogpt_platform/backend/backend/data/partial_types.py +COPY autogpt_platform/backend/backend/data/partial_types.py /app/autogpt_platform/backend/backend/data/partial_types.py COPY autogpt_platform/backend/migrations /app/autogpt_platform/backend/migrations FROM server_dependencies AS server From 84ad4a9f95e8067e68353f0f6e1030f77068d760 Mon Sep 17 00:00:00 2001 From: Swifty Date: Thu, 16 Oct 2025 13:06:47 +0200 Subject: [PATCH 005/260] updated migration and query --- .../backend/backend/server/v2/store/db.py | 57 +++++++++---------- .../migration.sql | 39 +++++++------ 2 files changed, 45 insertions(+), 51 deletions(-) diff --git a/autogpt_platform/backend/backend/server/v2/store/db.py b/autogpt_platform/backend/backend/server/v2/store/db.py index 
4ad0b72d29..74d188cc99 100644 --- a/autogpt_platform/backend/backend/server/v2/store/db.py +++ b/autogpt_platform/backend/backend/server/v2/store/db.py @@ -118,41 +118,36 @@ async def get_store_agents( # Execute full-text search query sql_query = f""" - WITH query AS ( - SELECT to_tsquery(string_agg(lexeme || ':*', ' & ' ORDER BY positions)) AS q - FROM unnest(to_tsvector('{search_term}')) - ) - SELECT - slug, - agent_name, - agent_image, - creator_username, - creator_avatar, - sub_heading, - description, - runs, - rating, - categories, - featured, - is_available, - updated_at, - ts_rank(CAST(search AS tsvector), query.q) AS rank - FROM "StoreAgent", query - WHERE {where_filter} AND search @@ query.q - ORDER BY {order_by_clause} - LIMIT {page_size} - OFFSET {offset}; + SELECT + slug, + agent_name, + agent_image, + creator_username, + creator_avatar, + sub_heading, + description, + runs, + rating, + categories, + featured, + is_available, + updated_at, + ts_rank_cd(search, query) AS rank + FROM "StoreAgent", + plainto_tsquery('english', '{search_term}') AS query + WHERE {where_filter} + AND search @@ query + ORDER BY rank DESC, {order_by_clause} + LIMIT {page_size} OFFSET {offset} """ # Count query for pagination count_query = f""" - WITH query AS ( - SELECT to_tsquery(string_agg(lexeme || ':*', ' & ' ORDER BY positions)) AS q - FROM unnest(to_tsvector('{search_term}')) - ) - SELECT COUNT(*) as count - FROM "StoreAgent", query - WHERE {where_filter} AND search @@ query.q; + SELECT COUNT(*) as count + FROM "StoreAgent", + plainto_tsquery('english', '{search_term}') AS query + WHERE {where_filter} + AND search @@ query """ # Execute both queries diff --git a/autogpt_platform/backend/migrations/20251016093049_add_full_text_search/migration.sql b/autogpt_platform/backend/migrations/20251016093049_add_full_text_search/migration.sql index 5c35ca7a65..7d3cd381ab 100644 --- a/autogpt_platform/backend/migrations/20251016093049_add_full_text_search/migration.sql +++ b/autogpt_platform/backend/migrations/20251016093049_add_full_text_search/migration.sql @@ -4,31 +4,29 @@ ALTER TABLE "StoreListingVersion" ADD COLUMN "search" tsvector DEFAULT ''::tsvec -- Add trigger to update the search column with the tsvector of the agent -- Function to be invoked by trigger -CREATE OR REPLACE FUNCTION update_tsvector_column() RETURNS TRIGGER AS $$ - -BEGIN - -NEW.search := to_tsvector('english', COALESCE(NEW.description, '')|| ' ' ||COALESCE(NEW.name, '')|| ' ' ||COALESCE(NEW.subHeading, '')|| ' ' ||COALESCE(NEW.description, '')); - -RETURN NEW; - -END; - -$$ LANGUAGE plpgsql SECURITY definer SET search_path = public, pg_temp; - --- Trigger that keeps the TSVECTOR up to date - +-- Drop the trigger first DROP TRIGGER IF EXISTS "update_tsvector" ON "StoreListingVersion"; +-- Drop the function completely +DROP FUNCTION IF EXISTS update_tsvector_column(); + +-- Now recreate it fresh +CREATE OR REPLACE FUNCTION update_tsvector_column() RETURNS TRIGGER AS $$ +BEGIN + NEW.search := to_tsvector('english', + COALESCE(NEW.name, '') || ' ' || + COALESCE(NEW.description, '') || ' ' || + COALESCE(NEW."subHeading", '') + ); + RETURN NEW; +END; +$$ LANGUAGE plpgsql SECURITY DEFINER SET search_path = platform, pg_temp; + +-- Recreate the trigger CREATE TRIGGER "update_tsvector" - BEFORE INSERT OR UPDATE ON "StoreListingVersion" - FOR EACH ROW - -EXECUTE FUNCTION update_tsvector_column (); - -BEGIN; +EXECUTE FUNCTION update_tsvector_column(); -- Drop and recreate the StoreAgent view with isAvailable field DROP VIEW IF EXISTS 
"StoreAgent"; @@ -68,6 +66,7 @@ SELECT COALESCE(ar.run_count, 0::bigint) AS runs, COALESCE(rs.avg_rating, 0.0)::double precision AS rating, COALESCE(av.versions, ARRAY[slv.version::text]) AS versions, + COALESCE(sl."useForOnboarding", false) AS "useForOnboarding", slv."isAvailable" AS is_available -- Add isAvailable field to filter sub-agents FROM "StoreListing" sl JOIN latest_versions lv From 773f545cfd77c908754bdfe5640fe40e833b8e7d Mon Sep 17 00:00:00 2001 From: Swifty Date: Thu, 16 Oct 2025 13:38:01 +0200 Subject: [PATCH 006/260] update existing rows when migration is ran --- .../20251016093049_add_full_text_search/migration.sql | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/autogpt_platform/backend/migrations/20251016093049_add_full_text_search/migration.sql b/autogpt_platform/backend/migrations/20251016093049_add_full_text_search/migration.sql index 7d3cd381ab..b3f90ebd3c 100644 --- a/autogpt_platform/backend/migrations/20251016093049_add_full_text_search/migration.sql +++ b/autogpt_platform/backend/migrations/20251016093049_add_full_text_search/migration.sql @@ -28,6 +28,14 @@ BEFORE INSERT OR UPDATE ON "StoreListingVersion" FOR EACH ROW EXECUTE FUNCTION update_tsvector_column(); +UPDATE "StoreListingVersion" +SET search = to_tsvector('english', + COALESCE(name, '') || ' ' || + COALESCE(description, '') || ' ' || + COALESCE("subHeading", '') +) +WHERE search IS NULL; + -- Drop and recreate the StoreAgent view with isAvailable field DROP VIEW IF EXISTS "StoreAgent"; From 3c16de22ef5098a8b492ca5669a4abb156aaa67e Mon Sep 17 00:00:00 2001 From: Swifty Date: Fri, 17 Oct 2025 10:51:58 +0200 Subject: [PATCH 007/260] add test data creation to makefile and test it --- autogpt_platform/Makefile | 3 +++ autogpt_platform/backend/test/test_data_creator.py | 14 ++++++++++---- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/autogpt_platform/Makefile b/autogpt_platform/Makefile index b8a3261fcf..fde25ac136 100644 --- a/autogpt_platform/Makefile +++ b/autogpt_platform/Makefile @@ -35,6 +35,9 @@ run-backend: run-frontend: cd frontend && pnpm dev +test-data: + cd backend && poetry run python test/test_data_creator.py + help: @echo "Usage: make " @echo "Targets:" diff --git a/autogpt_platform/backend/test/test_data_creator.py b/autogpt_platform/backend/test/test_data_creator.py index 6bb3b9c4e2..53a8a5ba50 100644 --- a/autogpt_platform/backend/test/test_data_creator.py +++ b/autogpt_platform/backend/test/test_data_creator.py @@ -19,7 +19,7 @@ images: { import asyncio import random from datetime import datetime - +import pytest import prisma.enums from autogpt_libs.api_key.keysmith import APIKeySmith from faker import Faker @@ -498,9 +498,6 @@ async def main(): if store_listing_versions and random.random() < 0.5 else None ), - "agentInput": ( - Json({"test": "data"}) if random.random() < 0.3 else None - ), "onboardingAgentExecutionId": ( random.choice(agent_graph_executions).id if agent_graph_executions and random.random() < 0.3 @@ -570,5 +567,14 @@ async def main(): print("Test data creation completed successfully!") +@pytest.mark.asyncio +@pytest.mark.integration +async def test_main_function_runs_without_errors(): + try: + await main() + except Exception as e: + assert False, f"main() raised an exception: {e}" + + if __name__ == "__main__": asyncio.run(main()) From 3b5d91939943e62523a389e790abcad3422eeda6 Mon Sep 17 00:00:00 2001 From: Swifty Date: Fri, 17 Oct 2025 10:56:45 +0200 Subject: [PATCH 008/260] fix formatting --- autogpt_platform/backend/test/test_data_creator.py | 
5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/autogpt_platform/backend/test/test_data_creator.py b/autogpt_platform/backend/test/test_data_creator.py index 53a8a5ba50..36f42fda82 100644 --- a/autogpt_platform/backend/test/test_data_creator.py +++ b/autogpt_platform/backend/test/test_data_creator.py @@ -19,8 +19,9 @@ images: { import asyncio import random from datetime import datetime -import pytest + import prisma.enums +import pytest from autogpt_libs.api_key.keysmith import APIKeySmith from faker import Faker from prisma import Json, Prisma @@ -574,7 +575,7 @@ async def test_main_function_runs_without_errors(): await main() except Exception as e: assert False, f"main() raised an exception: {e}" - + if __name__ == "__main__": asyncio.run(main()) From 1ed224d48166e31a0f2440769f38efa15045a998 Mon Sep 17 00:00:00 2001 From: Swifty Date: Fri, 17 Oct 2025 11:12:00 +0200 Subject: [PATCH 009/260] simplify test and add reset-db make command --- autogpt_platform/Makefile | 9 ++++++++- autogpt_platform/backend/test/test_data_creator.py | 5 +---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/autogpt_platform/Makefile b/autogpt_platform/Makefile index fde25ac136..8b4732edd2 100644 --- a/autogpt_platform/Makefile +++ b/autogpt_platform/Makefile @@ -8,6 +8,11 @@ start-core: stop-core: docker compose stop deps +reset-db: + rm -rf db/docker/volumes/db/data + cd backend && poetry run prisma migrate deploy + cd backend && poetry run prisma generate + # View logs for core services logs-core: docker compose logs -f deps @@ -43,8 +48,10 @@ help: @echo "Targets:" @echo " start-core - Start just the core services (Supabase, Redis, RabbitMQ) in background" @echo " stop-core - Stop the core services" + @echo " reset-db - Reset the database by deleting the volume" @echo " logs-core - Tail the logs for core services" @echo " format - Format & lint backend (Python) and frontend (TypeScript) code" @echo " migrate - Run backend database migrations" @echo " run-backend - Run the backend FastAPI server" - @echo " run-frontend - Run the frontend Next.js development server" \ No newline at end of file + @echo " run-frontend - Run the frontend Next.js development server" + @echo " test-data - Run the test data creator" \ No newline at end of file diff --git a/autogpt_platform/backend/test/test_data_creator.py b/autogpt_platform/backend/test/test_data_creator.py index 36f42fda82..befb1dcacd 100644 --- a/autogpt_platform/backend/test/test_data_creator.py +++ b/autogpt_platform/backend/test/test_data_creator.py @@ -571,10 +571,7 @@ async def main(): @pytest.mark.asyncio @pytest.mark.integration async def test_main_function_runs_without_errors(): - try: - await main() - except Exception as e: - assert False, f"main() raised an exception: {e}" + await main() if __name__ == "__main__": From 29895c290f306c1b6525e4e18321242a5deb8276 Mon Sep 17 00:00:00 2001 From: Swifty Date: Fri, 17 Oct 2025 13:34:35 +0200 Subject: [PATCH 010/260] store providers in db --- .../blocks/generic_webhook/__init__.py | 9 --- .../backend/backend/data/model.py | 1 + .../backend/integrations/credentials_store.py | 25 ++++++ .../backend/backend/sdk/builder.py | 36 ++++----- autogpt_platform/backend/backend/sdk/db.py | 41 ++++++++++ .../backend/backend/sdk/provider.py | 20 ++++- .../backend/backend/sdk/registry.py | 76 ++++++++++++++++++- .../backend/backend/server/rest_api.py | 2 +- .../migration.sql | 14 ++++ autogpt_platform/backend/schema.prisma | 16 ++++ .../backend/test/sdk/test_sdk_patching.py | 15 ++-- 
.../backend/test/sdk/test_sdk_registry.py | 16 +++- 12 files changed, 227 insertions(+), 44 deletions(-) create mode 100644 autogpt_platform/backend/backend/sdk/db.py create mode 100644 autogpt_platform/backend/migrations/20251017113143_provider_registry/migration.sql diff --git a/autogpt_platform/backend/backend/blocks/generic_webhook/__init__.py b/autogpt_platform/backend/backend/blocks/generic_webhook/__init__.py index 2a38bb0765..e69de29bb2 100644 --- a/autogpt_platform/backend/backend/blocks/generic_webhook/__init__.py +++ b/autogpt_platform/backend/backend/blocks/generic_webhook/__init__.py @@ -1,9 +0,0 @@ -# Import the provider builder to ensure it's registered -from backend.sdk.registry import AutoRegistry - -from .triggers import GenericWebhookTriggerBlock, generic_webhook - -# Ensure the SDK registry is patched to include our webhook manager -AutoRegistry.patch_integrations() - -__all__ = ["GenericWebhookTriggerBlock", "generic_webhook"] diff --git a/autogpt_platform/backend/backend/data/model.py b/autogpt_platform/backend/backend/data/model.py index bd78632ba8..846b076546 100644 --- a/autogpt_platform/backend/backend/data/model.py +++ b/autogpt_platform/backend/backend/data/model.py @@ -344,6 +344,7 @@ class APIKeyCredentials(_BaseCredentials): default=None, description="Unix timestamp (seconds) indicating when the API key expires (if at all)", ) + api_key_env_var: Optional[str] = None """Unix timestamp (seconds) indicating when the API key expires (if at all)""" def auth_header(self) -> str: diff --git a/autogpt_platform/backend/backend/integrations/credentials_store.py b/autogpt_platform/backend/backend/integrations/credentials_store.py index 75ae346d5d..63807f0747 100644 --- a/autogpt_platform/backend/backend/integrations/credentials_store.py +++ b/autogpt_platform/backend/backend/integrations/credentials_store.py @@ -26,6 +26,7 @@ ollama_credentials = APIKeyCredentials( id="744fdc56-071a-4761-b5a5-0af0ce10a2b5", provider="ollama", api_key=SecretStr("FAKE_API_KEY"), + api_key_env_var=None, title="Use Credits for Ollama", expires_at=None, ) @@ -34,6 +35,7 @@ revid_credentials = APIKeyCredentials( id="fdb7f412-f519-48d1-9b5f-d2f73d0e01fe", provider="revid", api_key=SecretStr(settings.secrets.revid_api_key), + api_key_env_var="REVID_API_KEY", title="Use Credits for Revid", expires_at=None, ) @@ -41,6 +43,7 @@ ideogram_credentials = APIKeyCredentials( id="760f84fc-b270-42de-91f6-08efe1b512d0", provider="ideogram", api_key=SecretStr(settings.secrets.ideogram_api_key), + api_key_env_var="IDEOGRAM_API_KEY", title="Use Credits for Ideogram", expires_at=None, ) @@ -48,6 +51,7 @@ replicate_credentials = APIKeyCredentials( id="6b9fc200-4726-4973-86c9-cd526f5ce5db", provider="replicate", api_key=SecretStr(settings.secrets.replicate_api_key), + api_key_env_var="REPLICATE_API_KEY", title="Use Credits for Replicate", expires_at=None, ) @@ -55,6 +59,7 @@ openai_credentials = APIKeyCredentials( id="53c25cb8-e3ee-465c-a4d1-e75a4c899c2a", provider="openai", api_key=SecretStr(settings.secrets.openai_api_key), + api_key_env_var="OPENAI_API_KEY", title="Use Credits for OpenAI", expires_at=None, ) @@ -62,6 +67,7 @@ aiml_api_credentials = APIKeyCredentials( id="aad82a89-9794-4ebb-977f-d736aa5260a3", provider="aiml_api", api_key=SecretStr(settings.secrets.aiml_api_key), + api_key_env_var="AIML_API_KEY", title="Use Credits for AI/ML API", expires_at=None, ) @@ -69,6 +75,7 @@ anthropic_credentials = APIKeyCredentials( id="24e5d942-d9e3-4798-8151-90143ee55629", provider="anthropic", 
api_key=SecretStr(settings.secrets.anthropic_api_key), + api_key_env_var="ANTHROPIC_API_KEY", title="Use Credits for Anthropic", expires_at=None, ) @@ -76,6 +83,7 @@ groq_credentials = APIKeyCredentials( id="4ec22295-8f97-4dd1-b42b-2c6957a02545", provider="groq", api_key=SecretStr(settings.secrets.groq_api_key), + api_key_env_var="GROQ_API_KEY", title="Use Credits for Groq", expires_at=None, ) @@ -83,6 +91,7 @@ did_credentials = APIKeyCredentials( id="7f7b0654-c36b-4565-8fa7-9a52575dfae2", provider="d_id", api_key=SecretStr(settings.secrets.did_api_key), + api_key_env_var="DID_API_KEY", title="Use Credits for D-ID", expires_at=None, ) @@ -90,6 +99,7 @@ jina_credentials = APIKeyCredentials( id="7f26de70-ba0d-494e-ba76-238e65e7b45f", provider="jina", api_key=SecretStr(settings.secrets.jina_api_key), + api_key_env_var="JINA_API_KEY", title="Use Credits for Jina", expires_at=None, ) @@ -97,6 +107,7 @@ unreal_credentials = APIKeyCredentials( id="66f20754-1b81-48e4-91d0-f4f0dd82145f", provider="unreal", api_key=SecretStr(settings.secrets.unreal_speech_api_key), + api_key_env_var="UNREAL_SPEECH_API_KEY", title="Use Credits for Unreal", expires_at=None, ) @@ -104,6 +115,7 @@ open_router_credentials = APIKeyCredentials( id="b5a0e27d-0c98-4df3-a4b9-10193e1f3c40", provider="open_router", api_key=SecretStr(settings.secrets.open_router_api_key), + api_key_env_var="OPEN_ROUTER_API_KEY", title="Use Credits for Open Router", expires_at=None, ) @@ -111,6 +123,7 @@ fal_credentials = APIKeyCredentials( id="6c0f5bd0-9008-4638-9d79-4b40b631803e", provider="fal", api_key=SecretStr(settings.secrets.fal_api_key), + api_key_env_var="FAL_API_KEY", title="Use Credits for FAL", expires_at=None, ) @@ -118,6 +131,7 @@ exa_credentials = APIKeyCredentials( id="96153e04-9c6c-4486-895f-5bb683b1ecec", provider="exa", api_key=SecretStr(settings.secrets.exa_api_key), + api_key_env_var="EXA_API_KEY", title="Use Credits for Exa search", expires_at=None, ) @@ -125,6 +139,7 @@ e2b_credentials = APIKeyCredentials( id="78d19fd7-4d59-4a16-8277-3ce310acf2b7", provider="e2b", api_key=SecretStr(settings.secrets.e2b_api_key), + api_key_env_var="E2B_API_KEY", title="Use Credits for E2B", expires_at=None, ) @@ -132,6 +147,7 @@ nvidia_credentials = APIKeyCredentials( id="96b83908-2789-4dec-9968-18f0ece4ceb3", provider="nvidia", api_key=SecretStr(settings.secrets.nvidia_api_key), + api_key_env_var="NVIDIA_API_KEY", title="Use Credits for Nvidia", expires_at=None, ) @@ -139,6 +155,7 @@ screenshotone_credentials = APIKeyCredentials( id="3b1bdd16-8818-4bc2-8cbb-b23f9a3439ed", provider="screenshotone", api_key=SecretStr(settings.secrets.screenshotone_api_key), + api_key_env_var="SCREENSHOTONE_API_KEY", title="Use Credits for ScreenshotOne", expires_at=None, ) @@ -146,6 +163,7 @@ mem0_credentials = APIKeyCredentials( id="ed55ac19-356e-4243-a6cb-bc599e9b716f", provider="mem0", api_key=SecretStr(settings.secrets.mem0_api_key), + api_key_env_var="MEM0_API_KEY", title="Use Credits for Mem0", expires_at=None, ) @@ -154,6 +172,7 @@ apollo_credentials = APIKeyCredentials( id="544c62b5-1d0f-4156-8fb4-9525f11656eb", provider="apollo", api_key=SecretStr(settings.secrets.apollo_api_key), + api_key_env_var="APOLLO_API_KEY", title="Use Credits for Apollo", expires_at=None, ) @@ -162,6 +181,7 @@ smartlead_credentials = APIKeyCredentials( id="3bcdbda3-84a3-46af-8fdb-bfd2472298b8", provider="smartlead", api_key=SecretStr(settings.secrets.smartlead_api_key), + api_key_env_var="SMARTLEAD_API_KEY", title="Use Credits for SmartLead", expires_at=None, ) @@ -170,6 
+190,7 @@ google_maps_credentials = APIKeyCredentials( id="9aa1bde0-4947-4a70-a20c-84daa3850d52", provider="google_maps", api_key=SecretStr(settings.secrets.google_maps_api_key), + api_key_env_var="GOOGLE_MAPS_API_KEY", title="Use Credits for Google Maps", expires_at=None, ) @@ -178,6 +199,7 @@ zerobounce_credentials = APIKeyCredentials( id="63a6e279-2dc2-448e-bf57-85776f7176dc", provider="zerobounce", api_key=SecretStr(settings.secrets.zerobounce_api_key), + api_key_env_var="ZEROBOUNCE_API_KEY", title="Use Credits for ZeroBounce", expires_at=None, ) @@ -186,6 +208,7 @@ enrichlayer_credentials = APIKeyCredentials( id="d9fce73a-6c1d-4e8b-ba2e-12a456789def", provider="enrichlayer", api_key=SecretStr(settings.secrets.enrichlayer_api_key), + api_key_env_var="ENRICHLAYER_API_KEY", title="Use Credits for Enrichlayer", expires_at=None, ) @@ -195,6 +218,7 @@ llama_api_credentials = APIKeyCredentials( id="d44045af-1c33-4833-9e19-752313214de2", provider="llama_api", api_key=SecretStr(settings.secrets.llama_api_key), + api_key_env_var="LLAMA_API_KEY", title="Use Credits for Llama API", expires_at=None, ) @@ -203,6 +227,7 @@ v0_credentials = APIKeyCredentials( id="c4e6d1a0-3b5f-4789-a8e2-9b123456789f", provider="v0", api_key=SecretStr(settings.secrets.v0_api_key), + api_key_env_var="V0_API_KEY", title="Use Credits for v0 by Vercel", expires_at=None, ) diff --git a/autogpt_platform/backend/backend/sdk/builder.py b/autogpt_platform/backend/backend/sdk/builder.py index 09949b256f..0f9f6140ee 100644 --- a/autogpt_platform/backend/backend/sdk/builder.py +++ b/autogpt_platform/backend/backend/sdk/builder.py @@ -17,9 +17,8 @@ from backend.data.model import ( ) from backend.integrations.oauth.base import BaseOAuthHandler from backend.integrations.webhooks._base import BaseWebhooksManager -from backend.sdk.provider import OAuthConfig, Provider +from backend.sdk.provider import OAuthConfig, Provider, ProviderRegister from backend.sdk.registry import AutoRegistry -from backend.util.settings import Settings logger = logging.getLogger(__name__) @@ -40,6 +39,7 @@ class ProviderBuilder: self._client_id_env_var: Optional[str] = None self._client_secret_env_var: Optional[str] = None self._extra_config: dict = {} + self._register: ProviderRegister = ProviderRegister(name=name) def with_oauth( self, @@ -48,6 +48,11 @@ class ProviderBuilder: client_id_env_var: Optional[str] = None, client_secret_env_var: Optional[str] = None, ) -> "ProviderBuilder": + + self._register.with_oauth = True + self._register.client_id_env_var = client_id_env_var + self._register.client_secret_env_var = client_secret_env_var + """Add OAuth support.""" if not client_id_env_var or not client_secret_env_var: client_id_env_var = f"{self.name}_client_id".upper() @@ -73,6 +78,8 @@ class ProviderBuilder: def with_api_key(self, env_var_name: str, title: str) -> "ProviderBuilder": """Add API key support with environment variable name.""" + self._register.with_api_key = True + self._register.api_key_env_var = env_var_name self._supported_auth_types.add("api_key") # Register the API key mapping @@ -91,30 +98,14 @@ class ProviderBuilder: ) return self - def with_api_key_from_settings( - self, settings_attr: str, title: str - ) -> "ProviderBuilder": - """Use existing API key from settings.""" - self._supported_auth_types.add("api_key") - - # Try to get the API key from settings - settings = Settings() - api_key = getattr(settings.secrets, settings_attr, None) - if api_key: - self._default_credentials.append( - APIKeyCredentials( - id=f"{self.name}-default", 
- provider=self.name, - api_key=api_key, - title=title, - ) - ) - return self - def with_user_password( self, username_env_var: str, password_env_var: str, title: str ) -> "ProviderBuilder": """Add username/password support with environment variable names.""" + self._register.with_user_password = True + self._register.username_env_var = username_env_var + self._register.password_env_var = password_env_var + self._supported_auth_types.add("user_password") # Check if credentials exist in environment @@ -174,6 +165,7 @@ class ProviderBuilder: supported_auth_types=self._supported_auth_types, api_client_factory=self._api_client_factory, error_handler=self._error_handler, + register=self._register, **self._extra_config, ) diff --git a/autogpt_platform/backend/backend/sdk/db.py b/autogpt_platform/backend/backend/sdk/db.py new file mode 100644 index 0000000000..292bf61275 --- /dev/null +++ b/autogpt_platform/backend/backend/sdk/db.py @@ -0,0 +1,41 @@ +from typing import Dict + +from prisma import Prisma + +from backend.sdk.provider import ProviderRegister + + +async def upsert_providers_bulk(providers: Dict[str, ProviderRegister]): + """Async version of bulk upsert providers with all fields using transaction for atomicity""" + async with Prisma() as prisma: + async with prisma.tx() as tx: + results = [] + for name, provider in providers.items(): + result = await tx.providerregistry.upsert( + where={"name": name}, + data={ + "create": { + "name": name, + "with_oauth": provider.with_oauth, + "client_id_env_var": provider.client_id_env_var, + "client_secret_env_var": provider.client_secret_env_var, + "with_api_key": provider.with_api_key, + "api_key_env_var": provider.api_key_env_var, + "with_user_password": provider.with_user_password, + "username_env_var": provider.username_env_var, + "password_env_var": provider.password_env_var, + }, + "update": { + "with_oauth": provider.with_oauth, + "client_id_env_var": provider.client_id_env_var, + "client_secret_env_var": provider.client_secret_env_var, + "with_api_key": provider.with_api_key, + "api_key_env_var": provider.api_key_env_var, + "with_user_password": provider.with_user_password, + "username_env_var": provider.username_env_var, + "password_env_var": provider.password_env_var, + }, + }, + ) + results.append(result) + return results diff --git a/autogpt_platform/backend/backend/sdk/provider.py b/autogpt_platform/backend/backend/sdk/provider.py index 98afbf05d5..a0663ea487 100644 --- a/autogpt_platform/backend/backend/sdk/provider.py +++ b/autogpt_platform/backend/backend/sdk/provider.py @@ -30,6 +30,23 @@ class OAuthConfig(BaseModel): client_secret_env_var: str +class ProviderRegister(BaseModel): + """Provider log configuration for SDK providers.""" + + name: str + + with_oauth: bool = False + client_id_env_var: Optional[str] = None + client_secret_env_var: Optional[str] = None + + with_api_key: bool = False + api_key_env_var: Optional[str] = None + + with_user_password: bool = False + username_env_var: Optional[str] = None + password_env_var: Optional[str] = None + + class Provider: """A configured provider that blocks can use. 
@@ -48,6 +65,7 @@ class Provider: def __init__( self, name: str, + register: ProviderRegister, oauth_config: Optional[OAuthConfig] = None, webhook_manager: Optional[Type[BaseWebhooksManager]] = None, default_credentials: Optional[List[Credentials]] = None, @@ -65,7 +83,7 @@ class Provider: self.supported_auth_types = supported_auth_types or set() self._api_client_factory = api_client_factory self._error_handler = error_handler - + self.register = register # Store any additional configuration self._extra_config = kwargs self.test_credentials_uuid = uuid.uuid4() diff --git a/autogpt_platform/backend/backend/sdk/registry.py b/autogpt_platform/backend/backend/sdk/registry.py index 5543a3ed96..9980f936a4 100644 --- a/autogpt_platform/backend/backend/sdk/registry.py +++ b/autogpt_platform/backend/backend/sdk/registry.py @@ -13,9 +13,11 @@ from backend.data.model import Credentials from backend.integrations.oauth.base import BaseOAuthHandler from backend.integrations.providers import ProviderName from backend.integrations.webhooks._base import BaseWebhooksManager +from backend.sdk.db import upsert_providers_bulk +from backend.sdk.provider import ProviderRegister if TYPE_CHECKING: - from backend.sdk.provider import Provider + from backend.sdk.provider import Provider, ProviderRegister logger = logging.getLogger(__name__) @@ -57,6 +59,7 @@ class AutoRegistry: _webhook_managers: Dict[str, Type[BaseWebhooksManager]] = {} _block_configurations: Dict[Type[Block], BlockConfiguration] = {} _api_key_mappings: Dict[str, str] = {} # provider -> env_var_name + _provider_registry: Dict[str, ProviderRegister] = {} @classmethod def register_provider(cls, provider: "Provider") -> None: @@ -64,6 +67,7 @@ class AutoRegistry: with cls._lock: cls._providers[provider.name] = provider + cls._provider_registry[provider.name] = provider.register # Register OAuth handler if provided if provider.oauth_config: # Dynamically set PROVIDER_NAME if not already set @@ -163,7 +167,7 @@ class AutoRegistry: cls._api_key_mappings.clear() @classmethod - def patch_integrations(cls) -> None: + async def patch_integrations(cls) -> None: """Patch existing integration points to use AutoRegistry.""" # OAuth handlers are handled by SDKAwareHandlersDict in oauth/__init__.py # No patching needed for OAuth handlers @@ -213,6 +217,73 @@ class AutoRegistry: creds_store: Any = backend.integrations.credentials_store + if "backend.integrations.providers" in sys.modules: + providers: Any = sys.modules["backend.integrations.providers"] + else: + import backend.integrations.providers + + providers: Any = backend.integrations.providers + + legacy_oauth_providers = { + providers.ProviderName.DISCORD.value: ProviderRegister( + name=providers.ProviderName.DISCORD.value, + with_oauth=True, + client_id_env_var="DISCORD_CLIENT_ID", + client_secret_env_var="DISCORD_CLIENT_SECRET", + ), + providers.ProviderName.GITHUB.value: ProviderRegister( + name=providers.ProviderName.GITHUB.value, + with_oauth=True, + client_id_env_var="GITHUB_CLIENT_ID", + client_secret_env_var="GITHUB_CLIENT_SECRET", + ), + providers.ProviderName.GOOGLE.value: ProviderRegister( + name=providers.ProviderName.GOOGLE.value, + with_oauth=True, + client_id_env_var="GOOGLE_CLIENT_ID", + client_secret_env_var="GOOGLE_CLIENT_SECRET", + ), + providers.ProviderName.NOTION.value: ProviderRegister( + name=providers.ProviderName.NOTION.value, + with_oauth=True, + client_id_env_var="NOTION_CLIENT_ID", + client_secret_env_var="NOTION_CLIENT_SECRET", + ), + providers.ProviderName.TWITTER.value: 
ProviderRegister( + name=providers.ProviderName.TWITTER.value, + with_oauth=True, + client_id_env_var="TWITTER_CLIENT_ID", + client_secret_env_var="TWITTER_CLIENT_SECRET", + ), + providers.ProviderName.TODOIST.value: ProviderRegister( + name=providers.ProviderName.TODOIST.value, + with_oauth=True, + client_id_env_var="TODOIST_CLIENT_ID", + client_secret_env_var="TODOIST_CLIENT_SECRET", + ), + } + + if hasattr(creds_store, "DEFAULT_CREDENTIALS"): + DEFAULT_CREDENTIALS = creds_store.DEFAULT_CREDENTIALS + for item in DEFAULT_CREDENTIALS: + new_cred = ProviderRegister( + name=item.provider, + with_api_key=True, + api_key_env_var=item.api_key_env_var, + ) + if item.provider in legacy_oauth_providers: + new_cred.with_oauth = True + new_cred.client_id_env_var = legacy_oauth_providers[ + item.provider + ].client_id_env_var + new_cred.client_secret_env_var = legacy_oauth_providers[ + item.provider + ].client_secret_env_var + + cls._provider_registry[item.provider] = new_cred + + await upsert_providers_bulk(providers=cls._provider_registry) + if hasattr(creds_store, "IntegrationCredentialsStore"): store_class = creds_store.IntegrationCredentialsStore if hasattr(store_class, "get_all_creds"): @@ -237,5 +308,6 @@ class AutoRegistry: logger.info( "Successfully patched IntegrationCredentialsStore.get_all_creds" ) + except Exception as e: logging.warning(f"Failed to patch credentials store: {e}") diff --git a/autogpt_platform/backend/backend/server/rest_api.py b/autogpt_platform/backend/backend/server/rest_api.py index 7c3d97b748..7b70d64488 100644 --- a/autogpt_platform/backend/backend/server/rest_api.py +++ b/autogpt_platform/backend/backend/server/rest_api.py @@ -95,7 +95,7 @@ async def lifespan_context(app: fastapi.FastAPI): # Ensure SDK auto-registration is patched before initializing blocks from backend.sdk.registry import AutoRegistry - AutoRegistry.patch_integrations() + await AutoRegistry.patch_integrations() await backend.data.block.initialize_blocks() diff --git a/autogpt_platform/backend/migrations/20251017113143_provider_registry/migration.sql b/autogpt_platform/backend/migrations/20251017113143_provider_registry/migration.sql new file mode 100644 index 0000000000..1f0c3b1239 --- /dev/null +++ b/autogpt_platform/backend/migrations/20251017113143_provider_registry/migration.sql @@ -0,0 +1,14 @@ +-- CreateTable +CREATE TABLE "ProviderRegistry" ( + "name" TEXT NOT NULL, + "with_oauth" BOOLEAN NOT NULL DEFAULT false, + "client_id_env_var" TEXT, + "client_secret_env_var" TEXT, + "with_api_key" BOOLEAN NOT NULL DEFAULT false, + "api_key_env_var" TEXT, + "with_user_password" BOOLEAN NOT NULL DEFAULT false, + "username_env_var" TEXT, + "password_env_var" TEXT, + + CONSTRAINT "ProviderRegistry_pkey" PRIMARY KEY ("name") +); diff --git a/autogpt_platform/backend/schema.prisma b/autogpt_platform/backend/schema.prisma index fba1492f5a..4a507fe6a0 100644 --- a/autogpt_platform/backend/schema.prisma +++ b/autogpt_platform/backend/schema.prisma @@ -60,6 +60,22 @@ model User { NotificationBatches UserNotificationBatch[] } +// This model describes the providers that are available to the user. +model ProviderRegistry { + name String @id + + with_oauth Boolean @default(false) + client_id_env_var String? + client_secret_env_var String? + + with_api_key Boolean @default(false) + api_key_env_var String? + + with_user_password Boolean @default(false) + username_env_var String? + password_env_var String? 
+} + enum OnboardingStep { // Introductory onboarding (Library) WELCOME diff --git a/autogpt_platform/backend/test/sdk/test_sdk_patching.py b/autogpt_platform/backend/test/sdk/test_sdk_patching.py index 42ea47bb43..96d647a549 100644 --- a/autogpt_platform/backend/test/sdk/test_sdk_patching.py +++ b/autogpt_platform/backend/test/sdk/test_sdk_patching.py @@ -52,7 +52,8 @@ class TestWebhookPatching: """Clear registry.""" AutoRegistry.clear() - def test_webhook_manager_patching(self): + @pytest.mark.asyncio + async def test_webhook_manager_patching(self): """Test that webhook managers are correctly patched.""" # Mock the original load_webhook_managers function @@ -75,7 +76,7 @@ class TestWebhookPatching: with patch.dict( "sys.modules", {"backend.integrations.webhooks": mock_webhooks_module} ): - AutoRegistry.patch_integrations() + await AutoRegistry.patch_integrations() # Call the patched function result = mock_webhooks_module.load_webhook_managers() @@ -87,7 +88,8 @@ class TestWebhookPatching: assert "webhook_provider" in result assert result["webhook_provider"] == MockWebhookManager - def test_webhook_patching_no_original_function(self): + @pytest.mark.asyncio + async def test_webhook_patching_no_original_function(self): """Test webhook patching when load_webhook_managers doesn't exist.""" # Mock webhooks module without load_webhook_managers mock_webhooks_module = MagicMock(spec=[]) @@ -103,7 +105,7 @@ class TestWebhookPatching: "sys.modules", {"backend.integrations.webhooks": mock_webhooks_module} ): # Should not raise an error - AutoRegistry.patch_integrations() + await AutoRegistry.patch_integrations() # Function should not be added if it didn't exist assert not hasattr(mock_webhooks_module, "load_webhook_managers") @@ -116,7 +118,8 @@ class TestPatchingIntegration: """Clear registry.""" AutoRegistry.clear() - def test_complete_provider_registration_and_patching(self): + @pytest.mark.asyncio + async def test_complete_provider_registration_and_patching(self): """Test the complete flow from provider registration to patching.""" # Mock webhooks module mock_webhooks = MagicMock() @@ -138,7 +141,7 @@ class TestPatchingIntegration: "backend.integrations.webhooks": mock_webhooks, }, ): - AutoRegistry.patch_integrations() + await AutoRegistry.patch_integrations() # Verify webhook patching webhook_result = mock_webhooks.load_webhook_managers() diff --git a/autogpt_platform/backend/test/sdk/test_sdk_registry.py b/autogpt_platform/backend/test/sdk/test_sdk_registry.py index f82abd57cb..16adff6fba 100644 --- a/autogpt_platform/backend/test/sdk/test_sdk_registry.py +++ b/autogpt_platform/backend/test/sdk/test_sdk_registry.py @@ -25,6 +25,7 @@ from backend.sdk import ( Provider, ProviderBuilder, ) +from backend.sdk.provider import ProviderRegister class TestAutoRegistry: @@ -39,6 +40,7 @@ class TestAutoRegistry: # Create a test provider provider = Provider( name="test_provider", + register=ProviderRegister(name="test_provider"), oauth_handler=None, webhook_manager=None, default_credentials=[], @@ -78,6 +80,7 @@ class TestAutoRegistry: default_credentials=[], base_costs=[], supported_auth_types={"oauth2"}, + register=ProviderRegister(name="oauth_provider"), ) AutoRegistry.register_provider(provider) @@ -95,6 +98,7 @@ class TestAutoRegistry: provider = Provider( name="webhook_provider", + register=ProviderRegister(name="webhook_provider"), oauth_handler=None, webhook_manager=TestWebhookManager, default_credentials=[], @@ -128,6 +132,7 @@ class TestAutoRegistry: provider = Provider( name="test_provider", 
+ register=ProviderRegister(name="test_provider"), oauth_handler=None, webhook_manager=None, default_credentials=[cred1, cred2], @@ -194,6 +199,7 @@ class TestAutoRegistry: ): provider1 = Provider( name="provider1", + register=ProviderRegister(name="provider1"), oauth_config=OAuthConfig( oauth_handler=TestOAuth1, client_id_env_var="TEST_CLIENT_ID", @@ -207,6 +213,7 @@ class TestAutoRegistry: provider2 = Provider( name="provider2", + register=ProviderRegister(name="provider2"), oauth_config=OAuthConfig( oauth_handler=TestOAuth2, client_id_env_var="TEST_CLIENT_ID", @@ -253,6 +260,7 @@ class TestAutoRegistry: # Add some registrations provider = Provider( name="test_provider", + register=ProviderRegister(name="test_provider"), oauth_handler=None, webhook_manager=None, default_credentials=[], @@ -282,7 +290,8 @@ class TestAutoRegistryPatching: AutoRegistry.clear() @patch("backend.integrations.webhooks.load_webhook_managers") - def test_webhook_manager_patching(self, mock_load_managers): + @pytest.mark.asyncio + async def test_webhook_manager_patching(self, mock_load_managers): """Test that webhook managers are patched into the system.""" # Set up the mock to return an empty dict mock_load_managers.return_value = {} @@ -294,6 +303,7 @@ class TestAutoRegistryPatching: # Register a provider with webhooks provider = Provider( name="webhook_provider", + register=ProviderRegister(name="webhook_provider"), oauth_handler=None, webhook_manager=TestWebhookManager, default_credentials=[], @@ -310,8 +320,8 @@ class TestAutoRegistryPatching: with patch.dict( "sys.modules", {"backend.integrations.webhooks": mock_webhooks} ): - # Apply patches - AutoRegistry.patch_integrations() + # Apply patches - now async + await AutoRegistry.patch_integrations() # Call the patched function result = mock_webhooks.load_webhook_managers() From a1d9b4523805a3c7f5c8a6a7cbc88eee4b2f0a21 Mon Sep 17 00:00:00 2001 From: Swifty Date: Fri, 17 Oct 2025 14:01:37 +0200 Subject: [PATCH 011/260] updated openapi spec --- .../backend/backend/data/model.py | 3 +- .../frontend/src/app/api/openapi.json | 48 ++++++++++++++++--- 2 files changed, 43 insertions(+), 8 deletions(-) diff --git a/autogpt_platform/backend/backend/data/model.py b/autogpt_platform/backend/backend/data/model.py index 846b076546..1a34cde65d 100644 --- a/autogpt_platform/backend/backend/data/model.py +++ b/autogpt_platform/backend/backend/data/model.py @@ -344,9 +344,10 @@ class APIKeyCredentials(_BaseCredentials): default=None, description="Unix timestamp (seconds) indicating when the API key expires (if at all)", ) - api_key_env_var: Optional[str] = None """Unix timestamp (seconds) indicating when the API key expires (if at all)""" + api_key_env_var: Optional[str] = Field(default=None, exclude=True) + def auth_header(self) -> str: return f"Bearer {self.api_key.get_secret_value()}" diff --git a/autogpt_platform/frontend/src/app/api/openapi.json b/autogpt_platform/frontend/src/app/api/openapi.json index 39494b1b1f..f36a48cfd6 100644 --- a/autogpt_platform/frontend/src/app/api/openapi.json +++ b/autogpt_platform/frontend/src/app/api/openapi.json @@ -207,7 +207,7 @@ "schema": { "oneOf": [ { "$ref": "#/components/schemas/OAuth2Credentials" }, - { "$ref": "#/components/schemas/APIKeyCredentials" }, + { "$ref": "#/components/schemas/APIKeyCredentials-Input" }, { "$ref": "#/components/schemas/UserPasswordCredentials" }, { "$ref": "#/components/schemas/HostScopedCredentials-Input" } ], @@ -215,7 +215,7 @@ "propertyName": "type", "mapping": { "oauth2": 
"#/components/schemas/OAuth2Credentials", - "api_key": "#/components/schemas/APIKeyCredentials", + "api_key": "#/components/schemas/APIKeyCredentials-Input", "user_password": "#/components/schemas/UserPasswordCredentials", "host_scoped": "#/components/schemas/HostScopedCredentials-Input" } @@ -233,7 +233,7 @@ "schema": { "oneOf": [ { "$ref": "#/components/schemas/OAuth2Credentials" }, - { "$ref": "#/components/schemas/APIKeyCredentials" }, + { "$ref": "#/components/schemas/APIKeyCredentials-Output" }, { "$ref": "#/components/schemas/UserPasswordCredentials" }, { "$ref": "#/components/schemas/HostScopedCredentials-Output" @@ -243,7 +243,7 @@ "propertyName": "type", "mapping": { "oauth2": "#/components/schemas/OAuth2Credentials", - "api_key": "#/components/schemas/APIKeyCredentials", + "api_key": "#/components/schemas/APIKeyCredentials-Output", "user_password": "#/components/schemas/UserPasswordCredentials", "host_scoped": "#/components/schemas/HostScopedCredentials-Output" } @@ -302,7 +302,7 @@ "schema": { "oneOf": [ { "$ref": "#/components/schemas/OAuth2Credentials" }, - { "$ref": "#/components/schemas/APIKeyCredentials" }, + { "$ref": "#/components/schemas/APIKeyCredentials-Output" }, { "$ref": "#/components/schemas/UserPasswordCredentials" }, { "$ref": "#/components/schemas/HostScopedCredentials-Output" @@ -312,7 +312,7 @@ "propertyName": "type", "mapping": { "oauth2": "#/components/schemas/OAuth2Credentials", - "api_key": "#/components/schemas/APIKeyCredentials", + "api_key": "#/components/schemas/APIKeyCredentials-Output", "user_password": "#/components/schemas/UserPasswordCredentials", "host_scoped": "#/components/schemas/HostScopedCredentials-Output" } @@ -4782,7 +4782,41 @@ }, "components": { "schemas": { - "APIKeyCredentials": { + "APIKeyCredentials-Input": { + "properties": { + "id": { "type": "string", "title": "Id" }, + "provider": { "type": "string", "title": "Provider" }, + "title": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Title" + }, + "type": { + "type": "string", + "const": "api_key", + "title": "Type", + "default": "api_key" + }, + "api_key": { + "type": "string", + "format": "password", + "title": "Api Key", + "writeOnly": true + }, + "expires_at": { + "anyOf": [{ "type": "integer" }, { "type": "null" }], + "title": "Expires At", + "description": "Unix timestamp (seconds) indicating when the API key expires (if at all)" + }, + "api_key_env_var": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Api Key Env Var" + } + }, + "type": "object", + "required": ["provider", "api_key"], + "title": "APIKeyCredentials" + }, + "APIKeyCredentials-Output": { "properties": { "id": { "type": "string", "title": "Id" }, "provider": { "type": "string", "title": "Provider" }, From 2dc0c97a5251efa079a82b87c204befaced67ba4 Mon Sep 17 00:00:00 2001 From: Swifty Date: Fri, 17 Oct 2025 16:49:04 +0200 Subject: [PATCH 012/260] Add block registry and updated --- .../backend/backend/data/block.py | 186 +++++++++++++++-- .../backend/backend/data/block_test.py | 191 ++++++++++++++++++ .../backend/backend/data/model.py | 4 +- autogpt_platform/backend/backend/sdk/db.py | 86 +++++++- .../backend/backend/sdk/db_test.py | 127 ++++++++++++ .../backend/backend/sdk/registry.py | 4 +- .../backend/backend/server/rest_api.py | 4 + .../migration.sql | 14 -- .../migration.sql | 31 +++ autogpt_platform/backend/schema.prisma | 12 ++ .../useAPIKeyCredentialsModal.ts | 4 +- 11 files changed, 627 insertions(+), 36 deletions(-) create mode 100644 
autogpt_platform/backend/backend/data/block_test.py create mode 100644 autogpt_platform/backend/backend/sdk/db_test.py delete mode 100644 autogpt_platform/backend/migrations/20251017113143_provider_registry/migration.sql create mode 100644 autogpt_platform/backend/migrations/20251017130227_add_block_provider_registry/migration.sql diff --git a/autogpt_platform/backend/backend/data/block.py b/autogpt_platform/backend/backend/data/block.py index b96211a829..1bf378dc5f 100644 --- a/autogpt_platform/backend/backend/data/block.py +++ b/autogpt_platform/backend/backend/data/block.py @@ -1,3 +1,4 @@ +import collections import inspect import logging import os @@ -9,6 +10,7 @@ from typing import ( Any, Callable, ClassVar, + Dict, Generic, Optional, Sequence, @@ -20,7 +22,8 @@ from typing import ( import jsonref import jsonschema -from prisma.models import AgentBlock +from prisma import Json +from prisma.models import AgentBlock, BlocksRegistry from prisma.types import AgentBlockCreateInput from pydantic import BaseModel @@ -479,19 +482,50 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]): return self.__class__.__name__ def to_dict(self): - return { - "id": self.id, - "name": self.name, - "inputSchema": self.input_schema.jsonschema(), - "outputSchema": self.output_schema.jsonschema(), - "description": self.description, - "categories": [category.dict() for category in self.categories], - "contributors": [ - contributor.model_dump() for contributor in self.contributors - ], - "staticOutput": self.static_output, - "uiType": self.block_type.value, - } + # Sort categories by their name to ensure consistent ordering + sorted_categories = [ + category.dict() + for category in sorted(self.categories, key=lambda c: c.name) + ] + + # Sort dictionary keys recursively for consistent ordering + def sort_dict(obj): + if isinstance(obj, dict): + return collections.OrderedDict( + sorted((k, sort_dict(v)) for k, v in obj.items()) + ) + elif isinstance(obj, list): + # Check if all items in the list are primitive types that can be sorted + if obj and all( + isinstance(item, (str, int, float, bool, type(None))) + for item in obj + ): + # Sort primitive lists for consistent ordering + return sorted(obj, key=lambda x: (x is None, str(x))) + else: + # For lists of complex objects, process each item but maintain order + return [sort_dict(item) for item in obj] + return obj + + return collections.OrderedDict( + [ + ("id", self.id), + ("name", self.name), + ("inputSchema", sort_dict(self.input_schema.jsonschema())), + ("outputSchema", sort_dict(self.output_schema.jsonschema())), + ("description", self.description), + ("categories", sorted_categories), + ( + "contributors", + sorted( + [contributor.model_dump() for contributor in self.contributors], + key=lambda c: (c.get("name", ""), c.get("username", "")), + ), + ), + ("staticOutput", self.static_output), + ("uiType", self.block_type.value), + ] + ) def get_info(self) -> BlockInfo: from backend.data.credit import get_block_cost @@ -738,3 +772,127 @@ def get_io_block_ids() -> Sequence[str]: for id, B in get_blocks().items() if B().block_type in (BlockType.INPUT, BlockType.OUTPUT) ] + + +async def get_block_registry() -> Dict[str, BlocksRegistry]: + """ + Retrieves the BlocksRegistry from the database and returns a dictionary mapping + block names to BlocksRegistry objects. + + Returns: + Dict[str, BlocksRegistry]: A dictionary where each key is a block name and + each value is a BlocksRegistry instance. 
+ """ + blocks = await BlocksRegistry.prisma().find_many() + return {block.id: block for block in blocks} + + +def recursive_json_compare( + db_block_definition: Any, local_block_definition: Any +) -> bool: + """ + Recursively compares two JSON objects for equality. + + Args: + db_block_definition (Any): The JSON object from the database. + local_block_definition (Any): The local JSON object to compare against. + + Returns: + bool: True if the objects are equal, False otherwise. + """ + if isinstance(db_block_definition, dict) and isinstance( + local_block_definition, dict + ): + if set(db_block_definition.keys()) != set(local_block_definition.keys()): + logger.error( + f"Keys are not the same: {set(db_block_definition.keys())} != {set(local_block_definition.keys())}" + ) + return False + return all( + recursive_json_compare(db_block_definition[k], local_block_definition[k]) + for k in db_block_definition + ) + values_are_same = db_block_definition == local_block_definition + if not values_are_same: + logger.error( + f"Values are not the same: {db_block_definition} != {local_block_definition}" + ) + return values_are_same + + +def check_block_same(db_block: BlocksRegistry, local_block: Block) -> bool: + """ + Compares a database block with a local block. + + Args: + db_block (BlocksRegistry): The block object from the database registry. + local_block (Block[BlockSchema, BlockSchema]): The local block definition. + + Returns: + bool: True if the blocks are equal, False otherwise. + """ + local_block_instance = local_block() # type: ignore + local_block_definition = local_block_instance.to_dict() + db_block_definition = db_block.definition + logger.info( + f"Checking if block {local_block_instance.name} is the same as the database block {db_block.name}" + ) + is_same = recursive_json_compare(db_block_definition, local_block_definition) + return is_same + + +def find_delta_blocks( + db_blocks: Dict[str, BlocksRegistry], local_blocks: Dict[str, Block] +) -> Dict[str, Block]: + """ + Finds the set of blocks that are new or changed compared to the database. + + Args: + db_blocks (Dict[str, BlocksRegistry]): Existing blocks from the database, keyed by name. + local_blocks (Dict[str, Block]): Local block definitions, keyed by name. + + Returns: + Dict[str, Block]: Blocks that are missing from or different than the database, keyed by name. + """ + block_update: Dict[str, Block] = {} + for block_id, block in local_blocks.items(): + if block_id not in db_blocks: + block_update[block_id] = block + else: + if not check_block_same(db_blocks[block_id], block): + block_update[block_id] = block + return block_update + + +async def upsert_blocks_change_bulk(blocks: Dict[str, Block]): + """ + Bulk upserts blocks into the database if changed. + + - Compares the provided local blocks to those in the database via their definition. + - Inserts new or updated blocks. + + Args: + blocks (Dict[str, Block]): Local block definitions to upsert. + + Returns: + Dict[str, Block]: Blocks that were new or changed and upserted. 
+ """ + db_blocks = await get_block_registry() + block_update = find_delta_blocks(db_blocks, blocks) + logger.error(f"Upserting {len(block_update)} blocks of {len(blocks)} total blocks") + for block_id, block in block_update.items(): + await BlocksRegistry.prisma().upsert( + where={"id": block_id}, + data={ + "create": { + "id": block_id, + "name": block().__class__.__name__, # type: ignore + "definition": Json(block.to_dict(block())), # type: ignore + }, + "update": { + "name": block().__class__.__name__, # type: ignore + "definition": Json(block.to_dict(block())), # type: ignore + }, + }, + ) + return block_update diff --git a/autogpt_platform/backend/backend/data/block_test.py b/autogpt_platform/backend/backend/data/block_test.py new file mode 100644 index 0000000000..aff481e285 --- /dev/null +++ b/autogpt_platform/backend/backend/data/block_test.py @@ -0,0 +1,191 @@ +import json +from datetime import datetime + +import pytest +from prisma.models import BlocksRegistry + +from backend.blocks.basic import ( + FileStoreBlock, + PrintToConsoleBlock, + ReverseListOrderBlock, + StoreValueBlock, +) +from backend.data.block import ( + check_block_same, + find_delta_blocks, + recursive_json_compare, +) + + +@pytest.mark.asyncio +async def test_recursive_json_compare(): + db_block_definition = { + "a": 1, + "b": 2, + "c": 3, + } + local_block_definition = { + "a": 1, + "b": 2, + "c": 3, + } + assert recursive_json_compare(db_block_definition, local_block_definition) + assert not recursive_json_compare( + db_block_definition, {**local_block_definition, "d": 4} + ) + assert not recursive_json_compare( + db_block_definition, {**local_block_definition, "a": 2} + ) + assert not recursive_json_compare( + db_block_definition, {**local_block_definition, "b": 3} + ) + assert not recursive_json_compare( + db_block_definition, {**local_block_definition, "c": 4} + ) + assert not recursive_json_compare( + db_block_definition, {**local_block_definition, "a": 1, "b": 2, "c": 3, "d": 4} + ) + assert recursive_json_compare({}, {}) + assert recursive_json_compare({"a": 1}, {"a": 1}) + assert not recursive_json_compare({"a": 1}, {"b": 1}) + assert not recursive_json_compare({"a": 1}, {"a": 2}) + assert not recursive_json_compare({"a": 1}, {"a": [1, 2]}) + assert not recursive_json_compare({"a": 1}, {"a": {"b": 1}}) + assert not recursive_json_compare({"a": 1}, {"a": {"b": 2}}) + assert not recursive_json_compare({"a": 1}, {"a": {"b": [1, 2]}}) + assert not recursive_json_compare({"a": 1}, {"a": {"b": {"c": 1}}}) + assert not recursive_json_compare({"a": 1}, {"a": {"b": {"c": 2}}}) + + +@pytest.mark.asyncio +async def test_check_block_same(): + local_block = PrintToConsoleBlock() + db_block = BlocksRegistry( + id="f3b1c1b2-4c4f-4f0d-8d2f-4c4f0d8d2f4c", + name=local_block.__class__.__name__, + definition=json.dumps(local_block.to_dict()), # type: ignore To much type magic going on here + updatedAt=datetime.now(), + ) + assert check_block_same(db_block, local_block) + + +@pytest.mark.asyncio +async def test_check_block_not_same(): + local_block = PrintToConsoleBlock() + local_block_data = local_block.to_dict() + local_block_data["description"] = "Hello, World!" 
+ + db_block = BlocksRegistry( + id="f3b1c1b2-4c4f-4f0d-8d2f-4c4f0d8d2f4c", + name=local_block.__class__.__name__, + definition=json.dumps(local_block_data), # type: ignore To much type magic going on here + updatedAt=datetime.now(), + ) + assert not check_block_same(db_block, local_block) + + +@pytest.mark.asyncio +async def test_find_delta_blocks(): + now = datetime.now() + store_value_block = StoreValueBlock() + local_blocks = { + PrintToConsoleBlock().id: PrintToConsoleBlock(), + ReverseListOrderBlock().id: ReverseListOrderBlock(), + FileStoreBlock().id: FileStoreBlock(), + store_value_block.id: store_value_block, + } + db_blocks = { + PrintToConsoleBlock().id: BlocksRegistry( + id=PrintToConsoleBlock().id, + name=PrintToConsoleBlock().__class__.__name__, + definition=json.dumps(PrintToConsoleBlock().to_dict()), # type: ignore To much type magic going on here + updatedAt=now, + ), + ReverseListOrderBlock().id: BlocksRegistry( + id=ReverseListOrderBlock().id, + name=ReverseListOrderBlock().__class__.__name__, + definition=json.dumps(ReverseListOrderBlock().to_dict()), # type: ignore To much type magic going on here + updatedAt=now, + ), + FileStoreBlock().id: BlocksRegistry( + id=FileStoreBlock().id, + name=FileStoreBlock().__class__.__name__, + definition=json.dumps(FileStoreBlock().to_dict()), # type: ignore To much type magic going on here + updatedAt=now, + ), + } + delta_blocks = find_delta_blocks(db_blocks, local_blocks) + assert len(delta_blocks) == 1 + assert store_value_block.id in delta_blocks.keys() + assert delta_blocks[store_value_block.id] == store_value_block + + +@pytest.mark.asyncio +async def test_find_delta_blocks_block_updated(): + now = datetime.now() + store_value_block = StoreValueBlock() + print_to_console_block_definition = PrintToConsoleBlock().to_dict() + print_to_console_block_definition["description"] = "Hello, World!" 
+ local_blocks = { + PrintToConsoleBlock().id: PrintToConsoleBlock(), + ReverseListOrderBlock().id: ReverseListOrderBlock(), + FileStoreBlock().id: FileStoreBlock(), + store_value_block.id: store_value_block, + } + db_blocks = { + PrintToConsoleBlock().id: BlocksRegistry( + id=PrintToConsoleBlock().id, + name=PrintToConsoleBlock().__class__.__name__, + definition=json.dumps(print_to_console_block_definition), # type: ignore To much type magic going on here + updatedAt=now, + ), + ReverseListOrderBlock().id: BlocksRegistry( + id=ReverseListOrderBlock().id, + name=ReverseListOrderBlock().__class__.__name__, + definition=json.dumps(ReverseListOrderBlock().to_dict()), # type: ignore To much type magic going on here + updatedAt=now, + ), + FileStoreBlock().id: BlocksRegistry( + id=FileStoreBlock().id, + name=FileStoreBlock().__class__.__name__, + definition=json.dumps(FileStoreBlock().to_dict()), # type: ignore To much type magic going on here + updatedAt=now, + ), + } + delta_blocks = find_delta_blocks(db_blocks, local_blocks) + assert len(delta_blocks) == 2 + assert store_value_block.id in delta_blocks.keys() + assert delta_blocks[store_value_block.id] == store_value_block + assert PrintToConsoleBlock().id in delta_blocks.keys() + + +@pytest.mark.asyncio +async def test_find_delta_block_no_diff(): + now = datetime.now() + local_blocks = { + PrintToConsoleBlock().id: PrintToConsoleBlock(), + ReverseListOrderBlock().id: ReverseListOrderBlock(), + FileStoreBlock().id: FileStoreBlock(), + } + db_blocks = { + PrintToConsoleBlock().id: BlocksRegistry( + id=PrintToConsoleBlock().id, + name=PrintToConsoleBlock().__class__.__name__, + definition=json.dumps(PrintToConsoleBlock().to_dict()), # type: ignore To much type magic going on here + updatedAt=now, + ), + ReverseListOrderBlock().id: BlocksRegistry( + id=ReverseListOrderBlock().id, + name=ReverseListOrderBlock().__class__.__name__, + definition=json.dumps(ReverseListOrderBlock().to_dict()), # type: ignore To much type magic going on here + updatedAt=now, + ), + FileStoreBlock().id: BlocksRegistry( + id=FileStoreBlock().id, + name=FileStoreBlock().__class__.__name__, + definition=json.dumps(FileStoreBlock().to_dict()), # type: ignore To much type magic going on here + updatedAt=now, + ), + } + delta_blocks = find_delta_blocks(db_blocks, local_blocks) + assert len(delta_blocks) == 0 diff --git a/autogpt_platform/backend/backend/data/model.py b/autogpt_platform/backend/backend/data/model.py index 1a34cde65d..e240f02a57 100644 --- a/autogpt_platform/backend/backend/data/model.py +++ b/autogpt_platform/backend/backend/data/model.py @@ -523,13 +523,13 @@ class CredentialsMetaInput(BaseModel, Generic[CP, CT]): if hasattr(model_class, "allowed_providers") and hasattr( model_class, "allowed_cred_types" ): - allowed_providers = model_class.allowed_providers() + allowed_providers = sorted(model_class.allowed_providers()) # If no specific providers (None), allow any string if allowed_providers is None: schema["credentials_provider"] = ["string"] # Allow any string provider else: schema["credentials_provider"] = allowed_providers - schema["credentials_types"] = model_class.allowed_cred_types() + schema["credentials_types"] = sorted(model_class.allowed_cred_types()) # Do not return anything, just mutate schema in place model_config = ConfigDict( diff --git a/autogpt_platform/backend/backend/sdk/db.py b/autogpt_platform/backend/backend/sdk/db.py index 292bf61275..b2cc0d1b80 100644 --- a/autogpt_platform/backend/backend/sdk/db.py +++ 
b/autogpt_platform/backend/backend/sdk/db.py @@ -1,16 +1,98 @@ from typing import Dict from prisma import Prisma +from prisma.models import ProviderRegistry as PrismaProviderRegistry from backend.sdk.provider import ProviderRegister -async def upsert_providers_bulk(providers: Dict[str, ProviderRegister]): +def is_providers_different( + current_provider: PrismaProviderRegistry, new_provider: ProviderRegister +) -> bool: + """ + Compare a current provider (as stored in the database) against a new provider registration + and determine if they are different. This is done by converting the database model to a + ProviderRegister and checking for equality (all fields compared). + + Args: + current_provider (PrismaProviderRegistry): The provider as stored in the database. + new_provider (ProviderRegister): The provider specification to compare. + + Returns: + bool: True if the providers differ, False if they are effectively the same. + """ + current_provider_register = ProviderRegister( + name=current_provider.name, + with_oauth=current_provider.with_oauth, + client_id_env_var=current_provider.client_id_env_var, + client_secret_env_var=current_provider.client_secret_env_var, + with_api_key=current_provider.with_api_key, + api_key_env_var=current_provider.api_key_env_var, + with_user_password=current_provider.with_user_password, + username_env_var=current_provider.username_env_var, + password_env_var=current_provider.password_env_var, + ) + if current_provider_register == new_provider: + return False + return True + + +def find_delta_providers( + current_providers: Dict[str, PrismaProviderRegistry], + providers: Dict[str, ProviderRegister], +) -> Dict[str, ProviderRegister]: + """ + Identify providers that are either new or updated compared to the current providers list. + + Args: + current_providers (Dict[str, PrismaProviderRegistry]): Dictionary of current provider models keyed by provider name. + providers (Dict[str, ProviderRegister]): Dictionary of new provider registrations keyed by provider name. + + Returns: + Dict[str, ProviderRegister]: Providers that need to be added/updated in the registry. + - Includes providers not in current_providers. + - Includes providers where the data differs from what's in current_providers. + """ + provider_update = {} + for name, provider in providers.items(): + if name not in current_providers: + provider_update[name] = provider + else: + if is_providers_different(current_providers[name], provider): + provider_update[name] = provider + + return provider_update + + +async def get_providers() -> Dict[str, PrismaProviderRegistry]: + """ + Retrieve all provider registries from the database. + + Returns: + Dict[str, PrismaProviderRegistry]: Dictionary of all current providers, keyed by provider name. + """ + async with Prisma() as prisma: + providers = await prisma.providerregistry.find_many() + return { + provider.name: PrismaProviderRegistry(**provider.model_dump()) + for provider in providers + } + + +async def upsert_providers_change_bulk(providers: Dict[str, ProviderRegister]): + """ + Bulk upsert providers into the database after checking for changes. + + Args: + providers (Dict[str, ProviderRegister]): Dictionary of new provider registrations keyed by provider name. 
+ """ + current_providers = await get_providers() + provider_update = find_delta_providers(current_providers, providers) """Async version of bulk upsert providers with all fields using transaction for atomicity""" async with Prisma() as prisma: async with prisma.tx() as tx: results = [] - for name, provider in providers.items(): + for name, provider in provider_update.items(): result = await tx.providerregistry.upsert( where={"name": name}, data={ diff --git a/autogpt_platform/backend/backend/sdk/db_test.py b/autogpt_platform/backend/backend/sdk/db_test.py new file mode 100644 index 0000000000..aa1eb335e4 --- /dev/null +++ b/autogpt_platform/backend/backend/sdk/db_test.py @@ -0,0 +1,127 @@ +from datetime import datetime + +import pytest +from prisma.models import ProviderRegistry as PrismaProviderRegistry + +from backend.sdk.db import find_delta_providers, is_providers_different +from backend.sdk.provider import ProviderRegister + + +@pytest.mark.asyncio +def test_is_providers_different_same(): + current_provider = PrismaProviderRegistry( + name="test_provider", + with_oauth=True, + client_id_env_var="TEST_CLIENT_ID", + client_secret_env_var="TEST_CLIENT_SECRET", + with_api_key=True, + api_key_env_var="TEST_API_KEY", + with_user_password=True, + username_env_var="TEST_USERNAME", + password_env_var="TEST_PASSWORD", + updatedAt=datetime.now(), + ) + new_provider = ProviderRegister( + name="test_provider", + with_oauth=True, + client_id_env_var="TEST_CLIENT_ID", + client_secret_env_var="TEST_CLIENT_SECRET", + with_api_key=True, + api_key_env_var="TEST_API_KEY", + with_user_password=True, + username_env_var="TEST_USERNAME", + password_env_var="TEST_PASSWORD", + ) + assert not is_providers_different(current_provider, new_provider) + + +@pytest.mark.asyncio +def test_is_providers_different_different(): + current_provider = PrismaProviderRegistry( + name="test_provider", + with_oauth=True, + client_id_env_var="TEST_CLIENT_ID", + client_secret_env_var="TEST_CLIENT_SECRET", + with_api_key=True, + api_key_env_var="TEST_API_KEY", + with_user_password=True, + username_env_var="TEST_USERNAME", + password_env_var="TEST_PASSWORD", + updatedAt=datetime.now(), + ) + new_provider = ProviderRegister( + name="test_provider", + with_oauth=False, + with_api_key=True, + api_key_env_var="TEST_API_KEY", + with_user_password=True, + username_env_var="TEST_USERNAME", + password_env_var="TEST_PASSWORD", + ) + assert is_providers_different(current_provider, new_provider) + + +@pytest.mark.asyncio +def test_find_delta_providers(): + current_providers = { + "test_provider": PrismaProviderRegistry( + name="test_provider", + with_oauth=True, + client_id_env_var="TEST_CLIENT_ID", + client_secret_env_var="TEST_CLIENT_SECRET", + with_api_key=True, + api_key_env_var="TEST_API_KEY", + with_user_password=True, + username_env_var="TEST_USERNAME", + password_env_var="TEST_PASSWORD", + updatedAt=datetime.now(), + ), + "test_provider_2": PrismaProviderRegistry( + name="test_provider_2", + with_oauth=True, + client_id_env_var="TEST_CLIENT_ID_2", + client_secret_env_var="TEST_CLIENT_SECRET_2", + with_api_key=True, + api_key_env_var="TEST_API_KEY_2", + with_user_password=True, + username_env_var="TEST_USERNAME_2", + password_env_var="TEST_PASSWORD_2", + updatedAt=datetime.now(), + ), + } + new_providers = { + "test_provider": ProviderRegister( + name="test_provider", + with_oauth=True, + client_id_env_var="TEST_CLIENT_ID", + client_secret_env_var="TEST_CLIENT_SECRET", + with_api_key=True, + api_key_env_var="TEST_API_KEY", + 
with_user_password=True, + username_env_var="TEST_USERNAME", + password_env_var="TEST_PASSWORD", + ), + "test_provider_2": ProviderRegister( + name="test_provider_2", + with_oauth=False, + with_api_key=True, + api_key_env_var="TEST_API_KEY_2", + with_user_password=True, + username_env_var="TEST_USERNAME_2", + password_env_var="TEST_PASSWORD_2", + ), + "test_provider_3": ProviderRegister( + name="test_provider_3", + with_oauth=True, + client_id_env_var="TEST_CLIENT_ID_3", + client_secret_env_var="TEST_CLIENT_SECRET_3", + with_api_key=False, + with_user_password=True, + username_env_var="TEST_USERNAME_3", + password_env_var="TEST_PASSWORD_3", + ), + } + assert find_delta_providers(current_providers, new_providers) == { + "test_provider_2": new_providers["test_provider_2"], + "test_provider_3": new_providers["test_provider_3"], + } diff --git a/autogpt_platform/backend/backend/sdk/registry.py b/autogpt_platform/backend/backend/sdk/registry.py index 9980f936a4..637cb8aedc 100644 --- a/autogpt_platform/backend/backend/sdk/registry.py +++ b/autogpt_platform/backend/backend/sdk/registry.py @@ -13,7 +13,7 @@ from backend.data.model import Credentials from backend.integrations.oauth.base import BaseOAuthHandler from backend.integrations.providers import ProviderName from backend.integrations.webhooks._base import BaseWebhooksManager -from backend.sdk.db import upsert_providers_bulk +from backend.sdk.db import upsert_providers_change_bulk from backend.sdk.provider import ProviderRegister if TYPE_CHECKING: @@ -282,7 +282,7 @@ class AutoRegistry: cls._provider_registry[item.provider] = new_cred - await upsert_providers_bulk(providers=cls._provider_registry) + await upsert_providers_change_bulk(providers=cls._provider_registry) if hasattr(creds_store, "IntegrationCredentialsStore"): store_class = creds_store.IntegrationCredentialsStore diff --git a/autogpt_platform/backend/backend/server/rest_api.py b/autogpt_platform/backend/backend/server/rest_api.py index 7b70d64488..64e61ae03e 100644 --- a/autogpt_platform/backend/backend/server/rest_api.py +++ b/autogpt_platform/backend/backend/server/rest_api.py @@ -16,6 +16,7 @@ from fastapi.middleware.gzip import GZipMiddleware from fastapi.routing import APIRoute from prisma.errors import PrismaError +import backend.blocks import backend.data.block import backend.data.db import backend.data.graph @@ -99,6 +100,9 @@ async def lifespan_context(app: fastapi.FastAPI): await backend.data.block.initialize_blocks() + blocks = backend.blocks.load_all_blocks() + + await backend.data.block.upsert_blocks_change_bulk(blocks) await backend.data.user.migrate_and_encrypt_user_integrations() await backend.data.graph.fix_llm_provider_credentials() await backend.data.graph.migrate_llm_models(LlmModel.GPT4O) diff --git a/autogpt_platform/backend/migrations/20251017113143_provider_registry/migration.sql b/autogpt_platform/backend/migrations/20251017113143_provider_registry/migration.sql deleted file mode 100644 index 1f0c3b1239..0000000000 --- a/autogpt_platform/backend/migrations/20251017113143_provider_registry/migration.sql +++ /dev/null @@ -1,14 +0,0 @@ --- CreateTable -CREATE TABLE "ProviderRegistry" ( - "name" TEXT NOT NULL, - "with_oauth" BOOLEAN NOT NULL DEFAULT false, - "client_id_env_var" TEXT, - "client_secret_env_var" TEXT, - "with_api_key" BOOLEAN NOT NULL DEFAULT false, - "api_key_env_var" TEXT, - "with_user_password" BOOLEAN NOT NULL DEFAULT false, - "username_env_var" TEXT, - "password_env_var" TEXT, - - CONSTRAINT "ProviderRegistry_pkey" PRIMARY KEY ("name") -); 
diff --git a/autogpt_platform/backend/migrations/20251017130227_add_block_provider_registry/migration.sql b/autogpt_platform/backend/migrations/20251017130227_add_block_provider_registry/migration.sql new file mode 100644 index 0000000000..d6f2678273 --- /dev/null +++ b/autogpt_platform/backend/migrations/20251017130227_add_block_provider_registry/migration.sql @@ -0,0 +1,31 @@ +-- CreateTable +CREATE TABLE "ProviderRegistry" ( + "name" TEXT NOT NULL, + "with_oauth" BOOLEAN NOT NULL DEFAULT false, + "client_id_env_var" TEXT, + "client_secret_env_var" TEXT, + "with_api_key" BOOLEAN NOT NULL DEFAULT false, + "api_key_env_var" TEXT, + "with_user_password" BOOLEAN NOT NULL DEFAULT false, + "username_env_var" TEXT, + "password_env_var" TEXT, + "updatedAt" TIMESTAMP(3) NOT NULL, + + CONSTRAINT "ProviderRegistry_pkey" PRIMARY KEY ("name") +); + +-- CreateTable +CREATE TABLE "BlocksRegistry" ( + "id" TEXT NOT NULL, + "name" TEXT NOT NULL, + "definition" JSONB NOT NULL, + "updatedAt" TIMESTAMP(3) NOT NULL, + + CONSTRAINT "BlocksRegistry_pkey" PRIMARY KEY ("id") +); + +-- CreateIndex +CREATE INDEX "ProviderRegistry_updatedAt_idx" ON "ProviderRegistry"("updatedAt"); + +-- CreateIndex +CREATE INDEX "BlocksRegistry_updatedAt_idx" ON "BlocksRegistry"("updatedAt"); diff --git a/autogpt_platform/backend/schema.prisma b/autogpt_platform/backend/schema.prisma index 4a507fe6a0..9b95178d6a 100644 --- a/autogpt_platform/backend/schema.prisma +++ b/autogpt_platform/backend/schema.prisma @@ -74,6 +74,18 @@ model ProviderRegistry { with_user_password Boolean @default(false) username_env_var String? password_env_var String? + updatedAt DateTime @updatedAt + + @@index([updatedAt]) +} + +model BlocksRegistry { + id String @id @default(uuid()) + name String + definition Json + updatedAt DateTime @updatedAt + + @@index([updatedAt]) } enum OnboardingStep { diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/fields/CredentialField/models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/fields/CredentialField/models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts index 499cacf1ce..e2a176f684 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/fields/CredentialField/models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/fields/CredentialField/models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts @@ -7,7 +7,7 @@ import { usePostV1CreateCredentials, } from "@/app/api/__generated__/endpoints/integrations/integrations"; import { useToast } from "@/components/molecules/Toast/use-toast"; -import { APIKeyCredentials } from "@/app/api/__generated__/models/aPIKeyCredentials"; +import { APIKeyCredentialsInput } from "@/app/api/__generated__/models/aPIKeyCredentialsInput"; import { useQueryClient } from "@tanstack/react-query"; import { useState } from "react"; @@ -89,7 +89,7 @@ export function useAPIKeyCredentialsModal({ api_key: values.apiKey, title: values.title, expires_at: expiresAt, - } as APIKeyCredentials, + } as APIKeyCredentialsInput, }); } From 8f861b1bb21e29f56c938db355b0b9944f195b3b Mon Sep 17 00:00:00 2001 From: Swifty Date: Fri, 17 Oct 2025 17:08:17 +0200 Subject: [PATCH 013/260] removed error handling from routes --- .../backend/backend/server/v2/store/routes.py | 375 ++++++------------ 1 file changed, 114 insertions(+), 261 deletions(-) diff --git 
a/autogpt_platform/backend/backend/server/v2/store/routes.py b/autogpt_platform/backend/backend/server/v2/store/routes.py index 5dca0b22df..1dc0734e7f 100644 --- a/autogpt_platform/backend/backend/server/v2/store/routes.py +++ b/autogpt_platform/backend/backend/server/v2/store/routes.py @@ -40,23 +40,13 @@ async def get_profile( Get the profile details for the authenticated user. Cached for 1 hour per user. """ - try: - profile = await backend.server.v2.store.db.get_user_profile(user_id) - if profile is None: - return fastapi.responses.JSONResponse( - status_code=404, - content={"detail": "Profile not found"}, - ) - return profile - except Exception as e: - logger.exception("Failed to fetch user profile for %s: %s", user_id, e) + profile = await backend.server.v2.store.db.get_user_profile(user_id) + if profile is None: return fastapi.responses.JSONResponse( - status_code=500, - content={ - "detail": "Failed to retrieve user profile", - "hint": "Check database connection.", - }, + status_code=404, + content={"detail": "Profile not found"}, ) + return profile @router.post( @@ -83,20 +73,10 @@ async def update_or_create_profile( Raises: HTTPException: If there is an error updating the profile """ - try: - updated_profile = await backend.server.v2.store.db.update_profile( - user_id=user_id, profile=profile - ) - return updated_profile - except Exception as e: - logger.exception("Failed to update profile for user %s: %s", user_id, e) - return fastapi.responses.JSONResponse( - status_code=500, - content={ - "detail": "Failed to update user profile", - "hint": "Validate request data.", - }, - ) + updated_profile = await backend.server.v2.store.db.update_profile( + user_id=user_id, profile=profile + ) + return updated_profile ############################################## @@ -155,26 +135,16 @@ async def get_agents( status_code=422, detail="Page size must be greater than 0" ) - try: - agents = await store_cache._get_cached_store_agents( - featured=featured, - creator=creator, - sorted_by=sorted_by, - search_query=search_query, - category=category, - page=page, - page_size=page_size, - ) - return agents - except Exception as e: - logger.exception("Failed to retrieve store agents: %s", e) - return fastapi.responses.JSONResponse( - status_code=500, - content={ - "detail": "Failed to retrieve store agents", - "hint": "Check database or search parameters.", - }, - ) + agents = await store_cache._get_cached_store_agents( + featured=featured, + creator=creator, + sorted_by=sorted_by, + search_query=search_query, + category=category, + page=page, + page_size=page_size, + ) + return agents @router.get( @@ -189,22 +159,13 @@ async def get_agent(username: str, agent_name: str): It returns the store listing agents details. 
""" - try: - username = urllib.parse.unquote(username).lower() - # URL decode the agent name since it comes from the URL path - agent_name = urllib.parse.unquote(agent_name).lower() - agent = await store_cache._get_cached_agent_details( - username=username, agent_name=agent_name - ) - return agent - except Exception: - logger.exception("Exception occurred whilst getting store agent details") - return fastapi.responses.JSONResponse( - status_code=500, - content={ - "detail": "An error occurred while retrieving the store agent details" - }, - ) + username = urllib.parse.unquote(username).lower() + # URL decode the agent name since it comes from the URL path + agent_name = urllib.parse.unquote(agent_name).lower() + agent = await store_cache._get_cached_agent_details( + username=username, agent_name=agent_name + ) + return agent @router.get( @@ -217,17 +178,10 @@ async def get_graph_meta_by_store_listing_version_id(store_listing_version_id: s """ Get Agent Graph from Store Listing Version ID. """ - try: - graph = await backend.server.v2.store.db.get_available_graph( - store_listing_version_id - ) - return graph - except Exception: - logger.exception("Exception occurred whilst getting agent graph") - return fastapi.responses.JSONResponse( - status_code=500, - content={"detail": "An error occurred while retrieving the agent graph"}, - ) + graph = await backend.server.v2.store.db.get_available_graph( + store_listing_version_id + ) + return graph @router.get( @@ -241,18 +195,11 @@ async def get_store_agent(store_listing_version_id: str): """ Get Store Agent Details from Store Listing Version ID. """ - try: - agent = await backend.server.v2.store.db.get_store_agent_by_version_id( - store_listing_version_id - ) + agent = await backend.server.v2.store.db.get_store_agent_by_version_id( + store_listing_version_id + ) - return agent - except Exception: - logger.exception("Exception occurred whilst getting store agent") - return fastapi.responses.JSONResponse( - status_code=500, - content={"detail": "An error occurred while retrieving the store agent"}, - ) + return agent @router.post( @@ -280,24 +227,17 @@ async def create_review( Returns: The created review """ - try: - username = urllib.parse.unquote(username).lower() - agent_name = urllib.parse.unquote(agent_name).lower() - # Create the review - created_review = await backend.server.v2.store.db.create_store_review( - user_id=user_id, - store_listing_version_id=review.store_listing_version_id, - score=review.score, - comments=review.comments, - ) + username = urllib.parse.unquote(username).lower() + agent_name = urllib.parse.unquote(agent_name).lower() + # Create the review + created_review = await backend.server.v2.store.db.create_store_review( + user_id=user_id, + store_listing_version_id=review.store_listing_version_id, + score=review.score, + comments=review.comments, + ) - return created_review - except Exception: - logger.exception("Exception occurred whilst creating store review") - return fastapi.responses.JSONResponse( - status_code=500, - content={"detail": "An error occurred while creating the store review"}, - ) + return created_review ############################################## @@ -340,21 +280,14 @@ async def get_creators( status_code=422, detail="Page size must be greater than 0" ) - try: - creators = await store_cache._get_cached_store_creators( - featured=featured, - search_query=search_query, - sorted_by=sorted_by, - page=page, - page_size=page_size, - ) - return creators - except Exception: - logger.exception("Exception occurred 
whilst getting store creators") - return fastapi.responses.JSONResponse( - status_code=500, - content={"detail": "An error occurred while retrieving the store creators"}, - ) + creators = await store_cache._get_cached_store_creators( + featured=featured, + search_query=search_query, + sorted_by=sorted_by, + page=page, + page_size=page_size, + ) + return creators @router.get( @@ -370,18 +303,9 @@ async def get_creator( Get the details of a creator. - Creator Details Page """ - try: - username = urllib.parse.unquote(username).lower() - creator = await store_cache._get_cached_creator_details(username=username) - return creator - except Exception: - logger.exception("Exception occurred whilst getting creator details") - return fastapi.responses.JSONResponse( - status_code=500, - content={ - "detail": "An error occurred while retrieving the creator details" - }, - ) + username = urllib.parse.unquote(username).lower() + creator = await store_cache._get_cached_creator_details(username=username) + return creator ############################################ @@ -404,17 +328,10 @@ async def get_my_agents( """ Get user's own agents. """ - try: - agents = await backend.server.v2.store.db.get_my_agents( - user_id, page=page, page_size=page_size - ) - return agents - except Exception: - logger.exception("Exception occurred whilst getting my agents") - return fastapi.responses.JSONResponse( - status_code=500, - content={"detail": "An error occurred while retrieving the my agents"}, - ) + agents = await backend.server.v2.store.db.get_my_agents( + user_id, page=page, page_size=page_size + ) + return agents @router.delete( @@ -438,19 +355,12 @@ async def delete_submission( Returns: bool: True if the submission was successfully deleted, False otherwise """ - try: - result = await backend.server.v2.store.db.delete_store_submission( - user_id=user_id, - submission_id=submission_id, - ) + result = await backend.server.v2.store.db.delete_store_submission( + user_id=user_id, + submission_id=submission_id, + ) - return result - except Exception: - logger.exception("Exception occurred whilst deleting store submission") - return fastapi.responses.JSONResponse( - status_code=500, - content={"detail": "An error occurred while deleting the store submission"}, - ) + return result @router.get( @@ -488,21 +398,12 @@ async def get_submissions( raise fastapi.HTTPException( status_code=422, detail="Page size must be greater than 0" ) - try: - listings = await backend.server.v2.store.db.get_store_submissions( - user_id=user_id, - page=page, - page_size=page_size, - ) - return listings - except Exception: - logger.exception("Exception occurred whilst getting store submissions") - return fastapi.responses.JSONResponse( - status_code=500, - content={ - "detail": "An error occurred while retrieving the store submissions" - }, - ) + listings = await backend.server.v2.store.db.get_store_submissions( + user_id=user_id, + page=page, + page_size=page_size, + ) + return listings @router.post( @@ -529,36 +430,23 @@ async def create_submission( Raises: HTTPException: If there is an error creating the submission """ - try: - result = await backend.server.v2.store.db.create_store_submission( - user_id=user_id, - agent_id=submission_request.agent_id, - agent_version=submission_request.agent_version, - slug=submission_request.slug, - name=submission_request.name, - video_url=submission_request.video_url, - image_urls=submission_request.image_urls, - description=submission_request.description, - instructions=submission_request.instructions, - 
sub_heading=submission_request.sub_heading, - categories=submission_request.categories, - changes_summary=submission_request.changes_summary or "Initial Submission", - recommended_schedule_cron=submission_request.recommended_schedule_cron, - ) + result = await backend.server.v2.store.db.create_store_submission( + user_id=user_id, + agent_id=submission_request.agent_id, + agent_version=submission_request.agent_version, + slug=submission_request.slug, + name=submission_request.name, + video_url=submission_request.video_url, + image_urls=submission_request.image_urls, + description=submission_request.description, + instructions=submission_request.instructions, + sub_heading=submission_request.sub_heading, + categories=submission_request.categories, + changes_summary=submission_request.changes_summary or "Initial Submission", + recommended_schedule_cron=submission_request.recommended_schedule_cron, + ) - return result - except backend.server.v2.store.exceptions.SlugAlreadyInUseError as e: - logger.warning("Slug already in use: %s", str(e)) - return fastapi.responses.JSONResponse( - status_code=409, - content={"detail": str(e)}, - ) - except Exception: - logger.exception("Exception occurred whilst creating store submission") - return fastapi.responses.JSONResponse( - status_code=500, - content={"detail": "An error occurred while creating the store submission"}, - ) + return result @router.put( @@ -627,36 +515,10 @@ async def upload_submission_media( Raises: HTTPException: If there is an error uploading the media """ - try: - media_url = await backend.server.v2.store.media.upload_media( - user_id=user_id, file=file - ) - return media_url - except backend.server.v2.store.exceptions.VirusDetectedError as e: - logger.warning(f"Virus detected in uploaded file: {e.threat_name}") - return fastapi.responses.JSONResponse( - status_code=400, - content={ - "detail": f"File rejected due to virus detection: {e.threat_name}", - "error_type": "virus_detected", - "threat_name": e.threat_name, - }, - ) - except backend.server.v2.store.exceptions.VirusScanError as e: - logger.error(f"Virus scanning failed: {str(e)}") - return fastapi.responses.JSONResponse( - status_code=503, - content={ - "detail": "Virus scanning service unavailable. 
Please try again later.", - "error_type": "virus_scan_failed", - }, - ) - except Exception: - logger.exception("Exception occurred whilst uploading submission media") - return fastapi.responses.JSONResponse( - status_code=500, - content={"detail": "An error occurred while uploading the media file"}, - ) + media_url = await backend.server.v2.store.media.upload_media( + user_id=user_id, file=file + ) + return media_url @router.post( @@ -679,44 +541,35 @@ async def generate_image( Returns: JSONResponse: JSON containing the URL of the generated image """ - try: - agent = await backend.data.graph.get_graph(agent_id, user_id=user_id) + agent = await backend.data.graph.get_graph(agent_id, user_id=user_id) - if not agent: - raise fastapi.HTTPException( - status_code=404, detail=f"Agent with ID {agent_id} not found" - ) - # Use .jpeg here since we are generating JPEG images - filename = f"agent_{agent_id}.jpeg" + if not agent: + raise fastapi.HTTPException( + status_code=404, detail=f"Agent with ID {agent_id} not found" + ) + # Use .jpeg here since we are generating JPEG images + filename = f"agent_{agent_id}.jpeg" - existing_url = await backend.server.v2.store.media.check_media_exists( - user_id, filename - ) - if existing_url: - logger.info(f"Using existing image for agent {agent_id}") - return fastapi.responses.JSONResponse(content={"image_url": existing_url}) - # Generate agent image as JPEG - image = await backend.server.v2.store.image_gen.generate_agent_image( - agent=agent - ) + existing_url = await backend.server.v2.store.media.check_media_exists( + user_id, filename + ) + if existing_url: + logger.info(f"Using existing image for agent {agent_id}") + return fastapi.responses.JSONResponse(content={"image_url": existing_url}) + # Generate agent image as JPEG + image = await backend.server.v2.store.image_gen.generate_agent_image(agent=agent) - # Create UploadFile with the correct filename and content_type - image_file = fastapi.UploadFile( - file=image, - filename=filename, - ) + # Create UploadFile with the correct filename and content_type + image_file = fastapi.UploadFile( + file=image, + filename=filename, + ) - image_url = await backend.server.v2.store.media.upload_media( - user_id=user_id, file=image_file, use_file_name=True - ) + image_url = await backend.server.v2.store.media.upload_media( + user_id=user_id, file=image_file, use_file_name=True + ) - return fastapi.responses.JSONResponse(content={"image_url": image_url}) - except Exception: - logger.exception("Exception occurred whilst generating submission image") - return fastapi.responses.JSONResponse( - status_code=500, - content={"detail": "An error occurred while generating the image"}, - ) + return fastapi.responses.JSONResponse(content={"image_url": image_url}) @router.get( From 972cbfc3dee51a66d321a6cf77e4d40e9c4bd4a8 Mon Sep 17 00:00:00 2001 From: Swifty Date: Fri, 17 Oct 2025 17:20:05 +0200 Subject: [PATCH 014/260] fix tests --- .../backend/backend/data/block.py | 4 -- .../backend/backend/data/block_test.py | 42 +++++++++---------- 2 files changed, 21 insertions(+), 25 deletions(-) diff --git a/autogpt_platform/backend/backend/data/block.py b/autogpt_platform/backend/backend/data/block.py index 1bf378dc5f..cd94c41325 100644 --- a/autogpt_platform/backend/backend/data/block.py +++ b/autogpt_platform/backend/backend/data/block.py @@ -834,9 +834,6 @@ def check_block_same(db_block: BlocksRegistry, local_block: Block) -> bool: local_block_instance = local_block() # type: ignore local_block_definition = 
local_block_instance.to_dict() db_block_definition = db_block.definition - logger.info( - f"Checking if block {local_block_instance.name} is the same as the database block {db_block.name}" - ) is_same = recursive_json_compare(db_block_definition, local_block_definition) return is_same @@ -879,7 +876,6 @@ async def upsert_blocks_change_bulk(blocks: Dict[str, Block]): """ db_blocks = await get_block_registry() block_update = find_delta_blocks(db_blocks, blocks) - logger.error(f"Upserting {len(block_update)} blocks of {len(blocks)} total blocks") for block_id, block in block_update.items(): await BlocksRegistry.prisma().upsert( where={"id": block_id}, diff --git a/autogpt_platform/backend/backend/data/block_test.py b/autogpt_platform/backend/backend/data/block_test.py index aff481e285..89cbbad433 100644 --- a/autogpt_platform/backend/backend/data/block_test.py +++ b/autogpt_platform/backend/backend/data/block_test.py @@ -59,29 +59,29 @@ async def test_recursive_json_compare(): @pytest.mark.asyncio async def test_check_block_same(): - local_block = PrintToConsoleBlock() + local_block_instance = PrintToConsoleBlock() db_block = BlocksRegistry( id="f3b1c1b2-4c4f-4f0d-8d2f-4c4f0d8d2f4c", - name=local_block.__class__.__name__, - definition=json.dumps(local_block.to_dict()), # type: ignore To much type magic going on here + name=local_block_instance.__class__.__name__, + definition=json.dumps(local_block_instance.to_dict()), # type: ignore To much type magic going on here updatedAt=datetime.now(), ) - assert check_block_same(db_block, local_block) + assert check_block_same(db_block, PrintToConsoleBlock) # type: ignore @pytest.mark.asyncio async def test_check_block_not_same(): - local_block = PrintToConsoleBlock() - local_block_data = local_block.to_dict() + local_block_instance = PrintToConsoleBlock() + local_block_data = local_block_instance.to_dict() local_block_data["description"] = "Hello, World!" db_block = BlocksRegistry( id="f3b1c1b2-4c4f-4f0d-8d2f-4c4f0d8d2f4c", - name=local_block.__class__.__name__, + name=local_block_instance.__class__.__name__, definition=json.dumps(local_block_data), # type: ignore To much type magic going on here updatedAt=datetime.now(), ) - assert not check_block_same(db_block, local_block) + assert not check_block_same(db_block, PrintToConsoleBlock) # type: ignore @pytest.mark.asyncio @@ -89,10 +89,10 @@ async def test_find_delta_blocks(): now = datetime.now() store_value_block = StoreValueBlock() local_blocks = { - PrintToConsoleBlock().id: PrintToConsoleBlock(), - ReverseListOrderBlock().id: ReverseListOrderBlock(), - FileStoreBlock().id: FileStoreBlock(), - store_value_block.id: store_value_block, + PrintToConsoleBlock().id: PrintToConsoleBlock, + ReverseListOrderBlock().id: ReverseListOrderBlock, + FileStoreBlock().id: FileStoreBlock, + store_value_block.id: StoreValueBlock, } db_blocks = { PrintToConsoleBlock().id: BlocksRegistry( @@ -117,7 +117,7 @@ async def test_find_delta_blocks(): delta_blocks = find_delta_blocks(db_blocks, local_blocks) assert len(delta_blocks) == 1 assert store_value_block.id in delta_blocks.keys() - assert delta_blocks[store_value_block.id] == store_value_block + assert delta_blocks[store_value_block.id] == StoreValueBlock @pytest.mark.asyncio @@ -127,10 +127,10 @@ async def test_find_delta_blocks_block_updated(): print_to_console_block_definition = PrintToConsoleBlock().to_dict() print_to_console_block_definition["description"] = "Hello, World!" 
local_blocks = { - PrintToConsoleBlock().id: PrintToConsoleBlock(), - ReverseListOrderBlock().id: ReverseListOrderBlock(), - FileStoreBlock().id: FileStoreBlock(), - store_value_block.id: store_value_block, + PrintToConsoleBlock().id: PrintToConsoleBlock, + ReverseListOrderBlock().id: ReverseListOrderBlock, + FileStoreBlock().id: FileStoreBlock, + store_value_block.id: StoreValueBlock, } db_blocks = { PrintToConsoleBlock().id: BlocksRegistry( @@ -155,7 +155,7 @@ async def test_find_delta_blocks_block_updated(): delta_blocks = find_delta_blocks(db_blocks, local_blocks) assert len(delta_blocks) == 2 assert store_value_block.id in delta_blocks.keys() - assert delta_blocks[store_value_block.id] == store_value_block + assert delta_blocks[store_value_block.id] == StoreValueBlock assert PrintToConsoleBlock().id in delta_blocks.keys() @@ -163,9 +163,9 @@ async def test_find_delta_blocks_block_updated(): async def test_find_delta_block_no_diff(): now = datetime.now() local_blocks = { - PrintToConsoleBlock().id: PrintToConsoleBlock(), - ReverseListOrderBlock().id: ReverseListOrderBlock(), - FileStoreBlock().id: FileStoreBlock(), + PrintToConsoleBlock().id: PrintToConsoleBlock, + ReverseListOrderBlock().id: ReverseListOrderBlock, + FileStoreBlock().id: FileStoreBlock, } db_blocks = { PrintToConsoleBlock().id: BlocksRegistry( From 1b69f1644d8861d1166dfda7c081ec63b0959ba8 Mon Sep 17 00:00:00 2001 From: Swifty Date: Fri, 17 Oct 2025 17:26:08 +0200 Subject: [PATCH 015/260] revert frontend type change --- .../models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/fields/CredentialField/models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/fields/CredentialField/models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts index e2a176f684..53e245f92b 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/fields/CredentialField/models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/fields/CredentialField/models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts @@ -7,7 +7,7 @@ import { usePostV1CreateCredentials, } from "@/app/api/__generated__/endpoints/integrations/integrations"; import { useToast } from "@/components/molecules/Toast/use-toast"; -import { APIKeyCredentialsInput } from "@/app/api/__generated__/models/aPIKeyCredentialsInput"; +import { APIKeyCredentials} from "@/app/api/__generated__/models/aPIKeyCredentials"; import { useQueryClient } from "@tanstack/react-query"; import { useState } from "react"; @@ -89,7 +89,7 @@ export function useAPIKeyCredentialsModal({ api_key: values.apiKey, title: values.title, expires_at: expiresAt, - } as APIKeyCredentialsInput, + } as APIKeyCredentials, }); } From 3e50cbd2cbbce009f3651c8cd2b3238849b990bf Mon Sep 17 00:00:00 2001 From: Swifty Date: Fri, 17 Oct 2025 19:19:17 +0200 Subject: [PATCH 016/260] fix import --- .../models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/fields/CredentialField/models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts 
b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/fields/CredentialField/models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts index 53e245f92b..e2a176f684 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/fields/CredentialField/models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/fields/CredentialField/models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts @@ -7,7 +7,7 @@ import { usePostV1CreateCredentials, } from "@/app/api/__generated__/endpoints/integrations/integrations"; import { useToast } from "@/components/molecules/Toast/use-toast"; -import { APIKeyCredentials} from "@/app/api/__generated__/models/aPIKeyCredentials"; +import { APIKeyCredentialsInput } from "@/app/api/__generated__/models/aPIKeyCredentialsInput"; import { useQueryClient } from "@tanstack/react-query"; import { useState } from "react"; @@ -89,7 +89,7 @@ export function useAPIKeyCredentialsModal({ api_key: values.apiKey, title: values.title, expires_at: expiresAt, - } as APIKeyCredentials, + } as APIKeyCredentialsInput, }); } From c958c95d6b9a1d8bf5f98f0099e59cebaa8ab11f Mon Sep 17 00:00:00 2001 From: Swifty Date: Fri, 17 Oct 2025 20:36:49 +0200 Subject: [PATCH 017/260] fix incorrect type import --- .../models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/fields/CredentialField/models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/fields/CredentialField/models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts index e2a176f684..649f54a024 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/fields/CredentialField/models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/fields/CredentialField/models/APIKeyCredentialModal/useAPIKeyCredentialsModal.ts @@ -7,7 +7,7 @@ import { usePostV1CreateCredentials, } from "@/app/api/__generated__/endpoints/integrations/integrations"; import { useToast } from "@/components/molecules/Toast/use-toast"; -import { APIKeyCredentialsInput } from "@/app/api/__generated__/models/aPIKeyCredentialsInput"; +import type { PostV1CreateCredentialsBody } from "@/app/api/__generated__/models/postV1CreateCredentialsBody"; import { useQueryClient } from "@tanstack/react-query"; import { useState } from "react"; @@ -89,7 +89,7 @@ export function useAPIKeyCredentialsModal({ api_key: values.apiKey, title: values.title, expires_at: expiresAt, - } as APIKeyCredentialsInput, + } as PostV1CreateCredentialsBody, }); } From 271a520afaae36183f171a3b8c6ad38a5cb5d1de Mon Sep 17 00:00:00 2001 From: Ubbe Date: Mon, 20 Oct 2025 16:18:04 +0400 Subject: [PATCH 018/260] feat(frontend): setup DataFast analytics (#11182) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Changes 🏗️ Following https://datafa.st/docs/nextjs-app-router ## Checklist 📋 ### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] We will see once we make a production deployment and get data into the platform ### For configuration changes: None --- 
autogpt_platform/frontend/src/app/layout.tsx | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/autogpt_platform/frontend/src/app/layout.tsx b/autogpt_platform/frontend/src/app/layout.tsx index 77973a54e6..258a08a9bd 100644 --- a/autogpt_platform/frontend/src/app/layout.tsx +++ b/autogpt_platform/frontend/src/app/layout.tsx @@ -11,6 +11,8 @@ import { Toaster } from "@/components/molecules/Toast/toaster"; import { ReactQueryDevtools } from "@tanstack/react-query-devtools"; import { SpeedInsights } from "@vercel/speed-insights/next"; import { Analytics } from "@vercel/analytics/next"; +import { headers } from "next/headers"; +import Script from "next/script"; export const metadata: Metadata = { title: "AutoGPT Platform", @@ -22,6 +24,9 @@ export default async function RootLayout({ }: Readonly<{ children: React.ReactNode; }>) { + const headersList = await headers(); + const host = headersList.get("host") || ""; + const isPlatformDomain = host === "platform.agpt.co"; return ( + {isPlatformDomain && ( + + + +""" + + +async def create_test_app_in_db( + owner_id: str, + redirect_uri: str, +) -> dict: + """Create a temporary test OAuth application in the database""" + from prisma.models import OAuthApplication + + from backend.data import db + + # Connect to database + await db.connect() + + # Generate credentials + creds = generate_app_credentials( + name=TEST_APP_NAME, + description=TEST_APP_DESCRIPTION, + redirect_uris=[redirect_uri], + scopes=AVAILABLE_SCOPES, # All scopes for testing + ) + + # Insert into database + app = await OAuthApplication.prisma().create( + data={ + "id": creds["id"], + "name": creds["name"], + "description": creds["description"], + "clientId": creds["client_id"], + "clientSecret": creds["client_secret_hash"], + "clientSecretSalt": creds["client_secret_salt"], + "redirectUris": creds["redirect_uris"], + "grantTypes": creds["grant_types"], + "scopes": creds["scopes"], + "ownerId": owner_id, + "isActive": True, + } + ) + + click.echo(f"✓ Created test OAuth application: {app.clientId}") + + return { + "id": app.id, + "client_id": app.clientId, + "client_secret": creds["client_secret_plaintext"], + } + + +async def cleanup_test_app(app_id: str) -> None: + """Remove test application and all associated tokens from database""" + from prisma.models import ( + OAuthAccessToken, + OAuthApplication, + OAuthAuthorizationCode, + OAuthRefreshToken, + ) + + from backend.data import db + + if not db.is_connected(): + await db.connect() + + click.echo("\n🧹 Cleaning up test data...") + + # Delete authorization codes + deleted_codes = await OAuthAuthorizationCode.prisma().delete_many( + where={"applicationId": app_id} + ) + if deleted_codes: + click.echo(f" Deleted {deleted_codes} authorization code(s)") + + # Delete access tokens + deleted_access = await OAuthAccessToken.prisma().delete_many( + where={"applicationId": app_id} + ) + if deleted_access: + click.echo(f" Deleted {deleted_access} access token(s)") + + # Delete refresh tokens + deleted_refresh = await OAuthRefreshToken.prisma().delete_many( + where={"applicationId": app_id} + ) + if deleted_refresh: + click.echo(f" Deleted {deleted_refresh} refresh token(s)") + + # Delete the application itself + await OAuthApplication.prisma().delete(where={"id": app_id}) + click.echo(" Deleted test OAuth application") + + await db.disconnect() + click.echo("✓ Cleanup complete!") + + +def run_test_server( + port: int, + platform_url: str, + backend_url: str, + client_id: str, + client_secret: str, +) -> None: + """Run a simple 
HTTP server for testing OAuth flows""" + import json as json_module + import threading + from http.server import BaseHTTPRequestHandler, HTTPServer + from urllib.request import Request, urlopen + + redirect_uri = f"http://localhost:{port}/callback" + + html_content = create_test_html( + platform_url=platform_url, + client_id=client_id, + client_secret=client_secret, + redirect_uri=redirect_uri, + backend_url=backend_url, + ) + + class TestHandler(BaseHTTPRequestHandler): + def do_GET(self): + from urllib.parse import parse_qs + + # Parse the path + parsed = urlparse(self.path) + + # Serve the test page for root and callback + if parsed.path in ["/", "/callback"]: + self.send_response(200) + self.send_header("Content-Type", "text/html; charset=utf-8") + self.end_headers() + self.wfile.write(html_content.encode()) + + # Proxy API calls to backend (avoids CORS issues) + # Supports both /proxy/api/* and /proxy/external-api/* + elif parsed.path.startswith("/proxy/"): + try: + # Extract the API path and token from query params + api_path = parsed.path[len("/proxy") :] + query_params = parse_qs(parsed.query) + token = query_params.get("token", [None])[0] + + headers = {} + if token: + headers["Authorization"] = f"Bearer {token}" + + req = Request( + f"{backend_url}{api_path}", + headers=headers, + method="GET", + ) + + with urlopen(req) as response: + response_body = response.read() + self.send_response(response.status) + self.send_header("Content-Type", "application/json") + self.end_headers() + self.wfile.write(response_body) + + except Exception as e: + error_msg = str(e) + status_code = 500 + if hasattr(e, "code"): + status_code = e.code # type: ignore + if hasattr(e, "read"): + try: + error_body = e.read().decode() # type: ignore + error_data = json_module.loads(error_body) + error_msg = error_data.get("detail", error_msg) + except Exception: + pass + + self.send_response(status_code) + self.send_header("Content-Type", "application/json") + self.end_headers() + self.wfile.write(json_module.dumps({"detail": error_msg}).encode()) + + else: + self.send_response(404) + self.end_headers() + + def do_POST(self): + # Parse the path + parsed = urlparse(self.path) + + # Proxy token exchange to backend (avoids CORS issues) + if parsed.path == "/proxy/token": + try: + # Read request body + content_length = int(self.headers.get("Content-Length", 0)) + body = self.rfile.read(content_length) + + # Forward to backend + req = Request( + f"{backend_url}/api/oauth/token", + data=body, + headers={"Content-Type": "application/json"}, + method="POST", + ) + + with urlopen(req) as response: + response_body = response.read() + self.send_response(response.status) + self.send_header("Content-Type", "application/json") + self.end_headers() + self.wfile.write(response_body) + + except Exception as e: + error_msg = str(e) + # Try to extract error detail from urllib error + if hasattr(e, "read"): + try: + error_body = e.read().decode() # type: ignore + error_data = json_module.loads(error_body) + error_msg = error_data.get("detail", error_msg) + except Exception: + pass + + self.send_response(500) + self.send_header("Content-Type", "application/json") + self.end_headers() + self.wfile.write(json_module.dumps({"detail": error_msg}).encode()) + else: + self.send_response(404) + self.end_headers() + + def log_message(self, format, *args): + # Suppress default logging + pass + + server = HTTPServer(("localhost", port), TestHandler) + click.echo(f"\n🚀 Test server running at http://localhost:{port}") + click.echo(" Open this 
URL in your browser to test the OAuth flows\n") + + # Run server in a daemon thread + server_thread = threading.Thread(target=server.serve_forever, daemon=True) + server_thread.start() + + # Use a simple polling loop that can be interrupted + try: + while server_thread.is_alive(): + server_thread.join(timeout=1.0) + except KeyboardInterrupt: + pass + + click.echo("\n\n⏹️ Server stopped") + server.shutdown() + + +async def setup_and_cleanup_test_app( + owner_id: str, + redirect_uri: str, + port: int, + platform_url: str, + backend_url: str, +) -> None: + """ + Async context manager that handles test app lifecycle. + Creates the app, yields control to run the server, then cleans up. + """ + app_info: Optional[dict] = None + + try: + # Create test app in database + click.echo("\n📝 Creating temporary OAuth application...") + app_info = await create_test_app_in_db(owner_id, redirect_uri) + + click.echo(f"\n Client ID: {app_info['client_id']}") + click.echo(f" Client Secret: {app_info['client_secret'][:30]}...") + + # Run the test server (blocking, synchronous) + click.echo("\n" + "-" * 60) + click.echo(" Press Ctrl+C to stop the server and clean up") + click.echo("-" * 60) + + run_test_server( + port=port, + platform_url=platform_url, + backend_url=backend_url, + client_id=app_info["client_id"], + client_secret=app_info["client_secret"], + ) + + finally: + # Always clean up - we're still in the same event loop + if app_info: + try: + await cleanup_test_app(app_info["id"]) + except Exception as e: + click.echo(f"\n⚠️ Cleanup error: {e}", err=True) + click.echo( + f" You may need to manually delete app with ID: {app_info['id']}" + ) + + +@cli.command(name="test-server") +@click.option( + "--owner-id", + required=True, + help="User ID to own the temporary test OAuth application", +) +@click.option( + "--port", + default=TEST_SERVER_PORT, + help=f"Port to run the test server on (default: {TEST_SERVER_PORT})", +) +@click.option( + "--platform-url", + default="http://localhost:3000", + help="AutoGPT Platform frontend URL (default: http://localhost:3000)", +) +@click.option( + "--backend-url", + default="http://localhost:8006", + help="AutoGPT Platform backend URL (default: http://localhost:8006)", +) +def test_server_command( + owner_id: str, + port: int, + platform_url: str, + backend_url: str, +): + """Run a test server to test OAuth flows interactively + + This command: + 1. Creates a temporary OAuth application in the database + 2. Starts a minimal web server that acts as a third-party client + 3. Lets you test "Sign in with AutoGPT" and Integration Setup flows + 4. 
Cleans up all test data (app, tokens, codes) when you stop the server + + Example: + poetry run oauth-tool test-server --owner-id YOUR_USER_ID + + The test server will be available at http://localhost:9876 + """ + redirect_uri = f"http://localhost:{port}/callback" + + click.echo("=" * 60) + click.echo(" OAuth Test Server") + click.echo("=" * 60) + click.echo(f"\n Owner ID: {owner_id}") + click.echo(f" Platform URL: {platform_url}") + click.echo(f" Backend URL: {backend_url}") + click.echo(f" Test Server: http://localhost:{port}") + click.echo(f" Redirect URI: {redirect_uri}") + click.echo("\n" + "=" * 60) + + try: + # Run everything in a single event loop to keep Prisma client happy + asyncio.run( + setup_and_cleanup_test_app( + owner_id=owner_id, + redirect_uri=redirect_uri, + port=port, + platform_url=platform_url, + backend_url=backend_url, + ) + ) + except KeyboardInterrupt: + # Already handled inside, just exit cleanly + pass + except Exception as e: + click.echo(f"\n❌ Error: {e}", err=True) + sys.exit(1) + + +if __name__ == "__main__": + cli() diff --git a/autogpt_platform/backend/backend/data/api_key.py b/autogpt_platform/backend/backend/data/auth/api_key.py similarity index 95% rename from autogpt_platform/backend/backend/data/api_key.py rename to autogpt_platform/backend/backend/data/auth/api_key.py index 45194897de..2ecd5be9a5 100644 --- a/autogpt_platform/backend/backend/data/api_key.py +++ b/autogpt_platform/backend/backend/data/auth/api_key.py @@ -1,22 +1,24 @@ import logging import uuid from datetime import datetime, timezone -from typing import Optional +from typing import Literal, Optional from autogpt_libs.api_key.keysmith import APIKeySmith from prisma.enums import APIKeyPermission, APIKeyStatus from prisma.models import APIKey as PrismaAPIKey from prisma.types import APIKeyWhereUniqueInput -from pydantic import BaseModel, Field +from pydantic import Field from backend.data.includes import MAX_USER_API_KEYS_FETCH from backend.util.exceptions import NotAuthorizedError, NotFoundError +from .base import APIAuthorizationInfo + logger = logging.getLogger(__name__) keysmith = APIKeySmith() -class APIKeyInfo(BaseModel): +class APIKeyInfo(APIAuthorizationInfo): id: str name: str head: str = Field( @@ -26,12 +28,9 @@ class APIKeyInfo(BaseModel): description=f"The last {APIKeySmith.TAIL_LENGTH} characters of the key" ) status: APIKeyStatus - permissions: list[APIKeyPermission] - created_at: datetime - last_used_at: Optional[datetime] = None - revoked_at: Optional[datetime] = None description: Optional[str] = None - user_id: str + + type: Literal["api_key"] = "api_key" # type: ignore @staticmethod def from_db(api_key: PrismaAPIKey): @@ -41,7 +40,7 @@ class APIKeyInfo(BaseModel): head=api_key.head, tail=api_key.tail, status=APIKeyStatus(api_key.status), - permissions=[APIKeyPermission(p) for p in api_key.permissions], + scopes=[APIKeyPermission(p) for p in api_key.permissions], created_at=api_key.createdAt, last_used_at=api_key.lastUsedAt, revoked_at=api_key.revokedAt, @@ -211,7 +210,7 @@ async def suspend_api_key(key_id: str, user_id: str) -> APIKeyInfo: def has_permission(api_key: APIKeyInfo, required_permission: APIKeyPermission) -> bool: - return required_permission in api_key.permissions + return required_permission in api_key.scopes async def get_api_key_by_id(key_id: str, user_id: str) -> Optional[APIKeyInfo]: diff --git a/autogpt_platform/backend/backend/data/auth/base.py b/autogpt_platform/backend/backend/data/auth/base.py new file mode 100644 index 0000000000..e307b5f49f 
--- /dev/null +++ b/autogpt_platform/backend/backend/data/auth/base.py @@ -0,0 +1,15 @@ +from datetime import datetime +from typing import Literal, Optional + +from prisma.enums import APIKeyPermission +from pydantic import BaseModel + + +class APIAuthorizationInfo(BaseModel): + user_id: str + scopes: list[APIKeyPermission] + type: Literal["oauth", "api_key"] + created_at: datetime + expires_at: Optional[datetime] = None + last_used_at: Optional[datetime] = None + revoked_at: Optional[datetime] = None diff --git a/autogpt_platform/backend/backend/data/auth/oauth.py b/autogpt_platform/backend/backend/data/auth/oauth.py new file mode 100644 index 0000000000..e49586194c --- /dev/null +++ b/autogpt_platform/backend/backend/data/auth/oauth.py @@ -0,0 +1,872 @@ +""" +OAuth 2.0 Provider Data Layer + +Handles management of OAuth applications, authorization codes, +access tokens, and refresh tokens. + +Hashing strategy: +- Access tokens & Refresh tokens: SHA256 (deterministic, allows direct lookup by hash) +- Client secrets: Scrypt with salt (lookup by client_id, then verify with salt) +""" + +import hashlib +import logging +import secrets +import uuid +from datetime import datetime, timedelta, timezone +from typing import Literal, Optional + +from autogpt_libs.api_key.keysmith import APIKeySmith +from prisma.enums import APIKeyPermission as APIPermission +from prisma.models import OAuthAccessToken as PrismaOAuthAccessToken +from prisma.models import OAuthApplication as PrismaOAuthApplication +from prisma.models import OAuthAuthorizationCode as PrismaOAuthAuthorizationCode +from prisma.models import OAuthRefreshToken as PrismaOAuthRefreshToken +from prisma.types import OAuthApplicationUpdateInput +from pydantic import BaseModel, Field, SecretStr + +from .base import APIAuthorizationInfo + +logger = logging.getLogger(__name__) +keysmith = APIKeySmith() # Only used for client secret hashing (Scrypt) + + +def _generate_token() -> str: + """Generate a cryptographically secure random token.""" + return secrets.token_urlsafe(32) + + +def _hash_token(token: str) -> str: + """Hash a token using SHA256 (deterministic, for direct lookup).""" + return hashlib.sha256(token.encode()).hexdigest() + + +# Token TTLs +AUTHORIZATION_CODE_TTL = timedelta(minutes=10) +ACCESS_TOKEN_TTL = timedelta(hours=1) +REFRESH_TOKEN_TTL = timedelta(days=30) + +ACCESS_TOKEN_PREFIX = "agpt_xt_" +REFRESH_TOKEN_PREFIX = "agpt_rt_" + + +# ============================================================================ +# Exception Classes +# ============================================================================ + + +class OAuthError(Exception): + """Base OAuth error""" + + pass + + +class InvalidClientError(OAuthError): + """Invalid client_id or client_secret""" + + pass + + +class InvalidGrantError(OAuthError): + """Invalid or expired authorization code/refresh token""" + + def __init__(self, reason: str): + self.reason = reason + super().__init__(f"Invalid grant: {reason}") + + +class InvalidTokenError(OAuthError): + """Invalid, expired, or revoked token""" + + def __init__(self, reason: str): + self.reason = reason + super().__init__(f"Invalid token: {reason}") + + +# ============================================================================ +# Data Models +# ============================================================================ + + +class OAuthApplicationInfo(BaseModel): + """OAuth application information (without client secret hash)""" + + id: str + name: str + description: Optional[str] = None + logo_url: Optional[str] 
= None + client_id: str + redirect_uris: list[str] + grant_types: list[str] + scopes: list[APIPermission] + owner_id: str + is_active: bool + created_at: datetime + updated_at: datetime + + @staticmethod + def from_db(app: PrismaOAuthApplication): + return OAuthApplicationInfo( + id=app.id, + name=app.name, + description=app.description, + logo_url=app.logoUrl, + client_id=app.clientId, + redirect_uris=app.redirectUris, + grant_types=app.grantTypes, + scopes=[APIPermission(s) for s in app.scopes], + owner_id=app.ownerId, + is_active=app.isActive, + created_at=app.createdAt, + updated_at=app.updatedAt, + ) + + +class OAuthApplicationInfoWithSecret(OAuthApplicationInfo): + """OAuth application with client secret hash (for validation)""" + + client_secret_hash: str + client_secret_salt: str + + @staticmethod + def from_db(app: PrismaOAuthApplication): + return OAuthApplicationInfoWithSecret( + **OAuthApplicationInfo.from_db(app).model_dump(), + client_secret_hash=app.clientSecret, + client_secret_salt=app.clientSecretSalt, + ) + + def verify_secret(self, plaintext_secret: str) -> bool: + """Verify a plaintext client secret against the stored hash""" + # Use keysmith.verify_key() with stored salt + return keysmith.verify_key( + plaintext_secret, self.client_secret_hash, self.client_secret_salt + ) + + +class OAuthAuthorizationCodeInfo(BaseModel): + """Authorization code information""" + + id: str + code: str + created_at: datetime + expires_at: datetime + application_id: str + user_id: str + scopes: list[APIPermission] + redirect_uri: str + code_challenge: Optional[str] = None + code_challenge_method: Optional[str] = None + used_at: Optional[datetime] = None + + @property + def is_used(self) -> bool: + return self.used_at is not None + + @staticmethod + def from_db(code: PrismaOAuthAuthorizationCode): + return OAuthAuthorizationCodeInfo( + id=code.id, + code=code.code, + created_at=code.createdAt, + expires_at=code.expiresAt, + application_id=code.applicationId, + user_id=code.userId, + scopes=[APIPermission(s) for s in code.scopes], + redirect_uri=code.redirectUri, + code_challenge=code.codeChallenge, + code_challenge_method=code.codeChallengeMethod, + used_at=code.usedAt, + ) + + +class OAuthAccessTokenInfo(APIAuthorizationInfo): + """Access token information""" + + id: str + expires_at: datetime # type: ignore + application_id: str + + type: Literal["oauth"] = "oauth" # type: ignore + + @staticmethod + def from_db(token: PrismaOAuthAccessToken): + return OAuthAccessTokenInfo( + id=token.id, + user_id=token.userId, + scopes=[APIPermission(s) for s in token.scopes], + created_at=token.createdAt, + expires_at=token.expiresAt, + last_used_at=None, + revoked_at=token.revokedAt, + application_id=token.applicationId, + ) + + +class OAuthAccessToken(OAuthAccessTokenInfo): + """Access token with plaintext token included (sensitive)""" + + token: SecretStr = Field(description="Plaintext token (sensitive)") + + @staticmethod + def from_db(token: PrismaOAuthAccessToken, plaintext_token: str): # type: ignore + return OAuthAccessToken( + **OAuthAccessTokenInfo.from_db(token).model_dump(), + token=SecretStr(plaintext_token), + ) + + +class OAuthRefreshTokenInfo(BaseModel): + """Refresh token information""" + + id: str + user_id: str + scopes: list[APIPermission] + created_at: datetime + expires_at: datetime + application_id: str + revoked_at: Optional[datetime] = None + + @property + def is_revoked(self) -> bool: + return self.revoked_at is not None + + @staticmethod + def from_db(token: 
PrismaOAuthRefreshToken): + return OAuthRefreshTokenInfo( + id=token.id, + user_id=token.userId, + scopes=[APIPermission(s) for s in token.scopes], + created_at=token.createdAt, + expires_at=token.expiresAt, + application_id=token.applicationId, + revoked_at=token.revokedAt, + ) + + +class OAuthRefreshToken(OAuthRefreshTokenInfo): + """Refresh token with plaintext token included (sensitive)""" + + token: SecretStr = Field(description="Plaintext token (sensitive)") + + @staticmethod + def from_db(token: PrismaOAuthRefreshToken, plaintext_token: str): # type: ignore + return OAuthRefreshToken( + **OAuthRefreshTokenInfo.from_db(token).model_dump(), + token=SecretStr(plaintext_token), + ) + + +class TokenIntrospectionResult(BaseModel): + """Result of token introspection (RFC 7662)""" + + active: bool + scopes: Optional[list[str]] = None + client_id: Optional[str] = None + user_id: Optional[str] = None + exp: Optional[int] = None # Unix timestamp + token_type: Optional[Literal["access_token", "refresh_token"]] = None + + +# ============================================================================ +# OAuth Application Management +# ============================================================================ + + +async def get_oauth_application(client_id: str) -> Optional[OAuthApplicationInfo]: + """Get OAuth application by client ID (without secret)""" + app = await PrismaOAuthApplication.prisma().find_unique( + where={"clientId": client_id} + ) + if not app: + return None + return OAuthApplicationInfo.from_db(app) + + +async def get_oauth_application_with_secret( + client_id: str, +) -> Optional[OAuthApplicationInfoWithSecret]: + """Get OAuth application by client ID (with secret hash for validation)""" + app = await PrismaOAuthApplication.prisma().find_unique( + where={"clientId": client_id} + ) + if not app: + return None + return OAuthApplicationInfoWithSecret.from_db(app) + + +async def validate_client_credentials( + client_id: str, client_secret: str +) -> OAuthApplicationInfo: + """ + Validate client credentials and return application info. 
+ + Raises: + InvalidClientError: If client_id or client_secret is invalid, or app is inactive + """ + app = await get_oauth_application_with_secret(client_id) + if not app: + raise InvalidClientError("Invalid client_id") + + if not app.is_active: + raise InvalidClientError("Application is not active") + + # Verify client secret + if not app.verify_secret(client_secret): + raise InvalidClientError("Invalid client_secret") + + # Return without secret hash + return OAuthApplicationInfo(**app.model_dump(exclude={"client_secret_hash"})) + + +def validate_redirect_uri(app: OAuthApplicationInfo, redirect_uri: str) -> bool: + """Validate that redirect URI is registered for the application""" + return redirect_uri in app.redirect_uris + + +def validate_scopes( + app: OAuthApplicationInfo, requested_scopes: list[APIPermission] +) -> bool: + """Validate that all requested scopes are allowed for the application""" + return all(scope in app.scopes for scope in requested_scopes) + + +# ============================================================================ +# Authorization Code Flow +# ============================================================================ + + +def _generate_authorization_code() -> str: + """Generate a cryptographically secure authorization code""" + # 32 bytes = 256 bits of entropy + return secrets.token_urlsafe(32) + + +async def create_authorization_code( + application_id: str, + user_id: str, + scopes: list[APIPermission], + redirect_uri: str, + code_challenge: Optional[str] = None, + code_challenge_method: Optional[Literal["S256", "plain"]] = None, +) -> OAuthAuthorizationCodeInfo: + """ + Create a new authorization code. + Expires in 10 minutes and can only be used once. + """ + code = _generate_authorization_code() + now = datetime.now(timezone.utc) + expires_at = now + AUTHORIZATION_CODE_TTL + + saved_code = await PrismaOAuthAuthorizationCode.prisma().create( + data={ + "id": str(uuid.uuid4()), + "code": code, + "expiresAt": expires_at, + "applicationId": application_id, + "userId": user_id, + "scopes": [s for s in scopes], + "redirectUri": redirect_uri, + "codeChallenge": code_challenge, + "codeChallengeMethod": code_challenge_method, + } + ) + + return OAuthAuthorizationCodeInfo.from_db(saved_code) + + +async def consume_authorization_code( + code: str, + application_id: str, + redirect_uri: str, + code_verifier: Optional[str] = None, +) -> tuple[str, list[APIPermission]]: + """ + Consume an authorization code and return (user_id, scopes). 
+ + This marks the code as used and validates: + - Code exists and matches application + - Code is not expired + - Code has not been used + - Redirect URI matches + - PKCE code verifier matches (if code challenge was provided) + + Raises: + InvalidGrantError: If code is invalid, expired, used, or PKCE fails + """ + auth_code = await PrismaOAuthAuthorizationCode.prisma().find_unique( + where={"code": code} + ) + + if not auth_code: + raise InvalidGrantError("authorization code not found") + + # Validate application + if auth_code.applicationId != application_id: + raise InvalidGrantError( + "authorization code does not belong to this application" + ) + + # Check if already used + if auth_code.usedAt is not None: + raise InvalidGrantError( + f"authorization code already used at {auth_code.usedAt}" + ) + + # Check expiration + now = datetime.now(timezone.utc) + if auth_code.expiresAt < now: + raise InvalidGrantError("authorization code expired") + + # Validate redirect URI + if auth_code.redirectUri != redirect_uri: + raise InvalidGrantError("redirect_uri mismatch") + + # Validate PKCE if code challenge was provided + if auth_code.codeChallenge: + if not code_verifier: + raise InvalidGrantError("code_verifier required but not provided") + + if not _verify_pkce( + code_verifier, auth_code.codeChallenge, auth_code.codeChallengeMethod + ): + raise InvalidGrantError("PKCE verification failed") + + # Mark code as used + await PrismaOAuthAuthorizationCode.prisma().update( + where={"code": code}, + data={"usedAt": now}, + ) + + return auth_code.userId, [APIPermission(s) for s in auth_code.scopes] + + +def _verify_pkce( + code_verifier: str, code_challenge: str, code_challenge_method: Optional[str] +) -> bool: + """ + Verify PKCE code verifier against code challenge. + + Supports: + - S256: SHA256(code_verifier) == code_challenge + - plain: code_verifier == code_challenge + """ + if code_challenge_method == "S256": + # Hash the verifier with SHA256 and base64url encode + hashed = hashlib.sha256(code_verifier.encode("ascii")).digest() + computed_challenge = ( + secrets.token_urlsafe(len(hashed)).encode("ascii").decode("ascii") + ) + # For proper base64url encoding + import base64 + + computed_challenge = ( + base64.urlsafe_b64encode(hashed).decode("ascii").rstrip("=") + ) + return secrets.compare_digest(computed_challenge, code_challenge) + elif code_challenge_method == "plain" or code_challenge_method is None: + # Plain comparison + return secrets.compare_digest(code_verifier, code_challenge) + else: + logger.warning(f"Unsupported code challenge method: {code_challenge_method}") + return False + + +# ============================================================================ +# Access Token Management +# ============================================================================ + + +async def create_access_token( + application_id: str, user_id: str, scopes: list[APIPermission] +) -> OAuthAccessToken: + """ + Create a new access token. + Returns OAuthAccessToken (with plaintext token). 
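    Storage note (illustrative): only the SHA256 hash of the plaintext token is
    persisted, so later validation can recompute the hash and look the row up
    directly, e.g.

        stored_value = _hash_token(plaintext_token)  # value kept in the `token` column
        # validate_access_token(plaintext_token) recomputes this hash for its lookup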
+ """ + plaintext_token = ACCESS_TOKEN_PREFIX + _generate_token() + token_hash = _hash_token(plaintext_token) + now = datetime.now(timezone.utc) + expires_at = now + ACCESS_TOKEN_TTL + + saved_token = await PrismaOAuthAccessToken.prisma().create( + data={ + "id": str(uuid.uuid4()), + "token": token_hash, # SHA256 hash for direct lookup + "expiresAt": expires_at, + "applicationId": application_id, + "userId": user_id, + "scopes": [s for s in scopes], + } + ) + + return OAuthAccessToken.from_db(saved_token, plaintext_token=plaintext_token) + + +async def validate_access_token( + token: str, +) -> tuple[OAuthAccessTokenInfo, OAuthApplicationInfo]: + """ + Validate an access token and return token info. + + Raises: + InvalidTokenError: If token is invalid, expired, or revoked + InvalidClientError: If the client application is not marked as active + """ + token_hash = _hash_token(token) + + # Direct lookup by hash + access_token = await PrismaOAuthAccessToken.prisma().find_unique( + where={"token": token_hash}, include={"Application": True} + ) + + if not access_token: + raise InvalidTokenError("access token not found") + + if not access_token.Application: # should be impossible + raise InvalidClientError("Client application not found") + + if not access_token.Application.isActive: + raise InvalidClientError("Client application is disabled") + + if access_token.revokedAt is not None: + raise InvalidTokenError("access token has been revoked") + + # Check expiration + now = datetime.now(timezone.utc) + if access_token.expiresAt < now: + raise InvalidTokenError("access token expired") + + return ( + OAuthAccessTokenInfo.from_db(access_token), + OAuthApplicationInfo.from_db(access_token.Application), + ) + + +async def revoke_access_token( + token: str, application_id: str +) -> OAuthAccessTokenInfo | None: + """ + Revoke an access token. + + Args: + token: The plaintext access token to revoke + application_id: The application ID making the revocation request. + Only tokens belonging to this application will be revoked. + + Returns: + OAuthAccessTokenInfo if token was found and revoked, None otherwise. + + Note: + Always performs exactly 2 DB queries regardless of outcome to prevent + timing side-channel attacks that could reveal token existence. + """ + try: + token_hash = _hash_token(token) + + # Use update_many to filter by both token and applicationId + updated_count = await PrismaOAuthAccessToken.prisma().update_many( + where={ + "token": token_hash, + "applicationId": application_id, + "revokedAt": None, + }, + data={"revokedAt": datetime.now(timezone.utc)}, + ) + + # Always perform second query to ensure constant time + result = await PrismaOAuthAccessToken.prisma().find_unique( + where={"token": token_hash} + ) + + # Only return result if we actually revoked something + if updated_count == 0: + return None + + return OAuthAccessTokenInfo.from_db(result) if result else None + except Exception as e: + logger.exception(f"Error revoking access token: {e}") + return None + + +# ============================================================================ +# Refresh Token Management +# ============================================================================ + + +async def create_refresh_token( + application_id: str, user_id: str, scopes: list[APIPermission] +) -> OAuthRefreshToken: + """ + Create a new refresh token. + Returns OAuthRefreshToken (with plaintext token). 
+ """ + plaintext_token = REFRESH_TOKEN_PREFIX + _generate_token() + token_hash = _hash_token(plaintext_token) + now = datetime.now(timezone.utc) + expires_at = now + REFRESH_TOKEN_TTL + + saved_token = await PrismaOAuthRefreshToken.prisma().create( + data={ + "id": str(uuid.uuid4()), + "token": token_hash, # SHA256 hash for direct lookup + "expiresAt": expires_at, + "applicationId": application_id, + "userId": user_id, + "scopes": [s for s in scopes], + } + ) + + return OAuthRefreshToken.from_db(saved_token, plaintext_token=plaintext_token) + + +async def refresh_tokens( + refresh_token: str, application_id: str +) -> tuple[OAuthAccessToken, OAuthRefreshToken]: + """ + Use a refresh token to create new access and refresh tokens. + Returns (new_access_token, new_refresh_token) both with plaintext tokens included. + + Raises: + InvalidGrantError: If refresh token is invalid, expired, or revoked + """ + token_hash = _hash_token(refresh_token) + + # Direct lookup by hash + rt = await PrismaOAuthRefreshToken.prisma().find_unique(where={"token": token_hash}) + + if not rt: + raise InvalidGrantError("refresh token not found") + + # NOTE: no need to check Application.isActive, this is checked by the token endpoint + + if rt.revokedAt is not None: + raise InvalidGrantError("refresh token has been revoked") + + # Validate application + if rt.applicationId != application_id: + raise InvalidGrantError("refresh token does not belong to this application") + + # Check expiration + now = datetime.now(timezone.utc) + if rt.expiresAt < now: + raise InvalidGrantError("refresh token expired") + + # Revoke old refresh token + await PrismaOAuthRefreshToken.prisma().update( + where={"token": token_hash}, + data={"revokedAt": now}, + ) + + # Create new access and refresh tokens with same scopes + scopes = [APIPermission(s) for s in rt.scopes] + new_access_token = await create_access_token( + rt.applicationId, + rt.userId, + scopes, + ) + new_refresh_token = await create_refresh_token( + rt.applicationId, + rt.userId, + scopes, + ) + + return new_access_token, new_refresh_token + + +async def revoke_refresh_token( + token: str, application_id: str +) -> OAuthRefreshTokenInfo | None: + """ + Revoke a refresh token. + + Args: + token: The plaintext refresh token to revoke + application_id: The application ID making the revocation request. + Only tokens belonging to this application will be revoked. + + Returns: + OAuthRefreshTokenInfo if token was found and revoked, None otherwise. + + Note: + Always performs exactly 2 DB queries regardless of outcome to prevent + timing side-channel attacks that could reveal token existence. 
+ """ + try: + token_hash = _hash_token(token) + + # Use update_many to filter by both token and applicationId + updated_count = await PrismaOAuthRefreshToken.prisma().update_many( + where={ + "token": token_hash, + "applicationId": application_id, + "revokedAt": None, + }, + data={"revokedAt": datetime.now(timezone.utc)}, + ) + + # Always perform second query to ensure constant time + result = await PrismaOAuthRefreshToken.prisma().find_unique( + where={"token": token_hash} + ) + + # Only return result if we actually revoked something + if updated_count == 0: + return None + + return OAuthRefreshTokenInfo.from_db(result) if result else None + except Exception as e: + logger.exception(f"Error revoking refresh token: {e}") + return None + + +# ============================================================================ +# Token Introspection +# ============================================================================ + + +async def introspect_token( + token: str, + token_type_hint: Optional[Literal["access_token", "refresh_token"]] = None, +) -> TokenIntrospectionResult: + """ + Introspect a token and return its metadata (RFC 7662). + + Returns TokenIntrospectionResult with active=True and metadata if valid, + or active=False if the token is invalid/expired/revoked. + """ + # Try as access token first (or if hint says "access_token") + if token_type_hint != "refresh_token": + try: + token_info, app = await validate_access_token(token) + return TokenIntrospectionResult( + active=True, + scopes=list(s.value for s in token_info.scopes), + client_id=app.client_id if app else None, + user_id=token_info.user_id, + exp=int(token_info.expires_at.timestamp()), + token_type="access_token", + ) + except InvalidTokenError: + pass # Try as refresh token + + # Try as refresh token + token_hash = _hash_token(token) + refresh_token = await PrismaOAuthRefreshToken.prisma().find_unique( + where={"token": token_hash} + ) + + if refresh_token and refresh_token.revokedAt is None: + # Check if valid (not expired) + now = datetime.now(timezone.utc) + if refresh_token.expiresAt > now: + app = await get_oauth_application_by_id(refresh_token.applicationId) + return TokenIntrospectionResult( + active=True, + scopes=list(s for s in refresh_token.scopes), + client_id=app.client_id if app else None, + user_id=refresh_token.userId, + exp=int(refresh_token.expiresAt.timestamp()), + token_type="refresh_token", + ) + + # Token not found or inactive + return TokenIntrospectionResult(active=False) + + +async def get_oauth_application_by_id(app_id: str) -> Optional[OAuthApplicationInfo]: + """Get OAuth application by ID""" + app = await PrismaOAuthApplication.prisma().find_unique(where={"id": app_id}) + if not app: + return None + return OAuthApplicationInfo.from_db(app) + + +async def list_user_oauth_applications(user_id: str) -> list[OAuthApplicationInfo]: + """Get all OAuth applications owned by a user""" + apps = await PrismaOAuthApplication.prisma().find_many( + where={"ownerId": user_id}, + order={"createdAt": "desc"}, + ) + return [OAuthApplicationInfo.from_db(app) for app in apps] + + +async def update_oauth_application( + app_id: str, + *, + owner_id: str, + is_active: Optional[bool] = None, + logo_url: Optional[str] = None, +) -> Optional[OAuthApplicationInfo]: + """ + Update OAuth application active status. + Only the owner can update their app's status. + + Returns the updated app info, or None if app not found or not owned by user. 
+ """ + # First verify ownership + app = await PrismaOAuthApplication.prisma().find_first( + where={"id": app_id, "ownerId": owner_id} + ) + if not app: + return None + + patch: OAuthApplicationUpdateInput = {} + if is_active is not None: + patch["isActive"] = is_active + if logo_url: + patch["logoUrl"] = logo_url + if not patch: + return OAuthApplicationInfo.from_db(app) # return unchanged + + updated_app = await PrismaOAuthApplication.prisma().update( + where={"id": app_id}, + data=patch, + ) + return OAuthApplicationInfo.from_db(updated_app) if updated_app else None + + +# ============================================================================ +# Token Cleanup +# ============================================================================ + + +async def cleanup_expired_oauth_tokens() -> dict[str, int]: + """ + Delete expired OAuth tokens from the database. + + This removes: + - Expired authorization codes (10 min TTL) + - Expired access tokens (1 hour TTL) + - Expired refresh tokens (30 day TTL) + + Returns a dict with counts of deleted tokens by type. + """ + now = datetime.now(timezone.utc) + + # Delete expired authorization codes + codes_result = await PrismaOAuthAuthorizationCode.prisma().delete_many( + where={"expiresAt": {"lt": now}} + ) + + # Delete expired access tokens + access_result = await PrismaOAuthAccessToken.prisma().delete_many( + where={"expiresAt": {"lt": now}} + ) + + # Delete expired refresh tokens + refresh_result = await PrismaOAuthRefreshToken.prisma().delete_many( + where={"expiresAt": {"lt": now}} + ) + + deleted = { + "authorization_codes": codes_result, + "access_tokens": access_result, + "refresh_tokens": refresh_result, + } + + total = sum(deleted.values()) + if total > 0: + logger.info(f"Cleaned up {total} expired OAuth tokens: {deleted}") + + return deleted diff --git a/autogpt_platform/backend/backend/executor/scheduler.py b/autogpt_platform/backend/backend/executor/scheduler.py index 6a0bb593c6..06c50bf82e 100644 --- a/autogpt_platform/backend/backend/executor/scheduler.py +++ b/autogpt_platform/backend/backend/executor/scheduler.py @@ -23,6 +23,7 @@ from dotenv import load_dotenv from pydantic import BaseModel, Field, ValidationError from sqlalchemy import MetaData, create_engine +from backend.data.auth.oauth import cleanup_expired_oauth_tokens from backend.data.block import BlockInput from backend.data.execution import GraphExecutionWithNodes from backend.data.model import CredentialsMetaInput @@ -242,6 +243,12 @@ def cleanup_expired_files(): run_async(cleanup_expired_files_async()) +def cleanup_oauth_tokens(): + """Clean up expired OAuth tokens from the database.""" + # Wait for completion + run_async(cleanup_expired_oauth_tokens()) + + def execution_accuracy_alerts(): """Check execution accuracy and send alerts if drops are detected.""" return report_execution_accuracy_alerts() @@ -446,6 +453,17 @@ class Scheduler(AppService): jobstore=Jobstores.EXECUTION.value, ) + # OAuth Token Cleanup - configurable interval + self.scheduler.add_job( + cleanup_oauth_tokens, + id="cleanup_oauth_tokens", + trigger="interval", + replace_existing=True, + seconds=config.oauth_token_cleanup_interval_hours + * 3600, # Convert hours to seconds + jobstore=Jobstores.EXECUTION.value, + ) + # Execution Accuracy Monitoring - configurable interval self.scheduler.add_job( execution_accuracy_alerts, @@ -604,6 +622,11 @@ class Scheduler(AppService): """Manually trigger cleanup of expired cloud storage files.""" return cleanup_expired_files() + @expose + def 
execute_cleanup_oauth_tokens(self): + """Manually trigger cleanup of expired OAuth tokens.""" + return cleanup_oauth_tokens() + @expose def execute_report_execution_accuracy_alerts(self): """Manually trigger execution accuracy alert checking.""" diff --git a/autogpt_platform/backend/backend/server/external/middleware.py b/autogpt_platform/backend/backend/server/external/middleware.py index af84c92687..0c278e1715 100644 --- a/autogpt_platform/backend/backend/server/external/middleware.py +++ b/autogpt_platform/backend/backend/server/external/middleware.py @@ -1,36 +1,107 @@ -from fastapi import HTTPException, Security -from fastapi.security import APIKeyHeader +from fastapi import HTTPException, Security, status +from fastapi.security import APIKeyHeader, HTTPAuthorizationCredentials, HTTPBearer from prisma.enums import APIKeyPermission -from backend.data.api_key import APIKeyInfo, has_permission, validate_api_key +from backend.data.auth.api_key import APIKeyInfo, validate_api_key +from backend.data.auth.base import APIAuthorizationInfo +from backend.data.auth.oauth import ( + InvalidClientError, + InvalidTokenError, + OAuthAccessTokenInfo, + validate_access_token, +) api_key_header = APIKeyHeader(name="X-API-Key", auto_error=False) +bearer_auth = HTTPBearer(auto_error=False) async def require_api_key(api_key: str | None = Security(api_key_header)) -> APIKeyInfo: - """Base middleware for API key authentication""" + """Middleware for API key authentication only""" if api_key is None: - raise HTTPException(status_code=401, detail="Missing API key") + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail="Missing API key" + ) api_key_obj = await validate_api_key(api_key) if not api_key_obj: - raise HTTPException(status_code=401, detail="Invalid API key") + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid API key" + ) return api_key_obj +async def require_access_token( + bearer: HTTPAuthorizationCredentials | None = Security(bearer_auth), +) -> OAuthAccessTokenInfo: + """Middleware for OAuth access token authentication only""" + if bearer is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Missing Authorization header", + ) + + try: + token_info, _ = await validate_access_token(bearer.credentials) + except (InvalidClientError, InvalidTokenError) as e: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=str(e)) + + return token_info + + +async def require_auth( + api_key: str | None = Security(api_key_header), + bearer: HTTPAuthorizationCredentials | None = Security(bearer_auth), +) -> APIAuthorizationInfo: + """ + Unified authentication middleware supporting both API keys and OAuth tokens. + + Supports two authentication methods, which are checked in order: + 1. X-API-Key header (existing API key authentication) + 2. Authorization: Bearer header (OAuth access token) + + Returns: + APIAuthorizationInfo: base class of both APIKeyInfo and OAuthAccessTokenInfo. 
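    For illustration, a call to the external API can present either credential; a
    minimal sketch (base URL path and token values are placeholders, and the
    `requests` dependency is assumed purely for brevity):

        import requests

        BASE = "http://localhost:8006/external-api"  # external app mount; path below is illustrative

        # Existing API key authentication
        requests.get(f"{BASE}/v1/blocks", headers={"X-API-Key": "agpt_..."})

        # OAuth access token authentication (note the agpt_xt_ prefix)
        requests.get(f"{BASE}/v1/blocks", headers={"Authorization": "Bearer agpt_xt_..."})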
+ """ + # Try API key first + if api_key is not None: + api_key_info = await validate_api_key(api_key) + if api_key_info: + return api_key_info + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid API key" + ) + + # Try OAuth bearer token + if bearer is not None: + try: + token_info, _ = await validate_access_token(bearer.credentials) + return token_info + except (InvalidClientError, InvalidTokenError) as e: + raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=str(e)) + + # No credentials provided + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Missing authentication. Provide API key or access token.", + ) + + def require_permission(permission: APIKeyPermission): - """Dependency function for checking specific permissions""" + """ + Dependency function for checking specific permissions + (works with API keys and OAuth tokens) + """ async def check_permission( - api_key: APIKeyInfo = Security(require_api_key), - ) -> APIKeyInfo: - if not has_permission(api_key, permission): + auth: APIAuthorizationInfo = Security(require_auth), + ) -> APIAuthorizationInfo: + if permission not in auth.scopes: raise HTTPException( - status_code=403, - detail=f"API key lacks the required permission '{permission}'", + status_code=status.HTTP_403_FORBIDDEN, + detail=f"Missing required permission: {permission.value}", ) - return api_key + return auth return check_permission diff --git a/autogpt_platform/backend/backend/server/external/routes/integrations.py b/autogpt_platform/backend/backend/server/external/routes/integrations.py index d64ca5615f..f9a8875ada 100644 --- a/autogpt_platform/backend/backend/server/external/routes/integrations.py +++ b/autogpt_platform/backend/backend/server/external/routes/integrations.py @@ -16,7 +16,7 @@ from fastapi import APIRouter, Body, HTTPException, Path, Security, status from prisma.enums import APIKeyPermission from pydantic import BaseModel, Field, SecretStr -from backend.data.api_key import APIKeyInfo +from backend.data.auth.base import APIAuthorizationInfo from backend.data.model import ( APIKeyCredentials, Credentials, @@ -255,7 +255,7 @@ def _get_oauth_handler_for_external( @integrations_router.get("/providers", response_model=list[ProviderInfo]) async def list_providers( - api_key: APIKeyInfo = Security( + auth: APIAuthorizationInfo = Security( require_permission(APIKeyPermission.READ_INTEGRATIONS) ), ) -> list[ProviderInfo]: @@ -319,7 +319,7 @@ async def list_providers( async def initiate_oauth( provider: Annotated[str, Path(title="The OAuth provider")], request: OAuthInitiateRequest, - api_key: APIKeyInfo = Security( + auth: APIAuthorizationInfo = Security( require_permission(APIKeyPermission.MANAGE_INTEGRATIONS) ), ) -> OAuthInitiateResponse: @@ -337,7 +337,10 @@ async def initiate_oauth( if not validate_callback_url(request.callback_url): raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f"Callback URL origin is not allowed. Allowed origins: {settings.config.external_oauth_callback_origins}", + detail=( + f"Callback URL origin is not allowed. 
" + f"Allowed origins: {settings.config.external_oauth_callback_origins}", + ), ) # Validate provider @@ -359,13 +362,15 @@ async def initiate_oauth( ) # Store state token with external flow metadata + # Note: initiated_by_api_key_id is only available for API key auth, not OAuth + api_key_id = getattr(auth, "id", None) if auth.type == "api_key" else None state_token, code_challenge = await creds_manager.store.store_state_token( - user_id=api_key.user_id, + user_id=auth.user_id, provider=provider if isinstance(provider_name, str) else provider_name.value, scopes=request.scopes, callback_url=request.callback_url, state_metadata=request.state_metadata, - initiated_by_api_key_id=api_key.id, + initiated_by_api_key_id=api_key_id, ) # Build login URL @@ -393,7 +398,7 @@ async def initiate_oauth( async def complete_oauth( provider: Annotated[str, Path(title="The OAuth provider")], request: OAuthCompleteRequest, - api_key: APIKeyInfo = Security( + auth: APIAuthorizationInfo = Security( require_permission(APIKeyPermission.MANAGE_INTEGRATIONS) ), ) -> OAuthCompleteResponse: @@ -406,7 +411,7 @@ async def complete_oauth( """ # Verify state token valid_state = await creds_manager.store.verify_state_token( - api_key.user_id, request.state_token, provider + auth.user_id, request.state_token, provider ) if not valid_state: @@ -453,7 +458,7 @@ async def complete_oauth( ) # Store credentials - await creds_manager.create(api_key.user_id, credentials) + await creds_manager.create(auth.user_id, credentials) logger.info(f"Successfully completed external OAuth for provider {provider}") @@ -470,7 +475,7 @@ async def complete_oauth( @integrations_router.get("/credentials", response_model=list[CredentialSummary]) async def list_credentials( - api_key: APIKeyInfo = Security( + auth: APIAuthorizationInfo = Security( require_permission(APIKeyPermission.READ_INTEGRATIONS) ), ) -> list[CredentialSummary]: @@ -479,7 +484,7 @@ async def list_credentials( Returns metadata about each credential without exposing sensitive tokens. """ - credentials = await creds_manager.store.get_all_creds(api_key.user_id) + credentials = await creds_manager.store.get_all_creds(auth.user_id) return [ CredentialSummary( id=cred.id, @@ -499,7 +504,7 @@ async def list_credentials( ) async def list_credentials_by_provider( provider: Annotated[str, Path(title="The provider to list credentials for")], - api_key: APIKeyInfo = Security( + auth: APIAuthorizationInfo = Security( require_permission(APIKeyPermission.READ_INTEGRATIONS) ), ) -> list[CredentialSummary]: @@ -507,7 +512,7 @@ async def list_credentials_by_provider( List credentials for a specific provider. 
""" credentials = await creds_manager.store.get_creds_by_provider( - api_key.user_id, provider + auth.user_id, provider ) return [ CredentialSummary( @@ -536,7 +541,7 @@ async def create_credential( CreateUserPasswordCredentialRequest, CreateHostScopedCredentialRequest, ] = Body(..., discriminator="type"), - api_key: APIKeyInfo = Security( + auth: APIAuthorizationInfo = Security( require_permission(APIKeyPermission.MANAGE_INTEGRATIONS) ), ) -> CreateCredentialResponse: @@ -591,7 +596,7 @@ async def create_credential( # Store credentials try: - await creds_manager.create(api_key.user_id, credentials) + await creds_manager.create(auth.user_id, credentials) except Exception as e: logger.error(f"Failed to store credentials: {e}") raise HTTPException( @@ -623,7 +628,7 @@ class DeleteCredentialResponse(BaseModel): async def delete_credential( provider: Annotated[str, Path(title="The provider")], cred_id: Annotated[str, Path(title="The credential ID to delete")], - api_key: APIKeyInfo = Security( + auth: APIAuthorizationInfo = Security( require_permission(APIKeyPermission.DELETE_INTEGRATIONS) ), ) -> DeleteCredentialResponse: @@ -634,7 +639,7 @@ async def delete_credential( use the main API's delete endpoint which handles webhook cleanup and token revocation. """ - creds = await creds_manager.store.get_creds_by_id(api_key.user_id, cred_id) + creds = await creds_manager.store.get_creds_by_id(auth.user_id, cred_id) if not creds: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail="Credentials not found" @@ -645,6 +650,6 @@ async def delete_credential( detail="Credentials do not match the specified provider", ) - await creds_manager.delete(api_key.user_id, cred_id) + await creds_manager.delete(auth.user_id, cred_id) return DeleteCredentialResponse(deleted=True, credentials_id=cred_id) diff --git a/autogpt_platform/backend/backend/server/external/routes/tools.py b/autogpt_platform/backend/backend/server/external/routes/tools.py index 3a821c5be8..8e3f4cbfdb 100644 --- a/autogpt_platform/backend/backend/server/external/routes/tools.py +++ b/autogpt_platform/backend/backend/server/external/routes/tools.py @@ -14,7 +14,7 @@ from fastapi import APIRouter, Security from prisma.enums import APIKeyPermission from pydantic import BaseModel, Field -from backend.data.api_key import APIKeyInfo +from backend.data.auth.base import APIAuthorizationInfo from backend.server.external.middleware import require_permission from backend.server.v2.chat.model import ChatSession from backend.server.v2.chat.tools import find_agent_tool, run_agent_tool @@ -24,9 +24,9 @@ logger = logging.getLogger(__name__) tools_router = APIRouter(prefix="/tools", tags=["tools"]) -# Note: We use Security() as a function parameter dependency (api_key: APIKeyInfo = Security(...)) +# Note: We use Security() as a function parameter dependency (auth: APIAuthorizationInfo = Security(...)) # rather than in the decorator's dependencies= list. This avoids duplicate permission checks -# while still enforcing auth AND giving us access to the api_key for extracting user_id. +# while still enforcing auth AND giving us access to auth for extracting user_id. 
# Request models @@ -80,7 +80,9 @@ def _create_ephemeral_session(user_id: str | None) -> ChatSession: ) async def find_agent( request: FindAgentRequest, - api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.USE_TOOLS)), + auth: APIAuthorizationInfo = Security( + require_permission(APIKeyPermission.USE_TOOLS) + ), ) -> dict[str, Any]: """ Search for agents in the marketplace based on capabilities and user needs. @@ -91,9 +93,9 @@ async def find_agent( Returns: List of matching agents or no results response """ - session = _create_ephemeral_session(api_key.user_id) + session = _create_ephemeral_session(auth.user_id) result = await find_agent_tool._execute( - user_id=api_key.user_id, + user_id=auth.user_id, session=session, query=request.query, ) @@ -105,7 +107,9 @@ async def find_agent( ) async def run_agent( request: RunAgentRequest, - api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.USE_TOOLS)), + auth: APIAuthorizationInfo = Security( + require_permission(APIKeyPermission.USE_TOOLS) + ), ) -> dict[str, Any]: """ Run or schedule an agent from the marketplace. @@ -129,9 +133,9 @@ async def run_agent( - execution_started: If agent was run or scheduled successfully - error: If something went wrong """ - session = _create_ephemeral_session(api_key.user_id) + session = _create_ephemeral_session(auth.user_id) result = await run_agent_tool._execute( - user_id=api_key.user_id, + user_id=auth.user_id, session=session, username_agent_slug=request.username_agent_slug, inputs=request.inputs, diff --git a/autogpt_platform/backend/backend/server/external/routes/v1.py b/autogpt_platform/backend/backend/server/external/routes/v1.py index 1b2840acf9..f83673465a 100644 --- a/autogpt_platform/backend/backend/server/external/routes/v1.py +++ b/autogpt_platform/backend/backend/server/external/routes/v1.py @@ -5,6 +5,7 @@ from typing import Annotated, Any, Literal, Optional, Sequence from fastapi import APIRouter, Body, HTTPException, Security from prisma.enums import AgentExecutionStatus, APIKeyPermission +from pydantic import BaseModel, Field from typing_extensions import TypedDict import backend.data.block @@ -12,7 +13,8 @@ import backend.server.v2.store.cache as store_cache import backend.server.v2.store.model as store_model from backend.data import execution as execution_db from backend.data import graph as graph_db -from backend.data.api_key import APIKeyInfo +from backend.data import user as user_db +from backend.data.auth.base import APIAuthorizationInfo from backend.data.block import BlockInput, CompletedBlockOutput from backend.executor.utils import add_graph_execution from backend.server.external.middleware import require_permission @@ -24,27 +26,33 @@ logger = logging.getLogger(__name__) v1_router = APIRouter() -class NodeOutput(TypedDict): - key: str - value: Any +class UserInfoResponse(BaseModel): + id: str + name: Optional[str] + email: str + timezone: str = Field( + description="The user's last known timezone (e.g. 
'Europe/Amsterdam'), " + "or 'not-set' if not set" + ) -class ExecutionNode(TypedDict): - node_id: str - input: Any - output: dict[str, Any] +@v1_router.get( + path="/me", + tags=["user", "meta"], +) +async def get_user_info( + auth: APIAuthorizationInfo = Security( + require_permission(APIKeyPermission.IDENTITY) + ), +) -> UserInfoResponse: + user = await user_db.get_user_by_id(auth.user_id) - -class ExecutionNodeOutput(TypedDict): - node_id: str - outputs: list[NodeOutput] - - -class GraphExecutionResult(TypedDict): - execution_id: str - status: str - nodes: list[ExecutionNode] - output: Optional[list[dict[str, str]]] + return UserInfoResponse( + id=user.id, + name=user.name, + email=user.email, + timezone=user.timezone, + ) @v1_router.get( @@ -65,7 +73,9 @@ async def get_graph_blocks() -> Sequence[dict[Any, Any]]: async def execute_graph_block( block_id: str, data: BlockInput, - api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.EXECUTE_BLOCK)), + auth: APIAuthorizationInfo = Security( + require_permission(APIKeyPermission.EXECUTE_BLOCK) + ), ) -> CompletedBlockOutput: obj = backend.data.block.get_block(block_id) if not obj: @@ -85,12 +95,14 @@ async def execute_graph( graph_id: str, graph_version: int, node_input: Annotated[dict[str, Any], Body(..., embed=True, default_factory=dict)], - api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.EXECUTE_GRAPH)), + auth: APIAuthorizationInfo = Security( + require_permission(APIKeyPermission.EXECUTE_GRAPH) + ), ) -> dict[str, Any]: try: graph_exec = await add_graph_execution( graph_id=graph_id, - user_id=api_key.user_id, + user_id=auth.user_id, inputs=node_input, graph_version=graph_version, ) @@ -100,6 +112,19 @@ async def execute_graph( raise HTTPException(status_code=400, detail=msg) +class ExecutionNode(TypedDict): + node_id: str + input: Any + output: dict[str, Any] + + +class GraphExecutionResult(TypedDict): + execution_id: str + status: str + nodes: list[ExecutionNode] + output: Optional[list[dict[str, str]]] + + @v1_router.get( path="/graphs/{graph_id}/executions/{graph_exec_id}/results", tags=["graphs"], @@ -107,10 +132,12 @@ async def execute_graph( async def get_graph_execution_results( graph_id: str, graph_exec_id: str, - api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.READ_GRAPH)), + auth: APIAuthorizationInfo = Security( + require_permission(APIKeyPermission.READ_GRAPH) + ), ) -> GraphExecutionResult: graph_exec = await execution_db.get_graph_execution( - user_id=api_key.user_id, + user_id=auth.user_id, execution_id=graph_exec_id, include_node_executions=True, ) @@ -122,7 +149,7 @@ async def get_graph_execution_results( if not await graph_db.get_graph( graph_id=graph_exec.graph_id, version=graph_exec.graph_version, - user_id=api_key.user_id, + user_id=auth.user_id, ): raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.") diff --git a/autogpt_platform/backend/backend/server/model.py b/autogpt_platform/backend/backend/server/model.py index 1d7b79cd7c..5e13e20450 100644 --- a/autogpt_platform/backend/backend/server/model.py +++ b/autogpt_platform/backend/backend/server/model.py @@ -4,7 +4,7 @@ from typing import Any, Literal, Optional import pydantic from prisma.enums import OnboardingStep -from backend.data.api_key import APIKeyInfo, APIKeyPermission +from backend.data.auth.api_key import APIKeyInfo, APIKeyPermission from backend.data.graph import Graph from backend.util.timezone_name import TimeZoneName diff --git 
a/autogpt_platform/backend/backend/server/rest_api.py b/autogpt_platform/backend/backend/server/rest_api.py index 556903571c..5db2b18c27 100644 --- a/autogpt_platform/backend/backend/server/rest_api.py +++ b/autogpt_platform/backend/backend/server/rest_api.py @@ -21,6 +21,7 @@ import backend.data.db import backend.data.graph import backend.data.user import backend.integrations.webhooks.utils +import backend.server.routers.oauth import backend.server.routers.postmark.postmark import backend.server.routers.v1 import backend.server.v2.admin.credit_admin_routes @@ -297,6 +298,11 @@ app.include_router( tags=["v2", "chat"], prefix="/api/chat", ) +app.include_router( + backend.server.routers.oauth.router, + tags=["oauth"], + prefix="/api/oauth", +) app.mount("/external-api", external_app) diff --git a/autogpt_platform/backend/backend/server/routers/oauth.py b/autogpt_platform/backend/backend/server/routers/oauth.py new file mode 100644 index 0000000000..55f591427a --- /dev/null +++ b/autogpt_platform/backend/backend/server/routers/oauth.py @@ -0,0 +1,833 @@ +""" +OAuth 2.0 Provider Endpoints + +Implements OAuth 2.0 Authorization Code flow with PKCE support. + +Flow: +1. User clicks "Login with AutoGPT" in 3rd party app +2. App redirects user to /oauth/authorize with client_id, redirect_uri, scope, state +3. User sees consent screen (if not already logged in, redirects to login first) +4. User approves → backend creates authorization code +5. User redirected back to app with code +6. App exchanges code for access/refresh tokens at /oauth/token +7. App uses access token to call external API endpoints +""" + +import io +import logging +import os +import uuid +from datetime import datetime +from typing import Literal, Optional +from urllib.parse import urlencode + +from autogpt_libs.auth import get_user_id +from fastapi import APIRouter, Body, HTTPException, Security, UploadFile, status +from gcloud.aio import storage as async_storage +from PIL import Image +from prisma.enums import APIKeyPermission +from pydantic import BaseModel, Field + +from backend.data.auth.oauth import ( + InvalidClientError, + InvalidGrantError, + OAuthApplicationInfo, + TokenIntrospectionResult, + consume_authorization_code, + create_access_token, + create_authorization_code, + create_refresh_token, + get_oauth_application, + get_oauth_application_by_id, + introspect_token, + list_user_oauth_applications, + refresh_tokens, + revoke_access_token, + revoke_refresh_token, + update_oauth_application, + validate_client_credentials, + validate_redirect_uri, + validate_scopes, +) +from backend.util.settings import Settings +from backend.util.virus_scanner import scan_content_safe + +settings = Settings() +logger = logging.getLogger(__name__) + +router = APIRouter() + + +# ============================================================================ +# Request/Response Models +# ============================================================================ + + +class TokenResponse(BaseModel): + """OAuth 2.0 token response""" + + token_type: Literal["Bearer"] = "Bearer" + access_token: str + access_token_expires_at: datetime + refresh_token: str + refresh_token_expires_at: datetime + scopes: list[str] + + +class ErrorResponse(BaseModel): + """OAuth 2.0 error response""" + + error: str + error_description: Optional[str] = None + + +class OAuthApplicationPublicInfo(BaseModel): + """Public information about an OAuth application (for consent screen)""" + + name: str + description: Optional[str] = None + logo_url: Optional[str] = None + 
scopes: list[str] + + +# ============================================================================ +# Application Info Endpoint +# ============================================================================ + + +@router.get( + "/app/{client_id}", + responses={ + 404: {"description": "Application not found or disabled"}, + }, +) +async def get_oauth_app_info( + client_id: str, user_id: str = Security(get_user_id) +) -> OAuthApplicationPublicInfo: + """ + Get public information about an OAuth application. + + This endpoint is used by the consent screen to display application details + to the user before they authorize access. + + Returns: + - name: Application name + - description: Application description (if provided) + - scopes: List of scopes the application is allowed to request + """ + app = await get_oauth_application(client_id) + if not app or not app.is_active: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Application not found", + ) + + return OAuthApplicationPublicInfo( + name=app.name, + description=app.description, + logo_url=app.logo_url, + scopes=[s.value for s in app.scopes], + ) + + +# ============================================================================ +# Authorization Endpoint +# ============================================================================ + + +class AuthorizeRequest(BaseModel): + """OAuth 2.0 authorization request""" + + client_id: str = Field(description="Client identifier") + redirect_uri: str = Field(description="Redirect URI") + scopes: list[str] = Field(description="List of scopes") + state: str = Field(description="Anti-CSRF token from client") + response_type: str = Field( + default="code", description="Must be 'code' for authorization code flow" + ) + code_challenge: str = Field(description="PKCE code challenge (required)") + code_challenge_method: Literal["S256", "plain"] = Field( + default="S256", description="PKCE code challenge method (S256 recommended)" + ) + + +class AuthorizeResponse(BaseModel): + """OAuth 2.0 authorization response with redirect URL""" + + redirect_url: str = Field(description="URL to redirect the user to") + + +@router.post("/authorize") +async def authorize( + request: AuthorizeRequest = Body(), + user_id: str = Security(get_user_id), +) -> AuthorizeResponse: + """ + OAuth 2.0 Authorization Endpoint + + User must be logged in (authenticated with Supabase JWT). + This endpoint creates an authorization code and returns a redirect URL. + + PKCE (Proof Key for Code Exchange) is REQUIRED for all authorization requests. + + The frontend consent screen should call this endpoint after the user approves, + then redirect the user to the returned `redirect_url`. + + Request Body: + - client_id: The OAuth application's client ID + - redirect_uri: Where to redirect after authorization (must match registered URI) + - scopes: List of permissions (e.g., "EXECUTE_GRAPH READ_GRAPH") + - state: Anti-CSRF token provided by client (will be returned in redirect) + - response_type: Must be "code" (for authorization code flow) + - code_challenge: PKCE code challenge (required) + - code_challenge_method: "S256" (recommended) or "plain" + + Returns: + - redirect_url: The URL to redirect the user to (includes authorization code) + + Error cases return a redirect_url with error parameters, or raise HTTPException + for critical errors (like invalid redirect_uri). 
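+
+    Illustrative client-side PKCE pair generation (a sketch only, mirroring the
+    generate_pkce helper in this patch's integration tests; it is not part of
+    this endpoint):
+
+        import base64, hashlib, secrets
+
+        verifier = secrets.token_urlsafe(32)
+        challenge = (
+            base64.urlsafe_b64encode(hashlib.sha256(verifier.encode("ascii")).digest())
+            .decode("ascii")
+            .rstrip("=")
+        )
+        # Send `challenge` as code_challenge here; keep `verifier` private and
+        # present it as code_verifier when exchanging the code at /oauth/token.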
+ """ + try: + # Validate response_type + if request.response_type != "code": + return _error_redirect_url( + request.redirect_uri, + request.state, + "unsupported_response_type", + "Only 'code' response type is supported", + ) + + # Get application + app = await get_oauth_application(request.client_id) + if not app: + return _error_redirect_url( + request.redirect_uri, + request.state, + "invalid_client", + "Unknown client_id", + ) + + if not app.is_active: + return _error_redirect_url( + request.redirect_uri, + request.state, + "invalid_client", + "Application is not active", + ) + + # Validate redirect URI + if not validate_redirect_uri(app, request.redirect_uri): + # For invalid redirect_uri, we can't redirect safely + # Must return error instead + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=( + "Invalid redirect_uri. " + f"Must be one of: {', '.join(app.redirect_uris)}" + ), + ) + + # Parse and validate scopes + try: + requested_scopes = [APIKeyPermission(s.strip()) for s in request.scopes] + except ValueError as e: + return _error_redirect_url( + request.redirect_uri, + request.state, + "invalid_scope", + f"Invalid scope: {e}", + ) + + if not requested_scopes: + return _error_redirect_url( + request.redirect_uri, + request.state, + "invalid_scope", + "At least one scope is required", + ) + + if not validate_scopes(app, requested_scopes): + return _error_redirect_url( + request.redirect_uri, + request.state, + "invalid_scope", + "Application is not authorized for all requested scopes. " + f"Allowed: {', '.join(s.value for s in app.scopes)}", + ) + + # Create authorization code + auth_code = await create_authorization_code( + application_id=app.id, + user_id=user_id, + scopes=requested_scopes, + redirect_uri=request.redirect_uri, + code_challenge=request.code_challenge, + code_challenge_method=request.code_challenge_method, + ) + + # Build redirect URL with authorization code + params = { + "code": auth_code.code, + "state": request.state, + } + redirect_url = f"{request.redirect_uri}?{urlencode(params)}" + + logger.info( + f"Authorization code issued for user #{user_id} " + f"and app {app.name} (#{app.id})" + ) + + return AuthorizeResponse(redirect_url=redirect_url) + + except HTTPException: + raise + except Exception as e: + logger.error(f"Error in authorization endpoint: {e}", exc_info=True) + return _error_redirect_url( + request.redirect_uri, + request.state, + "server_error", + "An unexpected error occurred", + ) + + +def _error_redirect_url( + redirect_uri: str, + state: str, + error: str, + error_description: Optional[str] = None, +) -> AuthorizeResponse: + """Helper to build redirect URL with OAuth error parameters""" + params = { + "error": error, + "state": state, + } + if error_description: + params["error_description"] = error_description + + redirect_url = f"{redirect_uri}?{urlencode(params)}" + return AuthorizeResponse(redirect_url=redirect_url) + + +# ============================================================================ +# Token Endpoint +# ============================================================================ + + +class TokenRequestByCode(BaseModel): + grant_type: Literal["authorization_code"] + code: str = Field(description="Authorization code") + redirect_uri: str = Field( + description="Redirect URI (must match authorization request)" + ) + client_id: str + client_secret: str + code_verifier: str = Field(description="PKCE code verifier") + + +class TokenRequestByRefreshToken(BaseModel): + grant_type: Literal["refresh_token"] 
+ refresh_token: str + client_id: str + client_secret: str + + +@router.post("/token") +async def token( + request: TokenRequestByCode | TokenRequestByRefreshToken = Body(), +) -> TokenResponse: + """ + OAuth 2.0 Token Endpoint + + Exchanges authorization code or refresh token for access token. + + Grant Types: + 1. authorization_code: Exchange authorization code for tokens + - Required: grant_type, code, redirect_uri, client_id, client_secret + - Optional: code_verifier (required if PKCE was used) + + 2. refresh_token: Exchange refresh token for new access token + - Required: grant_type, refresh_token, client_id, client_secret + + Returns: + - access_token: Bearer token for API access (1 hour TTL) + - token_type: "Bearer" + - expires_in: Seconds until access token expires + - refresh_token: Token for refreshing access (30 days TTL) + - scopes: List of scopes + """ + # Validate client credentials + try: + app = await validate_client_credentials( + request.client_id, request.client_secret + ) + except InvalidClientError as e: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=str(e), + ) + + # Handle authorization_code grant + if request.grant_type == "authorization_code": + # Consume authorization code + try: + user_id, scopes = await consume_authorization_code( + code=request.code, + application_id=app.id, + redirect_uri=request.redirect_uri, + code_verifier=request.code_verifier, + ) + except InvalidGrantError as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=str(e), + ) + + # Create access and refresh tokens + access_token = await create_access_token(app.id, user_id, scopes) + refresh_token = await create_refresh_token(app.id, user_id, scopes) + + logger.info( + f"Access token issued for user #{user_id} and app {app.name} (#{app.id})" + "via authorization code" + ) + + if not access_token.token or not refresh_token.token: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to generate tokens", + ) + + return TokenResponse( + token_type="Bearer", + access_token=access_token.token.get_secret_value(), + access_token_expires_at=access_token.expires_at, + refresh_token=refresh_token.token.get_secret_value(), + refresh_token_expires_at=refresh_token.expires_at, + scopes=list(s.value for s in scopes), + ) + + # Handle refresh_token grant + elif request.grant_type == "refresh_token": + # Refresh access token + try: + new_access_token, new_refresh_token = await refresh_tokens( + request.refresh_token, app.id + ) + except InvalidGrantError as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=str(e), + ) + + logger.info( + f"Tokens refreshed for user #{new_access_token.user_id} " + f"by app {app.name} (#{app.id})" + ) + + if not new_access_token.token or not new_refresh_token.token: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to generate tokens", + ) + + return TokenResponse( + token_type="Bearer", + access_token=new_access_token.token.get_secret_value(), + access_token_expires_at=new_access_token.expires_at, + refresh_token=new_refresh_token.token.get_secret_value(), + refresh_token_expires_at=new_refresh_token.expires_at, + scopes=list(s.value for s in new_access_token.scopes), + ) + + else: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Unsupported grant_type: {request.grant_type}. 
" + "Must be 'authorization_code' or 'refresh_token'", + ) + + +# ============================================================================ +# Token Introspection Endpoint +# ============================================================================ + + +@router.post("/introspect") +async def introspect( + token: str = Body(description="Token to introspect"), + token_type_hint: Optional[Literal["access_token", "refresh_token"]] = Body( + None, description="Hint about token type ('access_token' or 'refresh_token')" + ), + client_id: str = Body(description="Client identifier"), + client_secret: str = Body(description="Client secret"), +) -> TokenIntrospectionResult: + """ + OAuth 2.0 Token Introspection Endpoint (RFC 7662) + + Allows clients to check if a token is valid and get its metadata. + + Returns: + - active: Whether the token is currently active + - scopes: List of authorized scopes (if active) + - client_id: The client the token was issued to (if active) + - user_id: The user the token represents (if active) + - exp: Expiration timestamp (if active) + - token_type: "access_token" or "refresh_token" (if active) + """ + # Validate client credentials + try: + await validate_client_credentials(client_id, client_secret) + except InvalidClientError as e: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=str(e), + ) + + # Introspect the token + return await introspect_token(token, token_type_hint) + + +# ============================================================================ +# Token Revocation Endpoint +# ============================================================================ + + +@router.post("/revoke") +async def revoke( + token: str = Body(description="Token to revoke"), + token_type_hint: Optional[Literal["access_token", "refresh_token"]] = Body( + None, description="Hint about token type ('access_token' or 'refresh_token')" + ), + client_id: str = Body(description="Client identifier"), + client_secret: str = Body(description="Client secret"), +): + """ + OAuth 2.0 Token Revocation Endpoint (RFC 7009) + + Allows clients to revoke an access or refresh token. + + Note: Revoking a refresh token does NOT revoke associated access tokens. + Revoking an access token does NOT revoke the associated refresh token. + """ + # Validate client credentials + try: + app = await validate_client_credentials(client_id, client_secret) + except InvalidClientError as e: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=str(e), + ) + + # Try to revoke as access token first + # Note: We pass app.id to ensure the token belongs to the authenticated app + if token_type_hint != "refresh_token": + revoked = await revoke_access_token(token, app.id) + if revoked: + logger.info( + f"Access token revoked for app {app.name} (#{app.id}); " + f"user #{revoked.user_id}" + ) + return {"status": "ok"} + + # Try to revoke as refresh token + revoked = await revoke_refresh_token(token, app.id) + if revoked: + logger.info( + f"Refresh token revoked for app {app.name} (#{app.id}); " + f"user #{revoked.user_id}" + ) + return {"status": "ok"} + + # Per RFC 7009, revocation endpoint returns 200 even if token not found + # or if token belongs to a different application. + # This prevents token scanning attacks. 
+ logger.warning(f"Unsuccessful token revocation attempt by app {app.name} #{app.id}") + return {"status": "ok"} + + +# ============================================================================ +# Application Management Endpoints (for app owners) +# ============================================================================ + + +@router.get("/apps/mine") +async def list_my_oauth_apps( + user_id: str = Security(get_user_id), +) -> list[OAuthApplicationInfo]: + """ + List all OAuth applications owned by the current user. + + Returns a list of OAuth applications with their details including: + - id, name, description, logo_url + - client_id (public identifier) + - redirect_uris, grant_types, scopes + - is_active status + - created_at, updated_at timestamps + + Note: client_secret is never returned for security reasons. + """ + return await list_user_oauth_applications(user_id) + + +@router.patch("/apps/{app_id}/status") +async def update_app_status( + app_id: str, + user_id: str = Security(get_user_id), + is_active: bool = Body(description="Whether the app should be active", embed=True), +) -> OAuthApplicationInfo: + """ + Enable or disable an OAuth application. + + Only the application owner can update the status. + When disabled, the application cannot be used for new authorizations + and existing access tokens will fail validation. + + Returns the updated application info. + """ + updated_app = await update_oauth_application( + app_id=app_id, + owner_id=user_id, + is_active=is_active, + ) + + if not updated_app: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Application not found or you don't have permission to update it", + ) + + action = "enabled" if is_active else "disabled" + logger.info(f"OAuth app {updated_app.name} (#{app_id}) {action} by user #{user_id}") + + return updated_app + + +class UpdateAppLogoRequest(BaseModel): + logo_url: str = Field(description="URL of the uploaded logo image") + + +@router.patch("/apps/{app_id}/logo") +async def update_app_logo( + app_id: str, + request: UpdateAppLogoRequest = Body(), + user_id: str = Security(get_user_id), +) -> OAuthApplicationInfo: + """ + Update the logo URL for an OAuth application. + + Only the application owner can update the logo. + The logo should be uploaded first using the media upload endpoint, + then this endpoint is called with the resulting URL. + + Logo requirements: + - Must be square (1:1 aspect ratio) + - Minimum 512x512 pixels + - Maximum 2048x2048 pixels + + Returns the updated application info. 
+ """ + if ( + not (app := await get_oauth_application_by_id(app_id)) + or app.owner_id != user_id + ): + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="OAuth App not found", + ) + + # Delete the current app logo file (if any and it's in our cloud storage) + await _delete_app_current_logo_file(app) + + updated_app = await update_oauth_application( + app_id=app_id, + owner_id=user_id, + logo_url=request.logo_url, + ) + + if not updated_app: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Application not found or you don't have permission to update it", + ) + + logger.info( + f"OAuth app {updated_app.name} (#{app_id}) logo updated by user #{user_id}" + ) + + return updated_app + + +# Logo upload constraints +LOGO_MIN_SIZE = 512 +LOGO_MAX_SIZE = 2048 +LOGO_ALLOWED_TYPES = {"image/jpeg", "image/png", "image/webp"} +LOGO_MAX_FILE_SIZE = 3 * 1024 * 1024 # 3MB + + +@router.post("/apps/{app_id}/logo/upload") +async def upload_app_logo( + app_id: str, + file: UploadFile, + user_id: str = Security(get_user_id), +) -> OAuthApplicationInfo: + """ + Upload a logo image for an OAuth application. + + Requirements: + - Image must be square (1:1 aspect ratio) + - Minimum 512x512 pixels + - Maximum 2048x2048 pixels + - Allowed formats: JPEG, PNG, WebP + - Maximum file size: 3MB + + The image is uploaded to cloud storage and the app's logoUrl is updated. + Returns the updated application info. + """ + # Verify ownership to reduce vulnerability to DoS(torage) or DoM(oney) attacks + if ( + not (app := await get_oauth_application_by_id(app_id)) + or app.owner_id != user_id + ): + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="OAuth App not found", + ) + + # Check GCS configuration + if not settings.config.media_gcs_bucket_name: + raise HTTPException( + status_code=status.HTTP_503_SERVICE_UNAVAILABLE, + detail="Media storage is not configured", + ) + + # Validate content type + content_type = file.content_type + if content_type not in LOGO_ALLOWED_TYPES: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Invalid file type. Allowed: JPEG, PNG, WebP. Got: {content_type}", + ) + + # Read file content + try: + file_bytes = await file.read() + except Exception as e: + logger.error(f"Error reading logo file: {e}") + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Failed to read uploaded file", + ) + + # Check file size + if len(file_bytes) > LOGO_MAX_FILE_SIZE: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=( + "File too large. " + f"Maximum size is {LOGO_MAX_FILE_SIZE // 1024 // 1024}MB" + ), + ) + + # Validate image dimensions + try: + image = Image.open(io.BytesIO(file_bytes)) + width, height = image.size + + if width != height: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Logo must be square. Got {width}x{height}", + ) + + if width < LOGO_MIN_SIZE: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Logo too small. Minimum {LOGO_MIN_SIZE}x{LOGO_MIN_SIZE}. " + f"Got {width}x{height}", + ) + + if width > LOGO_MAX_SIZE: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Logo too large. Maximum {LOGO_MAX_SIZE}x{LOGO_MAX_SIZE}. 
" + f"Got {width}x{height}", + ) + except HTTPException: + raise + except Exception as e: + logger.error(f"Error validating logo image: {e}") + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Invalid image file", + ) + + # Scan for viruses + filename = file.filename or "logo" + await scan_content_safe(file_bytes, filename=filename) + + # Generate unique filename + file_ext = os.path.splitext(filename)[1].lower() or ".png" + unique_filename = f"{uuid.uuid4()}{file_ext}" + storage_path = f"oauth-apps/{app_id}/logo/{unique_filename}" + + # Upload to GCS + try: + async with async_storage.Storage() as async_client: + bucket_name = settings.config.media_gcs_bucket_name + + await async_client.upload( + bucket_name, storage_path, file_bytes, content_type=content_type + ) + + logo_url = f"https://storage.googleapis.com/{bucket_name}/{storage_path}" + except Exception as e: + logger.error(f"Error uploading logo to GCS: {e}") + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to upload logo", + ) + + # Delete the current app logo file (if any and it's in our cloud storage) + await _delete_app_current_logo_file(app) + + # Update the app with the new logo URL + updated_app = await update_oauth_application( + app_id=app_id, + owner_id=user_id, + logo_url=logo_url, + ) + + if not updated_app: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Application not found or you don't have permission to update it", + ) + + logger.info( + f"OAuth app {updated_app.name} (#{app_id}) logo uploaded by user #{user_id}" + ) + + return updated_app + + +async def _delete_app_current_logo_file(app: OAuthApplicationInfo): + """ + Delete the current logo file for the given app, if there is one in our cloud storage + """ + bucket_name = settings.config.media_gcs_bucket_name + storage_base_url = f"https://storage.googleapis.com/{bucket_name}/" + + if app.logo_url and app.logo_url.startswith(storage_base_url): + # Parse blob path from URL: https://storage.googleapis.com/{bucket}/{path} + old_path = app.logo_url.replace(storage_base_url, "") + try: + async with async_storage.Storage() as async_client: + await async_client.delete(bucket_name, old_path) + logger.info(f"Deleted old logo for OAuth app #{app.id}: {old_path}") + except Exception as e: + # Log but don't fail - the new logo was uploaded successfully + logger.warning( + f"Failed to delete old logo for OAuth app #{app.id}: {e}", exc_info=e + ) diff --git a/autogpt_platform/backend/backend/server/routers/oauth_test.py b/autogpt_platform/backend/backend/server/routers/oauth_test.py new file mode 100644 index 0000000000..8ec6911152 --- /dev/null +++ b/autogpt_platform/backend/backend/server/routers/oauth_test.py @@ -0,0 +1,1784 @@ +""" +End-to-end integration tests for OAuth 2.0 Provider Endpoints. + +These tests hit the actual API endpoints and database, testing the complete +OAuth flow from endpoint to database. + +Tests cover: +1. Authorization endpoint - creating authorization codes +2. Token endpoint - exchanging codes for tokens and refreshing +3. Token introspection endpoint - checking token validity +4. Token revocation endpoint - revoking tokens +5. 
Complete OAuth flow end-to-end +""" + +import base64 +import hashlib +import secrets +import uuid +from typing import AsyncGenerator + +import httpx +import pytest +from autogpt_libs.api_key.keysmith import APIKeySmith +from prisma.enums import APIKeyPermission +from prisma.models import OAuthAccessToken as PrismaOAuthAccessToken +from prisma.models import OAuthApplication as PrismaOAuthApplication +from prisma.models import OAuthAuthorizationCode as PrismaOAuthAuthorizationCode +from prisma.models import OAuthRefreshToken as PrismaOAuthRefreshToken +from prisma.models import User as PrismaUser + +from backend.server.rest_api import app + +keysmith = APIKeySmith() + + +# ============================================================================ +# Test Fixtures +# ============================================================================ + + +@pytest.fixture +def test_user_id() -> str: + """Test user ID for OAuth tests.""" + return str(uuid.uuid4()) + + +@pytest.fixture +async def test_user(server, test_user_id: str): + """Create a test user in the database.""" + await PrismaUser.prisma().create( + data={ + "id": test_user_id, + "email": f"oauth-test-{test_user_id}@example.com", + "name": "OAuth Test User", + } + ) + + yield test_user_id + + # Cleanup - delete in correct order due to foreign key constraints + await PrismaOAuthAccessToken.prisma().delete_many(where={"userId": test_user_id}) + await PrismaOAuthRefreshToken.prisma().delete_many(where={"userId": test_user_id}) + await PrismaOAuthAuthorizationCode.prisma().delete_many( + where={"userId": test_user_id} + ) + await PrismaOAuthApplication.prisma().delete_many(where={"ownerId": test_user_id}) + await PrismaUser.prisma().delete(where={"id": test_user_id}) + + +@pytest.fixture +async def test_oauth_app(test_user: str): + """Create a test OAuth application in the database.""" + app_id = str(uuid.uuid4()) + client_id = f"test_client_{secrets.token_urlsafe(8)}" + # Secret must start with "agpt_" prefix for keysmith verification to work + client_secret_plaintext = f"agpt_secret_{secrets.token_urlsafe(16)}" + client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext) + + await PrismaOAuthApplication.prisma().create( + data={ + "id": app_id, + "name": "Test OAuth App", + "description": "Test application for integration tests", + "clientId": client_id, + "clientSecret": client_secret_hash, + "clientSecretSalt": client_secret_salt, + "redirectUris": [ + "https://example.com/callback", + "http://localhost:3000/callback", + ], + "grantTypes": ["authorization_code", "refresh_token"], + "scopes": [APIKeyPermission.EXECUTE_GRAPH, APIKeyPermission.READ_GRAPH], + "ownerId": test_user, + "isActive": True, + } + ) + + yield { + "id": app_id, + "client_id": client_id, + "client_secret": client_secret_plaintext, + "redirect_uri": "https://example.com/callback", + } + + # Cleanup is handled by test_user fixture (cascade delete) + + +def generate_pkce() -> tuple[str, str]: + """Generate PKCE code verifier and challenge.""" + verifier = secrets.token_urlsafe(32) + challenge = ( + base64.urlsafe_b64encode(hashlib.sha256(verifier.encode("ascii")).digest()) + .decode("ascii") + .rstrip("=") + ) + return verifier, challenge + + +@pytest.fixture +def pkce_credentials() -> tuple[str, str]: + """Generate PKCE code verifier and challenge as a fixture.""" + return generate_pkce() + + +@pytest.fixture +async def client(server, test_user: str) -> AsyncGenerator[httpx.AsyncClient, None]: + """ + Create an async HTTP client that talks 
directly to the FastAPI app. + + Uses ASGI transport so we don't need an actual HTTP server running. + Also overrides get_user_id dependency to return our test user. + + Depends on `server` to ensure the DB is connected and `test_user` to ensure + the user exists in the database before running tests. + """ + from autogpt_libs.auth import get_user_id + + # Override get_user_id dependency to return our test user + def override_get_user_id(): + return test_user + + # Store original override if any + original_override = app.dependency_overrides.get(get_user_id) + + # Set our override + app.dependency_overrides[get_user_id] = override_get_user_id + + try: + async with httpx.AsyncClient( + transport=httpx.ASGITransport(app=app), + base_url="http://test", + ) as http_client: + yield http_client + finally: + # Restore original override + if original_override is not None: + app.dependency_overrides[get_user_id] = original_override + else: + app.dependency_overrides.pop(get_user_id, None) + + +# ============================================================================ +# Authorization Endpoint Integration Tests +# ============================================================================ + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorize_creates_code_in_database( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, + pkce_credentials: tuple[str, str], +): + """Test that authorization endpoint creates a code in the database.""" + verifier, challenge = pkce_credentials + + response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH", "READ_GRAPH"], + "state": "test_state_123", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + assert response.status_code == 200 + redirect_url = response.json()["redirect_url"] + + # Parse the redirect URL to get the authorization code + from urllib.parse import parse_qs, urlparse + + parsed = urlparse(redirect_url) + query_params = parse_qs(parsed.query) + + assert "code" in query_params, f"Expected 'code' in query params: {query_params}" + auth_code = query_params["code"][0] + assert query_params["state"][0] == "test_state_123" + + # Verify code exists in database + db_code = await PrismaOAuthAuthorizationCode.prisma().find_unique( + where={"code": auth_code} + ) + + assert db_code is not None + assert db_code.userId == test_user + assert db_code.applicationId == test_oauth_app["id"] + assert db_code.redirectUri == test_oauth_app["redirect_uri"] + assert APIKeyPermission.EXECUTE_GRAPH in db_code.scopes + assert APIKeyPermission.READ_GRAPH in db_code.scopes + assert db_code.usedAt is None # Not yet consumed + assert db_code.codeChallenge == challenge + assert db_code.codeChallengeMethod == "S256" + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorize_with_pkce_stores_challenge( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, + pkce_credentials: tuple[str, str], +): + """Test that PKCE code challenge is stored correctly.""" + verifier, challenge = pkce_credentials + + response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "pkce_test_state", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, 
+ follow_redirects=False, + ) + + assert response.status_code == 200 + + from urllib.parse import parse_qs, urlparse + + auth_code = parse_qs(urlparse(response.json()["redirect_url"]).query)["code"][0] + + # Verify PKCE challenge is stored + db_code = await PrismaOAuthAuthorizationCode.prisma().find_unique( + where={"code": auth_code} + ) + + assert db_code is not None + assert db_code.codeChallenge == challenge + assert db_code.codeChallengeMethod == "S256" + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorize_invalid_client_returns_error( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test that invalid client_id returns error in redirect.""" + _, challenge = generate_pkce() + + response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": "nonexistent_client_id", + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "error_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + assert response.status_code == 200 + from urllib.parse import parse_qs, urlparse + + query_params = parse_qs(urlparse(response.json()["redirect_url"]).query) + assert query_params["error"][0] == "invalid_client" + + +@pytest.fixture +async def inactive_oauth_app(test_user: str): + """Create an inactive test OAuth application in the database.""" + app_id = str(uuid.uuid4()) + client_id = f"inactive_client_{secrets.token_urlsafe(8)}" + client_secret_plaintext = f"agpt_secret_{secrets.token_urlsafe(16)}" + client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext) + + await PrismaOAuthApplication.prisma().create( + data={ + "id": app_id, + "name": "Inactive OAuth App", + "description": "Inactive test application", + "clientId": client_id, + "clientSecret": client_secret_hash, + "clientSecretSalt": client_secret_salt, + "redirectUris": ["https://example.com/callback"], + "grantTypes": ["authorization_code", "refresh_token"], + "scopes": [APIKeyPermission.EXECUTE_GRAPH], + "ownerId": test_user, + "isActive": False, # Inactive! 
+ } + ) + + yield { + "id": app_id, + "client_id": client_id, + "client_secret": client_secret_plaintext, + "redirect_uri": "https://example.com/callback", + } + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorize_inactive_app( + client: httpx.AsyncClient, + test_user: str, + inactive_oauth_app: dict, +): + """Test that authorization with inactive app returns error.""" + _, challenge = generate_pkce() + + response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": inactive_oauth_app["client_id"], + "redirect_uri": inactive_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "inactive_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + assert response.status_code == 200 + from urllib.parse import parse_qs, urlparse + + query_params = parse_qs(urlparse(response.json()["redirect_url"]).query) + assert query_params["error"][0] == "invalid_client" + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorize_invalid_redirect_uri( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test authorization with unregistered redirect_uri returns HTTP error.""" + _, challenge = generate_pkce() + + response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": "https://malicious.com/callback", + "scopes": ["EXECUTE_GRAPH"], + "state": "invalid_redirect_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + # Invalid redirect_uri should return HTTP 400, not a redirect + assert response.status_code == 400 + assert "redirect_uri" in response.json()["detail"].lower() + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorize_invalid_scope( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test authorization with invalid scope value.""" + _, challenge = generate_pkce() + + response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["INVALID_SCOPE_NAME"], + "state": "invalid_scope_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + assert response.status_code == 200 + from urllib.parse import parse_qs, urlparse + + query_params = parse_qs(urlparse(response.json()["redirect_url"]).query) + assert query_params["error"][0] == "invalid_scope" + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorize_unauthorized_scope( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test authorization requesting scope not authorized for app.""" + _, challenge = generate_pkce() + + # The test_oauth_app only has EXECUTE_GRAPH and READ_GRAPH scopes + # DELETE_GRAPH is not in the app's allowed scopes + response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["DELETE_GRAPH"], # Not authorized for this app + "state": "unauthorized_scope_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + assert response.status_code == 200 + from urllib.parse import parse_qs, urlparse + + query_params = 
parse_qs(urlparse(response.json()["redirect_url"]).query) + assert query_params["error"][0] == "invalid_scope" + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorize_unsupported_response_type( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test authorization with unsupported response_type.""" + _, challenge = generate_pkce() + + response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "unsupported_response_test", + "response_type": "token", # Implicit flow not supported + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + assert response.status_code == 200 + from urllib.parse import parse_qs, urlparse + + query_params = parse_qs(urlparse(response.json()["redirect_url"]).query) + assert query_params["error"][0] == "unsupported_response_type" + + +# ============================================================================ +# Token Endpoint Integration Tests - Authorization Code Grant +# ============================================================================ + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_exchange_creates_tokens_in_database( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test that token exchange creates access and refresh tokens in database.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # First get an authorization code + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH", "READ_GRAPH"], + "state": "token_test_state", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + # Exchange code for tokens + token_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + + assert token_response.status_code == 200 + tokens = token_response.json() + + assert "access_token" in tokens + assert "refresh_token" in tokens + assert tokens["token_type"] == "Bearer" + assert "EXECUTE_GRAPH" in tokens["scopes"] + assert "READ_GRAPH" in tokens["scopes"] + + # Verify access token exists in database (hashed) + access_token_hash = hashlib.sha256(tokens["access_token"].encode()).hexdigest() + db_access_token = await PrismaOAuthAccessToken.prisma().find_unique( + where={"token": access_token_hash} + ) + + assert db_access_token is not None + assert db_access_token.userId == test_user + assert db_access_token.applicationId == test_oauth_app["id"] + assert db_access_token.revokedAt is None + + # Verify refresh token exists in database (hashed) + refresh_token_hash = hashlib.sha256(tokens["refresh_token"].encode()).hexdigest() + db_refresh_token = await PrismaOAuthRefreshToken.prisma().find_unique( + where={"token": refresh_token_hash} + ) + + assert db_refresh_token is not None + assert db_refresh_token.userId == test_user + assert db_refresh_token.applicationId == test_oauth_app["id"] + assert 
db_refresh_token.revokedAt is None + + # Verify authorization code is marked as used + db_code = await PrismaOAuthAuthorizationCode.prisma().find_unique( + where={"code": auth_code} + ) + assert db_code is not None + assert db_code.usedAt is not None + + +@pytest.mark.asyncio(loop_scope="session") +async def test_authorization_code_cannot_be_reused( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test that authorization code can only be used once.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get authorization code + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "reuse_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + # First exchange - should succeed + first_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + assert first_response.status_code == 200 + + # Second exchange - should fail + second_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + assert second_response.status_code == 400 + assert "already used" in second_response.json()["detail"] + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_exchange_with_invalid_client_secret( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test that token exchange fails with invalid client secret.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get authorization code + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "bad_secret_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + # Try to exchange with wrong secret + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": "wrong_secret", + "code_verifier": verifier, + }, + ) + + assert response.status_code == 401 + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_authorization_code_invalid_code( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test token exchange with invalid/nonexistent authorization code.""" + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": "nonexistent_invalid_code_xyz", + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": 
test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": "", + }, + ) + + assert response.status_code == 400 + assert "not found" in response.json()["detail"].lower() + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_authorization_code_expired( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test token exchange with expired authorization code.""" + from datetime import datetime, timedelta, timezone + + # Create an expired authorization code directly in the database + expired_code = f"expired_code_{secrets.token_urlsafe(16)}" + now = datetime.now(timezone.utc) + + await PrismaOAuthAuthorizationCode.prisma().create( + data={ + "code": expired_code, + "applicationId": test_oauth_app["id"], + "userId": test_user, + "scopes": [APIKeyPermission.EXECUTE_GRAPH], + "redirectUri": test_oauth_app["redirect_uri"], + "expiresAt": now - timedelta(hours=1), # Already expired + } + ) + + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": expired_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": "", + }, + ) + + assert response.status_code == 400 + assert "expired" in response.json()["detail"].lower() + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_authorization_code_redirect_uri_mismatch( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test token exchange with mismatched redirect_uri.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get authorization code with one redirect_uri + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "redirect_mismatch_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + # Try to exchange with different redirect_uri + # Note: localhost:3000 is in the app's registered redirect_uris + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + # Different redirect_uri from authorization request + "redirect_uri": "http://localhost:3000/callback", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + + assert response.status_code == 400 + assert "redirect_uri" in response.json()["detail"].lower() + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_authorization_code_pkce_failure( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, + pkce_credentials: tuple[str, str], +): + """Test token exchange with PKCE verification failure (wrong verifier).""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = pkce_credentials + + # Get authorization code with PKCE challenge + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "pkce_failure_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + 
follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + # Try to exchange with wrong verifier + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": "wrong_verifier_that_does_not_match", + }, + ) + + assert response.status_code == 400 + assert "pkce" in response.json()["detail"].lower() + + +# ============================================================================ +# Token Endpoint Integration Tests - Refresh Token Grant +# ============================================================================ + + +@pytest.mark.asyncio(loop_scope="session") +async def test_refresh_token_creates_new_tokens( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test that refresh token grant creates new access and refresh tokens.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get initial tokens + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "refresh_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + initial_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + initial_tokens = initial_response.json() + + # Use refresh token to get new tokens + refresh_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "refresh_token", + "refresh_token": initial_tokens["refresh_token"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert refresh_response.status_code == 200 + new_tokens = refresh_response.json() + + # Tokens should be different + assert new_tokens["access_token"] != initial_tokens["access_token"] + assert new_tokens["refresh_token"] != initial_tokens["refresh_token"] + + # Old refresh token should be revoked in database + old_refresh_hash = hashlib.sha256( + initial_tokens["refresh_token"].encode() + ).hexdigest() + old_db_token = await PrismaOAuthRefreshToken.prisma().find_unique( + where={"token": old_refresh_hash} + ) + assert old_db_token is not None + assert old_db_token.revokedAt is not None + + # New tokens should exist and be valid + new_access_hash = hashlib.sha256(new_tokens["access_token"].encode()).hexdigest() + new_db_access = await PrismaOAuthAccessToken.prisma().find_unique( + where={"token": new_access_hash} + ) + assert new_db_access is not None + assert new_db_access.revokedAt is None + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_refresh_invalid_token( + client: httpx.AsyncClient, + test_oauth_app: dict, +): + """Test token refresh with invalid/nonexistent refresh token.""" + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "refresh_token", + "refresh_token": 
"completely_invalid_refresh_token_xyz", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert response.status_code == 400 + assert "not found" in response.json()["detail"].lower() + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_refresh_expired( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test token refresh with expired refresh token.""" + from datetime import datetime, timedelta, timezone + + # Create an expired refresh token directly in the database + expired_token_value = f"expired_refresh_{secrets.token_urlsafe(16)}" + expired_token_hash = hashlib.sha256(expired_token_value.encode()).hexdigest() + now = datetime.now(timezone.utc) + + await PrismaOAuthRefreshToken.prisma().create( + data={ + "token": expired_token_hash, + "applicationId": test_oauth_app["id"], + "userId": test_user, + "scopes": [APIKeyPermission.EXECUTE_GRAPH], + "expiresAt": now - timedelta(days=1), # Already expired + } + ) + + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "refresh_token", + "refresh_token": expired_token_value, + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert response.status_code == 400 + assert "expired" in response.json()["detail"].lower() + + +@pytest.mark.asyncio(loop_scope="session") +async def test_token_refresh_revoked( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test token refresh with revoked refresh token.""" + from datetime import datetime, timedelta, timezone + + # Create a revoked refresh token directly in the database + revoked_token_value = f"revoked_refresh_{secrets.token_urlsafe(16)}" + revoked_token_hash = hashlib.sha256(revoked_token_value.encode()).hexdigest() + now = datetime.now(timezone.utc) + + await PrismaOAuthRefreshToken.prisma().create( + data={ + "token": revoked_token_hash, + "applicationId": test_oauth_app["id"], + "userId": test_user, + "scopes": [APIKeyPermission.EXECUTE_GRAPH], + "expiresAt": now + timedelta(days=30), # Not expired + "revokedAt": now - timedelta(hours=1), # But revoked + } + ) + + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "refresh_token", + "refresh_token": revoked_token_value, + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert response.status_code == 400 + assert "revoked" in response.json()["detail"].lower() + + +@pytest.fixture +async def other_oauth_app(test_user: str): + """Create a second OAuth application for cross-app tests.""" + app_id = str(uuid.uuid4()) + client_id = f"other_client_{secrets.token_urlsafe(8)}" + client_secret_plaintext = f"agpt_other_{secrets.token_urlsafe(16)}" + client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext) + + await PrismaOAuthApplication.prisma().create( + data={ + "id": app_id, + "name": "Other OAuth App", + "description": "Second test application", + "clientId": client_id, + "clientSecret": client_secret_hash, + "clientSecretSalt": client_secret_salt, + "redirectUris": ["https://other.example.com/callback"], + "grantTypes": ["authorization_code", "refresh_token"], + "scopes": [APIKeyPermission.EXECUTE_GRAPH], + "ownerId": test_user, + "isActive": True, + } + ) + + yield { + "id": app_id, + "client_id": client_id, + "client_secret": client_secret_plaintext, + "redirect_uri": "https://other.example.com/callback", + } + + 
+@pytest.mark.asyncio(loop_scope="session") +async def test_token_refresh_wrong_application( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, + other_oauth_app: dict, +): + """Test token refresh with token from different application.""" + from datetime import datetime, timedelta, timezone + + # Create a refresh token for `test_oauth_app` + token_value = f"app1_refresh_{secrets.token_urlsafe(16)}" + token_hash = hashlib.sha256(token_value.encode()).hexdigest() + now = datetime.now(timezone.utc) + + await PrismaOAuthRefreshToken.prisma().create( + data={ + "token": token_hash, + "applicationId": test_oauth_app["id"], # Belongs to test_oauth_app + "userId": test_user, + "scopes": [APIKeyPermission.EXECUTE_GRAPH], + "expiresAt": now + timedelta(days=30), + } + ) + + # Try to use it with `other_oauth_app` + response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "refresh_token", + "refresh_token": token_value, + "client_id": other_oauth_app["client_id"], + "client_secret": other_oauth_app["client_secret"], + }, + ) + + assert response.status_code == 400 + assert "does not belong" in response.json()["detail"].lower() + + +# ============================================================================ +# Token Introspection Integration Tests +# ============================================================================ + + +@pytest.mark.asyncio(loop_scope="session") +async def test_introspect_valid_access_token( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test introspection returns correct info for valid access token.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get tokens + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH", "READ_GRAPH"], + "state": "introspect_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + token_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + tokens = token_response.json() + + # Introspect the access token + introspect_response = await client.post( + "/api/oauth/introspect", + json={ + "token": tokens["access_token"], + "token_type_hint": "access_token", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert introspect_response.status_code == 200 + data = introspect_response.json() + + assert data["active"] is True + assert data["token_type"] == "access_token" + assert data["user_id"] == test_user + assert data["client_id"] == test_oauth_app["client_id"] + assert "EXECUTE_GRAPH" in data["scopes"] + assert "READ_GRAPH" in data["scopes"] + + +@pytest.mark.asyncio(loop_scope="session") +async def test_introspect_invalid_token_returns_inactive( + client: httpx.AsyncClient, + test_oauth_app: dict, +): + """Test introspection returns inactive for non-existent token.""" + introspect_response = await client.post( + "/api/oauth/introspect", + json={ + "token": "completely_invalid_token_that_does_not_exist", + 
"client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert introspect_response.status_code == 200 + assert introspect_response.json()["active"] is False + + +@pytest.mark.asyncio(loop_scope="session") +async def test_introspect_active_refresh_token( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test introspection returns correct info for valid refresh token.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get tokens via the full flow + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH", "READ_GRAPH"], + "state": "introspect_refresh_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + token_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + tokens = token_response.json() + + # Introspect the refresh token + introspect_response = await client.post( + "/api/oauth/introspect", + json={ + "token": tokens["refresh_token"], + "token_type_hint": "refresh_token", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert introspect_response.status_code == 200 + data = introspect_response.json() + + assert data["active"] is True + assert data["token_type"] == "refresh_token" + assert data["user_id"] == test_user + assert data["client_id"] == test_oauth_app["client_id"] + + +@pytest.mark.asyncio(loop_scope="session") +async def test_introspect_invalid_client( + client: httpx.AsyncClient, + test_oauth_app: dict, +): + """Test introspection with invalid client credentials.""" + introspect_response = await client.post( + "/api/oauth/introspect", + json={ + "token": "some_token", + "client_id": test_oauth_app["client_id"], + "client_secret": "wrong_secret_value", + }, + ) + + assert introspect_response.status_code == 401 + + +@pytest.mark.asyncio(loop_scope="session") +async def test_validate_access_token_fails_when_app_disabled( + test_user: str, +): + """ + Test that validate_access_token raises InvalidClientError when the app is disabled. + + This tests the security feature where disabling an OAuth application + immediately invalidates all its access tokens. 
+ """ + from datetime import datetime, timedelta, timezone + + from backend.data.auth.oauth import InvalidClientError, validate_access_token + + # Create an OAuth app + app_id = str(uuid.uuid4()) + client_id = f"disable_test_{secrets.token_urlsafe(8)}" + client_secret_plaintext = f"agpt_disable_{secrets.token_urlsafe(16)}" + client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext) + + await PrismaOAuthApplication.prisma().create( + data={ + "id": app_id, + "name": "App To Be Disabled", + "description": "Test app for disabled validation", + "clientId": client_id, + "clientSecret": client_secret_hash, + "clientSecretSalt": client_secret_salt, + "redirectUris": ["https://example.com/callback"], + "grantTypes": ["authorization_code"], + "scopes": [APIKeyPermission.EXECUTE_GRAPH], + "ownerId": test_user, + "isActive": True, + } + ) + + # Create an access token directly in the database + token_plaintext = f"test_token_{secrets.token_urlsafe(32)}" + token_hash = hashlib.sha256(token_plaintext.encode()).hexdigest() + now = datetime.now(timezone.utc) + + await PrismaOAuthAccessToken.prisma().create( + data={ + "token": token_hash, + "applicationId": app_id, + "userId": test_user, + "scopes": [APIKeyPermission.EXECUTE_GRAPH], + "expiresAt": now + timedelta(hours=1), + } + ) + + # Token should be valid while app is active + token_info, _ = await validate_access_token(token_plaintext) + assert token_info.user_id == test_user + + # Disable the app + await PrismaOAuthApplication.prisma().update( + where={"id": app_id}, + data={"isActive": False}, + ) + + # Token should now fail validation with InvalidClientError + with pytest.raises(InvalidClientError, match="disabled"): + await validate_access_token(token_plaintext) + + # Cleanup + await PrismaOAuthApplication.prisma().delete(where={"id": app_id}) + + +# ============================================================================ +# Token Revocation Integration Tests +# ============================================================================ + + +@pytest.mark.asyncio(loop_scope="session") +async def test_revoke_access_token_updates_database( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test that revoking access token updates database.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get tokens + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "revoke_access_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + token_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + tokens = token_response.json() + + # Verify token is not revoked in database + access_hash = hashlib.sha256(tokens["access_token"].encode()).hexdigest() + db_token_before = await PrismaOAuthAccessToken.prisma().find_unique( + where={"token": access_hash} + ) + assert db_token_before is not None + assert db_token_before.revokedAt is None + + # Revoke the token + revoke_response = await 
client.post( + "/api/oauth/revoke", + json={ + "token": tokens["access_token"], + "token_type_hint": "access_token", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert revoke_response.status_code == 200 + assert revoke_response.json()["status"] == "ok" + + # Verify token is now revoked in database + db_token_after = await PrismaOAuthAccessToken.prisma().find_unique( + where={"token": access_hash} + ) + assert db_token_after is not None + assert db_token_after.revokedAt is not None + + +@pytest.mark.asyncio(loop_scope="session") +async def test_revoke_unknown_token_returns_ok( + client: httpx.AsyncClient, + test_oauth_app: dict, +): + """Test that revoking unknown token returns 200 (per RFC 7009).""" + revoke_response = await client.post( + "/api/oauth/revoke", + json={ + "token": "unknown_token_that_does_not_exist_anywhere", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + # Per RFC 7009, should return 200 even for unknown tokens + assert revoke_response.status_code == 200 + assert revoke_response.json()["status"] == "ok" + + +@pytest.mark.asyncio(loop_scope="session") +async def test_revoke_refresh_token_updates_database( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """Test that revoking refresh token updates database.""" + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get tokens + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "revoke_refresh_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + token_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + tokens = token_response.json() + + # Verify refresh token is not revoked in database + refresh_hash = hashlib.sha256(tokens["refresh_token"].encode()).hexdigest() + db_token_before = await PrismaOAuthRefreshToken.prisma().find_unique( + where={"token": refresh_hash} + ) + assert db_token_before is not None + assert db_token_before.revokedAt is None + + # Revoke the refresh token + revoke_response = await client.post( + "/api/oauth/revoke", + json={ + "token": tokens["refresh_token"], + "token_type_hint": "refresh_token", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert revoke_response.status_code == 200 + assert revoke_response.json()["status"] == "ok" + + # Verify refresh token is now revoked in database + db_token_after = await PrismaOAuthRefreshToken.prisma().find_unique( + where={"token": refresh_hash} + ) + assert db_token_after is not None + assert db_token_after.revokedAt is not None + + +@pytest.mark.asyncio(loop_scope="session") +async def test_revoke_invalid_client( + client: httpx.AsyncClient, + test_oauth_app: dict, +): + """Test revocation with invalid client credentials.""" + revoke_response = await client.post( + "/api/oauth/revoke", + json={ + "token": "some_token", + 
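+            # Unlike an unknown token (which still gets a 200 per RFC 7009),
+            # invalid client credentials are rejected outright; the assertion
+            # below expects 401, in line with RFC 6749 §5.2 invalid_client.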
"client_id": test_oauth_app["client_id"], + "client_secret": "wrong_secret_value", + }, + ) + + assert revoke_response.status_code == 401 + + +@pytest.mark.asyncio(loop_scope="session") +async def test_revoke_token_from_different_app_fails_silently( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, +): + """ + Test that an app cannot revoke tokens belonging to a different app. + + Per RFC 7009, the endpoint still returns 200 OK (to prevent token scanning), + but the token should remain valid in the database. + """ + from urllib.parse import parse_qs, urlparse + + verifier, challenge = generate_pkce() + + # Get tokens for app 1 + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH"], + "state": "cross_app_revoke_test", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + auth_code = parse_qs(urlparse(auth_response.json()["redirect_url"]).query)["code"][ + 0 + ] + + token_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + tokens = token_response.json() + + # Create a second OAuth app + app2_id = str(uuid.uuid4()) + app2_client_id = f"test_client_app2_{secrets.token_urlsafe(8)}" + app2_client_secret_plaintext = f"agpt_secret_app2_{secrets.token_urlsafe(16)}" + app2_client_secret_hash, app2_client_secret_salt = keysmith.hash_key( + app2_client_secret_plaintext + ) + + await PrismaOAuthApplication.prisma().create( + data={ + "id": app2_id, + "name": "Second Test OAuth App", + "description": "Second test application for cross-app revocation test", + "clientId": app2_client_id, + "clientSecret": app2_client_secret_hash, + "clientSecretSalt": app2_client_secret_salt, + "redirectUris": ["https://other-app.com/callback"], + "grantTypes": ["authorization_code", "refresh_token"], + "scopes": [APIKeyPermission.EXECUTE_GRAPH, APIKeyPermission.READ_GRAPH], + "ownerId": test_user, + "isActive": True, + } + ) + + # App 2 tries to revoke App 1's access token + revoke_response = await client.post( + "/api/oauth/revoke", + json={ + "token": tokens["access_token"], + "token_type_hint": "access_token", + "client_id": app2_client_id, + "client_secret": app2_client_secret_plaintext, + }, + ) + + # Per RFC 7009, returns 200 OK even if token not found/not owned + assert revoke_response.status_code == 200 + assert revoke_response.json()["status"] == "ok" + + # But the token should NOT be revoked in the database + access_hash = hashlib.sha256(tokens["access_token"].encode()).hexdigest() + db_token = await PrismaOAuthAccessToken.prisma().find_unique( + where={"token": access_hash} + ) + assert db_token is not None + assert db_token.revokedAt is None, "Token should NOT be revoked by different app" + + # Now app 1 revokes its own token - should work + revoke_response2 = await client.post( + "/api/oauth/revoke", + json={ + "token": tokens["access_token"], + "token_type_hint": "access_token", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert revoke_response2.status_code == 200 + + # Token should now be revoked + db_token_after = await 
PrismaOAuthAccessToken.prisma().find_unique( + where={"token": access_hash} + ) + assert db_token_after is not None + assert db_token_after.revokedAt is not None, "Token should be revoked by own app" + + # Cleanup second app + await PrismaOAuthApplication.prisma().delete(where={"id": app2_id}) + + +# ============================================================================ +# Complete End-to-End OAuth Flow Test +# ============================================================================ + + +@pytest.mark.asyncio(loop_scope="session") +async def test_complete_oauth_flow_end_to_end( + client: httpx.AsyncClient, + test_user: str, + test_oauth_app: dict, + pkce_credentials: tuple[str, str], +): + """ + Test the complete OAuth 2.0 flow from authorization to token refresh. + + This is a comprehensive integration test that verifies the entire + OAuth flow works correctly with real API calls and database operations. + """ + from urllib.parse import parse_qs, urlparse + + verifier, challenge = pkce_credentials + + # Step 1: Authorization request with PKCE + auth_response = await client.post( + "/api/oauth/authorize", + json={ + "client_id": test_oauth_app["client_id"], + "redirect_uri": test_oauth_app["redirect_uri"], + "scopes": ["EXECUTE_GRAPH", "READ_GRAPH"], + "state": "e2e_test_state", + "response_type": "code", + "code_challenge": challenge, + "code_challenge_method": "S256", + }, + follow_redirects=False, + ) + + assert auth_response.status_code == 200 + + redirect_url = auth_response.json()["redirect_url"] + query = parse_qs(urlparse(redirect_url).query) + + assert query["state"][0] == "e2e_test_state" + auth_code = query["code"][0] + + # Verify authorization code in database + db_code = await PrismaOAuthAuthorizationCode.prisma().find_unique( + where={"code": auth_code} + ) + assert db_code is not None + assert db_code.codeChallenge == challenge + + # Step 2: Exchange code for tokens + token_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "authorization_code", + "code": auth_code, + "redirect_uri": test_oauth_app["redirect_uri"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + "code_verifier": verifier, + }, + ) + + assert token_response.status_code == 200 + tokens = token_response.json() + assert "access_token" in tokens + assert "refresh_token" in tokens + + # Verify code is marked as used + db_code_used = await PrismaOAuthAuthorizationCode.prisma().find_unique_or_raise( + where={"code": auth_code} + ) + assert db_code_used.usedAt is not None + + # Step 3: Introspect access token + introspect_response = await client.post( + "/api/oauth/introspect", + json={ + "token": tokens["access_token"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert introspect_response.status_code == 200 + introspect_data = introspect_response.json() + assert introspect_data["active"] is True + assert introspect_data["user_id"] == test_user + + # Step 4: Refresh tokens + refresh_response = await client.post( + "/api/oauth/token", + json={ + "grant_type": "refresh_token", + "refresh_token": tokens["refresh_token"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert refresh_response.status_code == 200 + new_tokens = refresh_response.json() + assert new_tokens["access_token"] != tokens["access_token"] + assert new_tokens["refresh_token"] != tokens["refresh_token"] + + # Verify old refresh token is 
revoked + old_refresh_hash = hashlib.sha256(tokens["refresh_token"].encode()).hexdigest() + old_db_refresh = await PrismaOAuthRefreshToken.prisma().find_unique_or_raise( + where={"token": old_refresh_hash} + ) + assert old_db_refresh.revokedAt is not None + + # Step 5: Verify new access token works + new_introspect = await client.post( + "/api/oauth/introspect", + json={ + "token": new_tokens["access_token"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert new_introspect.status_code == 200 + assert new_introspect.json()["active"] is True + + # Step 6: Revoke new access token + revoke_response = await client.post( + "/api/oauth/revoke", + json={ + "token": new_tokens["access_token"], + "token_type_hint": "access_token", + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert revoke_response.status_code == 200 + + # Step 7: Verify revoked token is inactive + final_introspect = await client.post( + "/api/oauth/introspect", + json={ + "token": new_tokens["access_token"], + "client_id": test_oauth_app["client_id"], + "client_secret": test_oauth_app["client_secret"], + }, + ) + + assert final_introspect.status_code == 200 + assert final_introspect.json()["active"] is False + + # Verify in database + new_access_hash = hashlib.sha256(new_tokens["access_token"].encode()).hexdigest() + db_revoked = await PrismaOAuthAccessToken.prisma().find_unique_or_raise( + where={"token": new_access_hash} + ) + assert db_revoked.revokedAt is not None diff --git a/autogpt_platform/backend/backend/server/routers/v1.py b/autogpt_platform/backend/backend/server/routers/v1.py index d74d4ecdf7..e5e74690f8 100644 --- a/autogpt_platform/backend/backend/server/routers/v1.py +++ b/autogpt_platform/backend/backend/server/routers/v1.py @@ -31,9 +31,9 @@ from typing_extensions import Optional, TypedDict import backend.server.integrations.router import backend.server.routers.analytics import backend.server.v2.library.db as library_db -from backend.data import api_key as api_key_db from backend.data import execution as execution_db from backend.data import graph as graph_db +from backend.data.auth import api_key as api_key_db from backend.data.block import BlockInput, CompletedBlockOutput, get_block, get_blocks from backend.data.credit import ( AutoTopUpConfig, diff --git a/autogpt_platform/backend/backend/util/settings.py b/autogpt_platform/backend/backend/util/settings.py index 4eb45dc972..0f17b1215c 100644 --- a/autogpt_platform/backend/backend/util/settings.py +++ b/autogpt_platform/backend/backend/util/settings.py @@ -362,6 +362,13 @@ class Config(UpdateTrackingModel["Config"], BaseSettings): description="Hours between cloud storage cleanup runs (1-24 hours)", ) + oauth_token_cleanup_interval_hours: int = Field( + default=6, + ge=1, + le=24, + description="Hours between OAuth token cleanup runs (1-24 hours)", + ) + upload_file_size_limit_mb: int = Field( default=256, ge=1, diff --git a/autogpt_platform/backend/migrations/20251212165920_add_oauth_provider_support/migration.sql b/autogpt_platform/backend/migrations/20251212165920_add_oauth_provider_support/migration.sql new file mode 100644 index 0000000000..9c8672c4c3 --- /dev/null +++ b/autogpt_platform/backend/migrations/20251212165920_add_oauth_provider_support/migration.sql @@ -0,0 +1,129 @@ +-- CreateTable +CREATE TABLE "OAuthApplication" ( + "id" TEXT NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" 
TIMESTAMP(3) NOT NULL, + "name" TEXT NOT NULL, + "description" TEXT, + "clientId" TEXT NOT NULL, + "clientSecret" TEXT NOT NULL, + "clientSecretSalt" TEXT NOT NULL, + "redirectUris" TEXT[], + "grantTypes" TEXT[] DEFAULT ARRAY['authorization_code', 'refresh_token']::TEXT[], + "scopes" "APIKeyPermission"[], + "ownerId" TEXT NOT NULL, + "isActive" BOOLEAN NOT NULL DEFAULT true, + + CONSTRAINT "OAuthApplication_pkey" PRIMARY KEY ("id") +); + +-- CreateTable +CREATE TABLE "OAuthAuthorizationCode" ( + "id" TEXT NOT NULL, + "code" TEXT NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "expiresAt" TIMESTAMP(3) NOT NULL, + "applicationId" TEXT NOT NULL, + "userId" TEXT NOT NULL, + "scopes" "APIKeyPermission"[], + "redirectUri" TEXT NOT NULL, + "codeChallenge" TEXT, + "codeChallengeMethod" TEXT, + "usedAt" TIMESTAMP(3), + + CONSTRAINT "OAuthAuthorizationCode_pkey" PRIMARY KEY ("id") +); + +-- CreateTable +CREATE TABLE "OAuthAccessToken" ( + "id" TEXT NOT NULL, + "token" TEXT NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "expiresAt" TIMESTAMP(3) NOT NULL, + "applicationId" TEXT NOT NULL, + "userId" TEXT NOT NULL, + "scopes" "APIKeyPermission"[], + "revokedAt" TIMESTAMP(3), + + CONSTRAINT "OAuthAccessToken_pkey" PRIMARY KEY ("id") +); + +-- CreateTable +CREATE TABLE "OAuthRefreshToken" ( + "id" TEXT NOT NULL, + "token" TEXT NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "expiresAt" TIMESTAMP(3) NOT NULL, + "applicationId" TEXT NOT NULL, + "userId" TEXT NOT NULL, + "scopes" "APIKeyPermission"[], + "revokedAt" TIMESTAMP(3), + + CONSTRAINT "OAuthRefreshToken_pkey" PRIMARY KEY ("id") +); + +-- CreateIndex +CREATE UNIQUE INDEX "OAuthApplication_clientId_key" ON "OAuthApplication"("clientId"); + +-- CreateIndex +CREATE INDEX "OAuthApplication_clientId_idx" ON "OAuthApplication"("clientId"); + +-- CreateIndex +CREATE INDEX "OAuthApplication_ownerId_idx" ON "OAuthApplication"("ownerId"); + +-- CreateIndex +CREATE UNIQUE INDEX "OAuthAuthorizationCode_code_key" ON "OAuthAuthorizationCode"("code"); + +-- CreateIndex +CREATE INDEX "OAuthAuthorizationCode_code_idx" ON "OAuthAuthorizationCode"("code"); + +-- CreateIndex +CREATE INDEX "OAuthAuthorizationCode_applicationId_userId_idx" ON "OAuthAuthorizationCode"("applicationId", "userId"); + +-- CreateIndex +CREATE INDEX "OAuthAuthorizationCode_expiresAt_idx" ON "OAuthAuthorizationCode"("expiresAt"); + +-- CreateIndex +CREATE UNIQUE INDEX "OAuthAccessToken_token_key" ON "OAuthAccessToken"("token"); + +-- CreateIndex +CREATE INDEX "OAuthAccessToken_token_idx" ON "OAuthAccessToken"("token"); + +-- CreateIndex +CREATE INDEX "OAuthAccessToken_userId_applicationId_idx" ON "OAuthAccessToken"("userId", "applicationId"); + +-- CreateIndex +CREATE INDEX "OAuthAccessToken_expiresAt_idx" ON "OAuthAccessToken"("expiresAt"); + +-- CreateIndex +CREATE UNIQUE INDEX "OAuthRefreshToken_token_key" ON "OAuthRefreshToken"("token"); + +-- CreateIndex +CREATE INDEX "OAuthRefreshToken_token_idx" ON "OAuthRefreshToken"("token"); + +-- CreateIndex +CREATE INDEX "OAuthRefreshToken_userId_applicationId_idx" ON "OAuthRefreshToken"("userId", "applicationId"); + +-- CreateIndex +CREATE INDEX "OAuthRefreshToken_expiresAt_idx" ON "OAuthRefreshToken"("expiresAt"); + +-- AddForeignKey +ALTER TABLE "OAuthApplication" ADD CONSTRAINT "OAuthApplication_ownerId_fkey" FOREIGN KEY ("ownerId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "OAuthAuthorizationCode" ADD CONSTRAINT 
"OAuthAuthorizationCode_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "OAuthApplication"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "OAuthAuthorizationCode" ADD CONSTRAINT "OAuthAuthorizationCode_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "OAuthAccessToken" ADD CONSTRAINT "OAuthAccessToken_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "OAuthApplication"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "OAuthAccessToken" ADD CONSTRAINT "OAuthAccessToken_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "OAuthRefreshToken" ADD CONSTRAINT "OAuthRefreshToken_applicationId_fkey" FOREIGN KEY ("applicationId") REFERENCES "OAuthApplication"("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "OAuthRefreshToken" ADD CONSTRAINT "OAuthRefreshToken_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE; diff --git a/autogpt_platform/backend/migrations/20251218231330_add_oauth_app_logo/migration.sql b/autogpt_platform/backend/migrations/20251218231330_add_oauth_app_logo/migration.sql new file mode 100644 index 0000000000..c9c8c76df1 --- /dev/null +++ b/autogpt_platform/backend/migrations/20251218231330_add_oauth_app_logo/migration.sql @@ -0,0 +1,5 @@ +-- AlterEnum +ALTER TYPE "APIKeyPermission" ADD VALUE 'IDENTITY'; + +-- AlterTable +ALTER TABLE "OAuthApplication" ADD COLUMN "logoUrl" TEXT; diff --git a/autogpt_platform/backend/pyproject.toml b/autogpt_platform/backend/pyproject.toml index a87ae8e71d..fb06b65162 100644 --- a/autogpt_platform/backend/pyproject.toml +++ b/autogpt_platform/backend/pyproject.toml @@ -115,6 +115,8 @@ format = "linter:format" lint = "linter:lint" test = "run_tests:test" load-store-agents = "test.load_store_agents:run" +export-api-schema = "backend.cli.generate_openapi_json:main" +oauth-tool = "backend.cli.oauth_tool:cli" [tool.isort] profile = "black" diff --git a/autogpt_platform/backend/schema.prisma b/autogpt_platform/backend/schema.prisma index 121ccab5fc..d81cd4d1b1 100644 --- a/autogpt_platform/backend/schema.prisma +++ b/autogpt_platform/backend/schema.prisma @@ -61,6 +61,12 @@ model User { IntegrationWebhooks IntegrationWebhook[] NotificationBatches UserNotificationBatch[] PendingHumanReviews PendingHumanReview[] + + // OAuth Provider relations + OAuthApplications OAuthApplication[] + OAuthAuthorizationCodes OAuthAuthorizationCode[] + OAuthAccessTokens OAuthAccessToken[] + OAuthRefreshTokens OAuthRefreshToken[] } enum OnboardingStep { @@ -924,6 +930,7 @@ enum SubmissionStatus { } enum APIKeyPermission { + IDENTITY // Info about the authenticated user EXECUTE_GRAPH // Can execute agent graphs READ_GRAPH // Can get graph versions and details EXECUTE_BLOCK // Can execute individual blocks @@ -975,3 +982,113 @@ enum APIKeyStatus { REVOKED SUSPENDED } + +//////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////// +////////////// OAUTH PROVIDER TABLES ////////////////// +//////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////// + +// OAuth2 applications that can access AutoGPT on behalf of users +model OAuthApplication { + id String @id @default(uuid()) + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + // 
Application metadata + name String + description String? + logoUrl String? // URL to app logo stored in GCS + clientId String @unique + clientSecret String // Hashed with Scrypt (same as API keys) + clientSecretSalt String // Salt for Scrypt hashing + + // OAuth configuration + redirectUris String[] // Allowed callback URLs + grantTypes String[] @default(["authorization_code", "refresh_token"]) + scopes APIKeyPermission[] // Which permissions the app can request + + // Application management + ownerId String + Owner User @relation(fields: [ownerId], references: [id], onDelete: Cascade) + isActive Boolean @default(true) + + // Relations + AuthorizationCodes OAuthAuthorizationCode[] + AccessTokens OAuthAccessToken[] + RefreshTokens OAuthRefreshToken[] + + @@index([clientId]) + @@index([ownerId]) +} + +// Temporary authorization codes (10 min TTL) +model OAuthAuthorizationCode { + id String @id @default(uuid()) + code String @unique + createdAt DateTime @default(now()) + expiresAt DateTime // Now + 10 minutes + + applicationId String + Application OAuthApplication @relation(fields: [applicationId], references: [id], onDelete: Cascade) + + userId String + User User @relation(fields: [userId], references: [id], onDelete: Cascade) + + scopes APIKeyPermission[] + redirectUri String // Must match one from application + + // PKCE (Proof Key for Code Exchange) support + codeChallenge String? + codeChallengeMethod String? // "S256" or "plain" + + usedAt DateTime? // Set when code is consumed + + @@index([code]) + @@index([applicationId, userId]) + @@index([expiresAt]) // For cleanup +} + +// Access tokens (1 hour TTL) +model OAuthAccessToken { + id String @id @default(uuid()) + token String @unique // SHA256 hash of plaintext token + createdAt DateTime @default(now()) + expiresAt DateTime // Now + 1 hour + + applicationId String + Application OAuthApplication @relation(fields: [applicationId], references: [id], onDelete: Cascade) + + userId String + User User @relation(fields: [userId], references: [id], onDelete: Cascade) + + scopes APIKeyPermission[] + + revokedAt DateTime? // Set when token is revoked + + @@index([token]) // For token lookup + @@index([userId, applicationId]) + @@index([expiresAt]) // For cleanup +} + +// Refresh tokens (30 days TTL) +model OAuthRefreshToken { + id String @id @default(uuid()) + token String @unique // SHA256 hash of plaintext token + createdAt DateTime @default(now()) + expiresAt DateTime // Now + 30 days + + applicationId String + Application OAuthApplication @relation(fields: [applicationId], references: [id], onDelete: Cascade) + + userId String + User User @relation(fields: [userId], references: [id], onDelete: Cascade) + + scopes APIKeyPermission[] + + revokedAt DateTime? 
// Set when token is revoked + + @@index([token]) // For token lookup + @@index([userId, applicationId]) + @@index([expiresAt]) // For cleanup +} diff --git a/autogpt_platform/backend/test/e2e_test_data.py b/autogpt_platform/backend/test/e2e_test_data.py index 013c8c11a7..943c506f5c 100644 --- a/autogpt_platform/backend/test/e2e_test_data.py +++ b/autogpt_platform/backend/test/e2e_test_data.py @@ -23,13 +23,13 @@ from typing import Any, Dict, List from faker import Faker -from backend.data.api_key import create_api_key +from backend.data.auth.api_key import create_api_key from backend.data.credit import get_user_credit_model from backend.data.db import prisma from backend.data.graph import Graph, Link, Node, create_graph +from backend.data.user import get_or_create_user # Import API functions from the backend -from backend.data.user import get_or_create_user from backend.server.v2.library.db import create_library_agent, create_preset from backend.server.v2.library.model import LibraryAgentPresetCreatable from backend.server.v2.store.db import create_store_submission, review_store_submission @@ -464,7 +464,7 @@ class TestDataCreator: api_keys = [] for user in self.users: - from backend.data.api_key import APIKeyPermission + from backend.data.auth.api_key import APIKeyPermission try: # Use the API function to create API key diff --git a/autogpt_platform/frontend/src/app/(platform)/auth/authorize/page.tsx b/autogpt_platform/frontend/src/app/(platform)/auth/authorize/page.tsx new file mode 100644 index 0000000000..8093b75965 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/auth/authorize/page.tsx @@ -0,0 +1,296 @@ +"use client"; + +import { useState } from "react"; +import { useSearchParams } from "next/navigation"; +import { AuthCard } from "@/components/auth/AuthCard"; +import { Text } from "@/components/atoms/Text/Text"; +import { Button } from "@/components/atoms/Button/Button"; +import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner"; +import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard"; +import { ImageIcon, SealCheckIcon } from "@phosphor-icons/react"; +import { + postOauthAuthorize, + useGetOauthGetOauthAppInfo, +} from "@/app/api/__generated__/endpoints/oauth/oauth"; +import type { APIKeyPermission } from "@/app/api/__generated__/models/aPIKeyPermission"; + +// Human-readable scope descriptions +const SCOPE_DESCRIPTIONS: { [key in APIKeyPermission]: string } = { + IDENTITY: "Read user ID, name, e-mail, and timezone", + EXECUTE_GRAPH: "Run your agents", + READ_GRAPH: "View your agents and their configurations", + EXECUTE_BLOCK: "Execute individual blocks", + READ_BLOCK: "View available blocks", + READ_STORE: "Access the Marketplace", + USE_TOOLS: "Use tools on your behalf", + MANAGE_INTEGRATIONS: "Set up new integrations", + READ_INTEGRATIONS: "View your connected integrations", + DELETE_INTEGRATIONS: "Remove connected integrations", +}; + +export default function AuthorizePage() { + const searchParams = useSearchParams(); + + // Extract OAuth parameters from URL + const clientID = searchParams.get("client_id"); + const redirectURI = searchParams.get("redirect_uri"); + const scope = searchParams.get("scope"); + const state = searchParams.get("state"); + const codeChallenge = searchParams.get("code_challenge"); + const codeChallengeMethod = + searchParams.get("code_challenge_method") || "S256"; + const responseType = searchParams.get("response_type") || "code"; + + // Parse requested scopes + const requestedScopes = 
scope?.split(" ").filter(Boolean) || []; + + // Fetch application info using generated hook + const { + data: appInfoResponse, + isLoading, + error, + refetch, + } = useGetOauthGetOauthAppInfo(clientID || "", { + query: { + enabled: !!clientID, + staleTime: Infinity, + refetchOnMount: false, + refetchOnWindowFocus: false, + refetchOnReconnect: false, + }, + }); + + const appInfo = appInfoResponse?.status === 200 ? appInfoResponse.data : null; + + // Validate required parameters + const missingParams: string[] = []; + if (!clientID) missingParams.push("client_id"); + if (!redirectURI) missingParams.push("redirect_uri"); + if (!scope) missingParams.push("scope"); + if (!state) missingParams.push("state"); + if (!codeChallenge) missingParams.push("code_challenge"); + + const [isAuthorizing, setIsAuthorizing] = useState(false); + const [authorizeError, setAuthorizeError] = useState(null); + + async function handleApprove() { + setIsAuthorizing(true); + setAuthorizeError(null); + + try { + // Call the backend /oauth/authorize POST endpoint + // Returns JSON with redirect_url that we use to redirect the user + const response = await postOauthAuthorize({ + client_id: clientID!, + redirect_uri: redirectURI!, + scopes: requestedScopes, + state: state!, + response_type: responseType, + code_challenge: codeChallenge!, + code_challenge_method: codeChallengeMethod as "S256" | "plain", + }); + + if (response.status === 200 && response.data.redirect_url) { + window.location.href = response.data.redirect_url; + } else { + setAuthorizeError("Authorization failed: no redirect URL received"); + setIsAuthorizing(false); + } + } catch (err) { + console.error("Authorization error:", err); + setAuthorizeError( + err instanceof Error ? err.message : "Authorization failed", + ); + setIsAuthorizing(false); + } + } + + function handleDeny() { + // Redirect back to client with access_denied error + const params = new URLSearchParams({ + error: "access_denied", + error_description: "User denied access", + state: state || "", + }); + window.location.href = `${redirectURI}?${params.toString()}`; + } + + // Show error if missing required parameters + if (missingParams.length > 0) { + return ( +
+ + + +
+ ); + } + + // Show loading state + if (isLoading) { + return ( +
+ +
+ + + Loading application information... + +
+
+
+ ); + } + + // Show error if app not found + if (error || !appInfo) { + return ( +
+ + + {redirectURI && ( + + )} + +
+ ); + } + + // Validate that requested scopes are allowed by the app + const invalidScopes = requestedScopes.filter( + (s) => !appInfo.scopes.includes(s), + ); + + if (invalidScopes.length > 0) { + return ( +
+ + + + +
+ ); + } + + return ( +
+ +
+ {/* App info */} +
+ {/* App logo */} +
+ {appInfo.logo_url ? ( + // eslint-disable-next-line @next/next/no-img-element + {`${appInfo.name} + ) : ( + + )} +
+ + {appInfo.name} + + {appInfo.description && ( + + {appInfo.description} + + )} +
+ + {/* Permissions */} +
+ + This application is requesting permission to: + +
    + {requestedScopes.map((scopeKey) => ( +
  • + + + {SCOPE_DESCRIPTIONS[scopeKey as APIKeyPermission] || + scopeKey} + +
  • + ))} +
+
+ + {/* Error message */} + {authorizeError && ( + + )} + + {/* Action buttons */} +
+ + +
+ + {/* Warning */} + + By authorizing, you allow this application to access your AutoGPT + account with the permissions listed above. + +
+
+
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/auth/callback/route.ts b/autogpt_platform/frontend/src/app/(platform)/auth/callback/route.ts index bff2fd0b68..13f8d988fe 100644 --- a/autogpt_platform/frontend/src/app/(platform)/auth/callback/route.ts +++ b/autogpt_platform/frontend/src/app/(platform)/auth/callback/route.ts @@ -74,6 +74,9 @@ export async function GET(request: Request) { ); } + // Get redirect destination from 'next' query parameter + next = searchParams.get("next") || next; + const forwardedHost = request.headers.get("x-forwarded-host"); // original origin before load balancer const isLocalEnv = process.env.NODE_ENV === "development"; if (isLocalEnv) { diff --git a/autogpt_platform/frontend/src/app/(platform)/auth/integrations/setup-wizard/page.tsx b/autogpt_platform/frontend/src/app/(platform)/auth/integrations/setup-wizard/page.tsx new file mode 100644 index 0000000000..5163c46d5b --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/auth/integrations/setup-wizard/page.tsx @@ -0,0 +1,331 @@ +"use client"; + +import Image from "next/image"; +import Link from "next/link"; +import { useSearchParams } from "next/navigation"; +import { useState, useMemo, useRef } from "react"; +import { AuthCard } from "@/components/auth/AuthCard"; +import { Text } from "@/components/atoms/Text/Text"; +import { Button } from "@/components/atoms/Button/Button"; +import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard"; +import { CredentialsInput } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs"; +import type { + BlockIOCredentialsSubSchema, + CredentialsMetaInput, + CredentialsType, +} from "@/lib/autogpt-server-api"; +import { CheckIcon, CircleIcon } from "@phosphor-icons/react"; +import { useGetOauthGetOauthAppInfo } from "@/app/api/__generated__/endpoints/oauth/oauth"; +import { okData } from "@/app/api/helpers"; +import { OAuthApplicationPublicInfo } from "@/app/api/__generated__/models/oAuthApplicationPublicInfo"; + +// All credential types - we accept any type of credential +const ALL_CREDENTIAL_TYPES: CredentialsType[] = [ + "api_key", + "oauth2", + "user_password", + "host_scoped", +]; + +/** + * Provider configuration for the setup wizard. + * + * Query parameters: + * - `providers`: base64-encoded JSON array of { provider, scopes? } objects + * - `app_name`: (optional) Name of the requesting application + * - `redirect_uri`: Where to redirect after completion + * - `state`: Anti-CSRF token + * + * Example `providers` JSON: + * [ + * { "provider": "google", "scopes": ["https://www.googleapis.com/auth/gmail.readonly"] }, + * { "provider": "github", "scopes": ["repo"] } + * ] + * + * Example URL: + * /auth/integrations/setup-wizard?app_name=My%20App&providers=W3sicHJvdmlkZXIiOiJnb29nbGUifV0=&redirect_uri=... 
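+ *
+ * Sketch of building the `providers` value on the caller side (an assumption
+ * about client usage, not code from this page); it is simply the inverse of
+ * the atob + JSON.parse decoding done in parseProvidersParam below:
+ *
+ *   const providersParam = btoa(
+ *     JSON.stringify([{ provider: "google" }, { provider: "github", scopes: ["repo"] }]),
+ *   );
+ *   // then append as ?providers=<providersParam>&redirect_uri=...&state=...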
+ */ +interface ProviderConfig { + provider: string; + scopes?: string[]; +} + +function createSchemaFromProviderConfig( + config: ProviderConfig, +): BlockIOCredentialsSubSchema { + return { + type: "object", + properties: {}, + credentials_provider: [config.provider], + credentials_types: ALL_CREDENTIAL_TYPES, + credentials_scopes: config.scopes, + discriminator: undefined, + discriminator_mapping: undefined, + discriminator_values: undefined, + }; +} + +function toDisplayName(provider: string): string { + // Convert snake_case or kebab-case to Title Case + return provider + .split(/[_-]/) + .map((word) => word.charAt(0).toUpperCase() + word.slice(1)) + .join(" "); +} + +function parseProvidersParam(providersParam: string): ProviderConfig[] { + try { + // Decode base64 and parse JSON + const decoded = atob(providersParam); + const parsed = JSON.parse(decoded); + + if (!Array.isArray(parsed)) { + console.warn("providers parameter is not an array"); + return []; + } + + return parsed.filter( + (item): item is ProviderConfig => + typeof item === "object" && + item !== null && + typeof item.provider === "string", + ); + } catch (error) { + console.warn("Failed to parse providers parameter:", error); + return []; + } +} + +export default function IntegrationSetupWizardPage() { + const searchParams = useSearchParams(); + + // Extract query parameters + // `providers` is a base64-encoded JSON array of { provider, scopes?: string[] } objects + const clientID = searchParams.get("client_id"); + const providersParam = searchParams.get("providers"); + const redirectURI = searchParams.get("redirect_uri"); + const state = searchParams.get("state"); + + const { data: appInfo } = useGetOauthGetOauthAppInfo(clientID || "", { + query: { enabled: !!clientID, select: okData }, + }); + + // Parse providers from base64-encoded JSON + const providerConfigs = useMemo(() => { + if (!providersParam) return []; + return parseProvidersParam(providersParam); + }, [providersParam]); + + // Track selected credentials for each provider + const [selectedCredentials, setSelectedCredentials] = useState< + Record + >({}); + + // Track if we've already redirected + const hasRedirectedRef = useRef(false); + + // Check if all providers have credentials + const isAllComplete = useMemo(() => { + if (providerConfigs.length === 0) return false; + return providerConfigs.every( + (config) => selectedCredentials[config.provider], + ); + }, [providerConfigs, selectedCredentials]); + + // Handle credential selection + const handleCredentialSelect = ( + provider: string, + credential?: CredentialsMetaInput, + ) => { + setSelectedCredentials((prev) => ({ + ...prev, + [provider]: credential, + })); + }; + + // Handle completion - redirect back to client + const handleComplete = () => { + if (!redirectURI || hasRedirectedRef.current) return; + hasRedirectedRef.current = true; + + const params = new URLSearchParams({ + success: "true", + }); + if (state) { + params.set("state", state); + } + + window.location.href = `${redirectURI}?${params.toString()}`; + }; + + // Handle cancel - redirect back to client with error + const handleCancel = () => { + if (!redirectURI || hasRedirectedRef.current) return; + hasRedirectedRef.current = true; + + const params = new URLSearchParams({ + error: "user_cancelled", + error_description: "User cancelled the integration setup", + }); + if (state) { + params.set("state", state); + } + + window.location.href = `${redirectURI}?${params.toString()}`; + }; + + // Validate required parameters + const 
missingParams: string[] = []; + if (!providersParam) missingParams.push("providers"); + if (!redirectURI) missingParams.push("redirect_uri"); + + if (missingParams.length > 0) { + return ( +
+ + + +
+ ); + } + + if (providerConfigs.length === 0) { + return ( +
+ + + + +
+ ); + } + + return ( +
+ +
+ + {appInfo ? ( + <> + {appInfo.name} is requesting you to connect the + following integrations to your AutoGPT account. + + ) : ( + "Please connect the following integrations to continue." + )} + + + {/* Provider credentials list */} +
+ {providerConfigs.map((config) => { + const schema = createSchemaFromProviderConfig(config); + const isSelected = !!selectedCredentials[config.provider]; + + return ( +
+
+
+ {`${config.provider} +
+ + {toDisplayName(config.provider)} + +
+ {isSelected ? ( + + ) : ( + + )} + {isSelected && ( + + Connected + + )} +
+ + + handleCredentialSelect(config.provider, credMeta) + } + showTitle={false} + className="mb-0" + /> +
+ ); + })} +
+ + {/* Action buttons */} +
+ + +
+ + {/* Link to integrations settings */} + + You can view and manage all your integrations in your{" "} + + integration settings + + . + +
+
+
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs.tsx index e63105c751..07350fb610 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs.tsx @@ -15,13 +15,14 @@ import { HostScopedCredentialsModal } from "./components/HotScopedCredentialsMod import { OAuthFlowWaitingModal } from "./components/OAuthWaitingModal/OAuthWaitingModal"; import { PasswordCredentialsModal } from "./components/PasswordCredentialsModal/PasswordCredentialsModal"; import { getCredentialDisplayName } from "./helpers"; -import { useCredentialsInputs } from "./useCredentialsInputs"; - -type UseCredentialsInputsReturn = ReturnType; +import { + CredentialsInputState, + useCredentialsInput, +} from "./useCredentialsInput"; function isLoaded( - data: UseCredentialsInputsReturn, -): data is Extract { + data: CredentialsInputState, +): data is Extract { return data.isLoading === false; } @@ -33,21 +34,23 @@ type Props = { onSelectCredentials: (newValue?: CredentialsMetaInput) => void; onLoaded?: (loaded: boolean) => void; readOnly?: boolean; + showTitle?: boolean; }; export function CredentialsInput({ schema, className, - selectedCredentials, - onSelectCredentials, + selectedCredentials: selectedCredential, + onSelectCredentials: onSelectCredential, siblingInputs, onLoaded, readOnly = false, + showTitle = true, }: Props) { - const hookData = useCredentialsInputs({ + const hookData = useCredentialsInput({ schema, - selectedCredentials, - onSelectCredentials, + selectedCredential, + onSelectCredential, siblingInputs, onLoaded, readOnly, @@ -89,12 +92,14 @@ export function CredentialsInput({ return (
-
- {displayName} credentials - {schema.description && ( - - )} -
+ {showTitle && ( +
+ {displayName} credentials + {schema.description && ( + + )} +
+ )} {hasCredentialsToShow ? ( <> @@ -103,7 +108,7 @@ export function CredentialsInput({ credentials={credentialsToShow} provider={provider} displayName={displayName} - selectedCredentials={selectedCredentials} + selectedCredentials={selectedCredential} onSelectCredential={handleCredentialSelect} readOnly={readOnly} /> @@ -164,7 +169,7 @@ export function CredentialsInput({ open={isAPICredentialsModalOpen} onClose={() => setAPICredentialsModalOpen(false)} onCredentialsCreate={(credsMeta) => { - onSelectCredentials(credsMeta); + onSelectCredential(credsMeta); setAPICredentialsModalOpen(false); }} siblingInputs={siblingInputs} @@ -183,7 +188,7 @@ export function CredentialsInput({ open={isUserPasswordCredentialsModalOpen} onClose={() => setUserPasswordCredentialsModalOpen(false)} onCredentialsCreate={(creds) => { - onSelectCredentials(creds); + onSelectCredential(creds); setUserPasswordCredentialsModalOpen(false); }} siblingInputs={siblingInputs} @@ -195,7 +200,7 @@ export function CredentialsInput({ open={isHostScopedCredentialsModalOpen} onClose={() => setHostScopedCredentialsModalOpen(false)} onCredentialsCreate={(creds) => { - onSelectCredentials(creds); + onSelectCredential(creds); setHostScopedCredentialsModalOpen(false); }} siblingInputs={siblingInputs} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/useCredentialsInputs.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/useCredentialsInput.ts similarity index 76% rename from autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/useCredentialsInputs.ts rename to autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/useCredentialsInput.ts index 460980c10b..6f5ca48126 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/useCredentialsInputs.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/useCredentialsInput.ts @@ -5,32 +5,33 @@ import { BlockIOCredentialsSubSchema, CredentialsMetaInput, } from "@/lib/autogpt-server-api/types"; -import { CredentialsProvidersContext } from "@/providers/agent-credentials/credentials-provider"; import { useQueryClient } from "@tanstack/react-query"; -import { useContext, useEffect, useMemo, useState } from "react"; +import { useEffect, useMemo, useState } from "react"; import { getActionButtonText, OAUTH_TIMEOUT_MS, OAuthPopupResultMessage, } from "./helpers"; -type Args = { +export type CredentialsInputState = ReturnType; + +type Params = { schema: BlockIOCredentialsSubSchema; - selectedCredentials?: CredentialsMetaInput; - onSelectCredentials: (newValue?: CredentialsMetaInput) => void; + selectedCredential?: CredentialsMetaInput; + onSelectCredential: (newValue?: CredentialsMetaInput) => void; siblingInputs?: Record; onLoaded?: (loaded: boolean) => void; readOnly?: boolean; }; -export function useCredentialsInputs({ +export function useCredentialsInput({ schema, - selectedCredentials, - onSelectCredentials, + selectedCredential, + onSelectCredential, siblingInputs, onLoaded, readOnly = false, -}: Args) { +}: Params) { const [isAPICredentialsModalOpen, setAPICredentialsModalOpen] = 
useState(false); const [ @@ -51,7 +52,6 @@ export function useCredentialsInputs({ const api = useBackendAPI(); const queryClient = useQueryClient(); const credentials = useCredentials(schema, siblingInputs); - const allProviders = useContext(CredentialsProvidersContext); const deleteCredentialsMutation = useDeleteV1DeleteCredentials({ mutation: { @@ -63,57 +63,49 @@ export function useCredentialsInputs({ queryKey: [`/api/integrations/${credentials?.provider}/credentials`], }); setCredentialToDelete(null); - if (selectedCredentials?.id === credentialToDelete?.id) { - onSelectCredentials(undefined); + if (selectedCredential?.id === credentialToDelete?.id) { + onSelectCredential(undefined); } }, }, }); - const rawProvider = credentials - ? allProviders?.[credentials.provider as keyof typeof allProviders] - : null; - useEffect(() => { if (onLoaded) { onLoaded(Boolean(credentials && credentials.isLoading === false)); } }, [credentials, onLoaded]); + // Unselect credential if not available useEffect(() => { if (readOnly) return; if (!credentials || !("savedCredentials" in credentials)) return; if ( - selectedCredentials && - !credentials.savedCredentials.some((c) => c.id === selectedCredentials.id) + selectedCredential && + !credentials.savedCredentials.some((c) => c.id === selectedCredential.id) ) { - onSelectCredentials(undefined); + onSelectCredential(undefined); } - }, [credentials, selectedCredentials, onSelectCredentials, readOnly]); + }, [credentials, selectedCredential, onSelectCredential, readOnly]); - const { singleCredential } = useMemo(() => { + // The available credential, if there is only one + const singleCredential = useMemo(() => { if (!credentials || !("savedCredentials" in credentials)) { - return { - singleCredential: null, - }; + return null; } - const single = - credentials.savedCredentials.length === 1 - ? credentials.savedCredentials[0] - : null; - - return { - singleCredential: single, - }; + return credentials.savedCredentials.length === 1 + ? 
credentials.savedCredentials[0] + : null; }, [credentials]); + // Auto-select the one available credential useEffect(() => { if (readOnly) return; - if (singleCredential && !selectedCredentials) { - onSelectCredentials(singleCredential); + if (singleCredential && !selectedCredential) { + onSelectCredential(singleCredential); } - }, [singleCredential, selectedCredentials, onSelectCredentials, readOnly]); + }, [singleCredential, selectedCredential, onSelectCredential, readOnly]); if ( !credentials || @@ -136,25 +128,6 @@ export function useCredentialsInputs({ oAuthCallback, } = credentials; - const allSavedCredentials = rawProvider?.savedCredentials || savedCredentials; - - const credentialsToShow = (() => { - const creds = [...allSavedCredentials]; - if ( - !readOnly && - selectedCredentials && - !creds.some((c) => c.id === selectedCredentials.id) - ) { - creds.push({ - id: selectedCredentials.id, - type: selectedCredentials.type, - title: selectedCredentials.title || "Selected credential", - provider: provider, - } as any); - } - return creds; - })(); - async function handleOAuthLogin() { setOAuthError(null); const { login_url, state_token } = await api.oAuthLogin( @@ -207,7 +180,31 @@ export function useCredentialsInputs({ console.debug("Processing OAuth callback"); const credentials = await oAuthCallback(e.data.code, e.data.state); console.debug("OAuth callback processed successfully"); - onSelectCredentials({ + + // Check if the credential's scopes match the required scopes + const requiredScopes = schema.credentials_scopes; + if (requiredScopes && requiredScopes.length > 0) { + const grantedScopes = new Set(credentials.scopes || []); + const hasAllRequiredScopes = new Set(requiredScopes).isSubsetOf( + grantedScopes, + ); + + if (!hasAllRequiredScopes) { + console.error( + `Newly created OAuth credential for ${providerName} has insufficient scopes. Required:`, + requiredScopes, + "Granted:", + credentials.scopes, + ); + setOAuthError( + "Connection failed: the granted permissions don't match what's required. 
" + + "Please contact the application administrator.", + ); + return; + } + } + + onSelectCredential({ id: credentials.id, type: "oauth2", title: credentials.title, @@ -253,9 +250,9 @@ export function useCredentialsInputs({ } function handleCredentialSelect(credentialId: string) { - const selectedCreds = credentialsToShow.find((c) => c.id === credentialId); + const selectedCreds = savedCredentials.find((c) => c.id === credentialId); if (selectedCreds) { - onSelectCredentials({ + onSelectCredential({ id: selectedCreds.id, type: selectedCreds.type, provider: provider, @@ -285,8 +282,8 @@ export function useCredentialsInputs({ supportsOAuth2, supportsUserPassword, supportsHostScoped, - credentialsToShow, - selectedCredentials, + credentialsToShow: savedCredentials, + selectedCredential, oAuthError, isAPICredentialsModalOpen, isUserPasswordCredentialsModalOpen, @@ -300,7 +297,7 @@ export function useCredentialsInputs({ supportsApiKey, supportsUserPassword, supportsHostScoped, - credentialsToShow.length > 0, + savedCredentials.length > 0, ), setAPICredentialsModalOpen, setUserPasswordCredentialsModalOpen, @@ -311,7 +308,7 @@ export function useCredentialsInputs({ handleDeleteCredential, handleDeleteConfirm, handleOAuthLogin, - onSelectCredentials, + onSelectCredential, schema, siblingInputs, }; diff --git a/autogpt_platform/frontend/src/app/(platform)/login/page.tsx b/autogpt_platform/frontend/src/app/(platform)/login/page.tsx index 3f06e7f429..b670be5127 100644 --- a/autogpt_platform/frontend/src/app/(platform)/login/page.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/login/page.tsx @@ -11,8 +11,16 @@ import { environment } from "@/services/environment"; import { LoadingLogin } from "./components/LoadingLogin"; import { useLoginPage } from "./useLoginPage"; import { MobileWarningBanner } from "@/components/auth/MobileWarningBanner"; +import { useSearchParams } from "next/navigation"; export default function LoginPage() { + const searchParams = useSearchParams(); + const nextUrl = searchParams.get("next"); + // Preserve next parameter when switching between login/signup + const signupHref = nextUrl + ? 
`/signup?next=${encodeURIComponent(nextUrl)}` + : "/signup"; + const { user, form, @@ -108,7 +116,7 @@ export default function LoginPage() { diff --git a/autogpt_platform/frontend/src/app/(platform)/login/useLoginPage.ts b/autogpt_platform/frontend/src/app/(platform)/login/useLoginPage.ts index a1e8b5a92c..656e1febc2 100644 --- a/autogpt_platform/frontend/src/app/(platform)/login/useLoginPage.ts +++ b/autogpt_platform/frontend/src/app/(platform)/login/useLoginPage.ts @@ -3,7 +3,7 @@ import { useSupabase } from "@/lib/supabase/hooks/useSupabase"; import { environment } from "@/services/environment"; import { loginFormSchema, LoginProvider } from "@/types/auth"; import { zodResolver } from "@hookform/resolvers/zod"; -import { useRouter } from "next/navigation"; +import { useRouter, useSearchParams } from "next/navigation"; import { useEffect, useState } from "react"; import { useForm } from "react-hook-form"; import z from "zod"; @@ -13,6 +13,7 @@ export function useLoginPage() { const { supabase, user, isUserLoading, isLoggedIn } = useSupabase(); const [feedback, setFeedback] = useState(null); const router = useRouter(); + const searchParams = useSearchParams(); const { toast } = useToast(); const [isLoading, setIsLoading] = useState(false); const [isLoggingIn, setIsLoggingIn] = useState(false); @@ -20,11 +21,14 @@ export function useLoginPage() { const [showNotAllowedModal, setShowNotAllowedModal] = useState(false); const isCloudEnv = environment.isCloud(); + // Get redirect destination from 'next' query parameter + const nextUrl = searchParams.get("next"); + useEffect(() => { if (isLoggedIn && !isLoggingIn) { - router.push("/marketplace"); + router.push(nextUrl || "/marketplace"); } - }, [isLoggedIn, isLoggingIn]); + }, [isLoggedIn, isLoggingIn, nextUrl, router]); const form = useForm>({ resolver: zodResolver(loginFormSchema), @@ -39,10 +43,16 @@ export function useLoginPage() { setIsLoggingIn(true); try { + // Include next URL in OAuth flow if present + const callbackUrl = nextUrl + ? 
`/auth/callback?next=${encodeURIComponent(nextUrl)}` + : `/auth/callback`; + const fullCallbackUrl = `${window.location.origin}${callbackUrl}`; + const response = await fetch("/api/auth/provider", { method: "POST", headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ provider }), + body: JSON.stringify({ provider, redirectTo: fullCallbackUrl }), }); if (!response.ok) { @@ -83,7 +93,9 @@ export function useLoginPage() { throw new Error(result.error || "Login failed"); } - if (result.onboarding) { + if (nextUrl) { + router.replace(nextUrl); + } else if (result.onboarding) { router.replace("/onboarding"); } else { router.replace("/marketplace"); diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/components/APIKeySection/APIKeySection.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeySection/APIKeySection.tsx similarity index 100% rename from autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/components/APIKeySection/APIKeySection.tsx rename to autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeySection/APIKeySection.tsx diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/components/APIKeySection/useAPISection.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeySection/useAPISection.ts similarity index 100% rename from autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/components/APIKeySection/useAPISection.tsx rename to autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeySection/useAPISection.ts diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/components/APIKeysModals/APIKeysModals.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeysModals/APIKeysModals.tsx similarity index 100% rename from autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/components/APIKeysModals/APIKeysModals.tsx rename to autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeysModals/APIKeysModals.tsx diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/components/APIKeysModals/useAPIkeysModals.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeysModals/useAPIkeysModals.ts similarity index 100% rename from autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/components/APIKeysModals/useAPIkeysModals.tsx rename to autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeysModals/useAPIkeysModals.ts diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/page.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/page.tsx similarity index 94% rename from autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/page.tsx rename to autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/page.tsx index ca66f0fb85..aedc3cc60c 100644 --- a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api_keys/page.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/page.tsx @@ -1,5 +1,5 @@ import { Metadata } from "next/types"; -import { APIKeysSection } from "@/app/(platform)/profile/(user)/api_keys/components/APIKeySection/APIKeySection"; +import { APIKeysSection } from 
"@/app/(platform)/profile/(user)/api-keys/components/APIKeySection/APIKeySection"; import { Card, CardContent, diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/layout.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/layout.tsx index 800028a49f..ca0e846557 100644 --- a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/layout.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/layout.tsx @@ -3,13 +3,14 @@ import * as React from "react"; import { Sidebar } from "@/components/__legacy__/Sidebar"; import { - IconDashboardLayout, - IconIntegrations, - IconProfile, - IconSliders, - IconCoin, -} from "@/components/__legacy__/ui/icons"; -import { KeyIcon } from "lucide-react"; + AppWindowIcon, + CoinsIcon, + KeyIcon, + PlugsIcon, + SlidersHorizontalIcon, + StorefrontIcon, + UserCircleIcon, +} from "@phosphor-icons/react"; import { useGetFlag, Flag } from "@/services/feature-flags/use-get-flag"; export default function Layout({ children }: { children: React.ReactNode }) { @@ -18,39 +19,44 @@ export default function Layout({ children }: { children: React.ReactNode }) { const sidebarLinkGroups = [ { links: [ + { + text: "Profile", + href: "/profile", + icon: , + }, { text: "Creator Dashboard", href: "/profile/dashboard", - icon: , + icon: , }, - ...(isPaymentEnabled + ...(isPaymentEnabled || true ? [ { text: "Billing", href: "/profile/credits", - icon: , + icon: , }, ] : []), { text: "Integrations", href: "/profile/integrations", - icon: , - }, - { - text: "API Keys", - href: "/profile/api_keys", - icon: , - }, - { - text: "Profile", - href: "/profile", - icon: , + icon: , }, { text: "Settings", href: "/profile/settings", - icon: , + icon: , + }, + { + text: "API Keys", + href: "/profile/api-keys", + icon: , + }, + { + text: "OAuth Apps", + href: "/profile/oauth-apps", + icon: , }, ], }, diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/OAuthAppsSection.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/OAuthAppsSection.tsx new file mode 100644 index 0000000000..a864199348 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/OAuthAppsSection.tsx @@ -0,0 +1,147 @@ +"use client"; + +import { useRef } from "react"; +import { UploadIcon, ImageIcon, PowerIcon } from "@phosphor-icons/react"; +import { Button } from "@/components/atoms/Button/Button"; +import { Badge } from "@/components/atoms/Badge/Badge"; +import { useOAuthApps } from "./useOAuthApps"; +import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner"; + +export function OAuthAppsSection() { + const { + oauthApps, + isLoading, + updatingAppId, + uploadingAppId, + handleToggleStatus, + handleUploadLogo, + } = useOAuthApps(); + + const fileInputRefs = useRef<{ [key: string]: HTMLInputElement | null }>({}); + + const handleFileChange = ( + appId: string, + event: React.ChangeEvent, + ) => { + const file = event.target.files?.[0]; + if (file) { + handleUploadLogo(appId, file); + } + // Reset the input so the same file can be selected again + event.target.value = ""; + }; + + if (isLoading) { + return ( +
+ +
+ ); + } + + if (oauthApps.length === 0) { + return ( +
+

You don't have any OAuth applications.

+

          OAuth applications cannot currently be registered via the API.
          Contact the system administrator to request an OAuth app
          registration.

+
+ ); + } + + return ( +
+ {oauthApps.map((app) => ( +
+ {/* Header: Logo, Name, Status */} +
+
+ {app.logo_url ? ( + // eslint-disable-next-line @next/next/no-img-element + {`${app.name} + ) : ( + + )} +
+
+
+

{app.name}

+ + {app.is_active ? "Active" : "Disabled"} + +
+ {app.description && ( +

+ {app.description} +

+ )} +
+
+ + {/* Client ID */} +
+ + Client ID + + + {app.client_id} + +
+ + {/* Footer: Created date and Actions */} +
+ + Created {new Date(app.created_at).toLocaleDateString()} + +
+ + { + fileInputRefs.current[app.id] = el; + }} + onChange={(e) => handleFileChange(app.id, e)} + accept="image/jpeg,image/png,image/webp" + className="hidden" + /> + +
+
+
+ ))} +
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/useOAuthApps.ts b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/useOAuthApps.ts new file mode 100644 index 0000000000..5b5afc5783 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/useOAuthApps.ts @@ -0,0 +1,110 @@ +"use client"; + +import { useState } from "react"; +import { + useGetOauthListMyOauthApps, + usePatchOauthUpdateAppStatus, + usePostOauthUploadAppLogo, + getGetOauthListMyOauthAppsQueryKey, +} from "@/app/api/__generated__/endpoints/oauth/oauth"; +import { OAuthApplicationInfo } from "@/app/api/__generated__/models/oAuthApplicationInfo"; +import { okData } from "@/app/api/helpers"; +import { useToast } from "@/components/molecules/Toast/use-toast"; +import { getQueryClient } from "@/lib/react-query/queryClient"; + +export const useOAuthApps = () => { + const queryClient = getQueryClient(); + const { toast } = useToast(); + const [updatingAppId, setUpdatingAppId] = useState(null); + const [uploadingAppId, setUploadingAppId] = useState(null); + + const { data: oauthAppsResponse, isLoading } = useGetOauthListMyOauthApps({ + query: { select: okData }, + }); + + const { mutateAsync: updateStatus } = usePatchOauthUpdateAppStatus({ + mutation: { + onSettled: () => { + return queryClient.invalidateQueries({ + queryKey: getGetOauthListMyOauthAppsQueryKey(), + }); + }, + }, + }); + + const { mutateAsync: uploadLogo } = usePostOauthUploadAppLogo({ + mutation: { + onSettled: () => { + return queryClient.invalidateQueries({ + queryKey: getGetOauthListMyOauthAppsQueryKey(), + }); + }, + }, + }); + + const handleToggleStatus = async (appId: string, currentStatus: boolean) => { + try { + setUpdatingAppId(appId); + const result = await updateStatus({ + appId, + data: { is_active: !currentStatus }, + }); + + if (result.status === 200) { + toast({ + title: "Success", + description: `Application ${result.data.is_active ? "enabled" : "disabled"} successfully`, + }); + } else { + throw new Error("Failed to update status"); + } + } catch { + toast({ + title: "Error", + description: "Failed to update application status", + variant: "destructive", + }); + } finally { + setUpdatingAppId(null); + } + }; + + const handleUploadLogo = async (appId: string, file: File) => { + try { + setUploadingAppId(appId); + const result = await uploadLogo({ + appId, + data: { file }, + }); + + if (result.status === 200) { + toast({ + title: "Success", + description: "Logo uploaded successfully", + }); + } else { + throw new Error("Failed to upload logo"); + } + } catch (error) { + console.error("Failed to upload logo:", error); + const errorMessage = + error instanceof Error ? error.message : "Failed to upload logo"; + toast({ + title: "Error", + description: errorMessage, + variant: "destructive", + }); + } finally { + setUploadingAppId(null); + } + }; + + return { + oauthApps: oauthAppsResponse ?? 
[], + isLoading, + updatingAppId, + uploadingAppId, + handleToggleStatus, + handleUploadLogo, + }; +}; diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/page.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/page.tsx new file mode 100644 index 0000000000..4251bb954e --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/page.tsx @@ -0,0 +1,21 @@ +import { Metadata } from "next/types"; +import { Text } from "@/components/atoms/Text/Text"; +import { OAuthAppsSection } from "./components/OAuthAppsSection"; + +export const metadata: Metadata = { title: "OAuth Apps - AutoGPT Platform" }; + +const OAuthAppsPage = () => { + return ( +
+
+ OAuth Applications + + Manage your OAuth applications that use the AutoGPT Platform API + +
+ +
+ ); +}; + +export default OAuthAppsPage; diff --git a/autogpt_platform/frontend/src/app/(platform)/signup/page.tsx b/autogpt_platform/frontend/src/app/(platform)/signup/page.tsx index 53c47eeba7..b565699426 100644 --- a/autogpt_platform/frontend/src/app/(platform)/signup/page.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/signup/page.tsx @@ -21,8 +21,16 @@ import { WarningOctagonIcon } from "@phosphor-icons/react/dist/ssr"; import { LoadingSignup } from "./components/LoadingSignup"; import { useSignupPage } from "./useSignupPage"; import { MobileWarningBanner } from "@/components/auth/MobileWarningBanner"; +import { useSearchParams } from "next/navigation"; export default function SignupPage() { + const searchParams = useSearchParams(); + const nextUrl = searchParams.get("next"); + // Preserve next parameter when switching between login/signup + const loginHref = nextUrl + ? `/login?next=${encodeURIComponent(nextUrl)}` + : "/login"; + const { form, feedback, @@ -186,7 +194,7 @@ export default function SignupPage() { diff --git a/autogpt_platform/frontend/src/app/(platform)/signup/useSignupPage.ts b/autogpt_platform/frontend/src/app/(platform)/signup/useSignupPage.ts index 23ee8fb57c..e6d7c68aef 100644 --- a/autogpt_platform/frontend/src/app/(platform)/signup/useSignupPage.ts +++ b/autogpt_platform/frontend/src/app/(platform)/signup/useSignupPage.ts @@ -3,7 +3,7 @@ import { useSupabase } from "@/lib/supabase/hooks/useSupabase"; import { environment } from "@/services/environment"; import { LoginProvider, signupFormSchema } from "@/types/auth"; import { zodResolver } from "@hookform/resolvers/zod"; -import { useRouter } from "next/navigation"; +import { useRouter, useSearchParams } from "next/navigation"; import { useEffect, useState } from "react"; import { useForm } from "react-hook-form"; import z from "zod"; @@ -14,17 +14,21 @@ export function useSignupPage() { const [feedback, setFeedback] = useState(null); const { toast } = useToast(); const router = useRouter(); + const searchParams = useSearchParams(); const [isLoading, setIsLoading] = useState(false); const [isSigningUp, setIsSigningUp] = useState(false); const [isGoogleLoading, setIsGoogleLoading] = useState(false); const [showNotAllowedModal, setShowNotAllowedModal] = useState(false); const isCloudEnv = environment.isCloud(); + // Get redirect destination from 'next' query parameter + const nextUrl = searchParams.get("next"); + useEffect(() => { if (isLoggedIn && !isSigningUp) { - router.push("/marketplace"); + router.push(nextUrl || "/marketplace"); } - }, [isLoggedIn, isSigningUp]); + }, [isLoggedIn, isSigningUp, nextUrl, router]); const form = useForm>({ resolver: zodResolver(signupFormSchema), @@ -41,10 +45,16 @@ export function useSignupPage() { setIsSigningUp(true); try { + // Include next URL in OAuth flow if present + const callbackUrl = nextUrl + ? 
`/auth/callback?next=${encodeURIComponent(nextUrl)}` + : `/auth/callback`; + const fullCallbackUrl = `${window.location.origin}${callbackUrl}`; + const response = await fetch("/api/auth/provider", { method: "POST", headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ provider }), + body: JSON.stringify({ provider, redirectTo: fullCallbackUrl }), }); if (!response.ok) { @@ -118,8 +128,9 @@ export function useSignupPage() { return; } - const next = result.next || "/"; - if (next) router.replace(next); + // Prefer the URL's next parameter, then result.next (for onboarding), then default + const redirectTo = nextUrl || result.next || "/"; + router.replace(redirectTo); } catch (error) { setIsLoading(false); setIsSigningUp(false); diff --git a/autogpt_platform/frontend/src/app/api/mutators/custom-mutator.ts b/autogpt_platform/frontend/src/app/api/mutators/custom-mutator.ts index 0a31eb6942..315b68ab87 100644 --- a/autogpt_platform/frontend/src/app/api/mutators/custom-mutator.ts +++ b/autogpt_platform/frontend/src/app/api/mutators/custom-mutator.ts @@ -113,6 +113,19 @@ export const customMutator = async < body: data, }); + // Check if response is a redirect (3xx) and redirect is allowed + const allowRedirect = requestOptions.redirect !== "error"; + const isRedirect = response.status >= 300 && response.status < 400; + + // For redirect responses, return early without trying to parse body + if (allowRedirect && isRedirect) { + return { + status: response.status, + data: null, + headers: response.headers, + } as T; + } + if (!response.ok) { let responseData: any = null; try { diff --git a/autogpt_platform/frontend/src/app/api/openapi.json b/autogpt_platform/frontend/src/app/api/openapi.json index f8c5563476..3556e2f5c7 100644 --- a/autogpt_platform/frontend/src/app/api/openapi.json +++ b/autogpt_platform/frontend/src/app/api/openapi.json @@ -5370,6 +5370,369 @@ } } }, + "/api/oauth/app/{client_id}": { + "get": { + "tags": ["oauth"], + "summary": "Get Oauth App Info", + "description": "Get public information about an OAuth application.\n\nThis endpoint is used by the consent screen to display application details\nto the user before they authorize access.\n\nReturns:\n- name: Application name\n- description: Application description (if provided)\n- scopes: List of scopes the application is allowed to request", + "operationId": "getOauthGetOauthAppInfo", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "client_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Client Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OAuthApplicationPublicInfo" + } + } + } + }, + "404": { "description": "Application not found or disabled" }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + } + } + }, + "/api/oauth/authorize": { + "post": { + "tags": ["oauth"], + "summary": "Authorize", + "description": "OAuth 2.0 Authorization Endpoint\n\nUser must be logged in (authenticated with Supabase JWT).\nThis endpoint creates an authorization code and returns a redirect URL.\n\nPKCE (Proof Key for Code Exchange) is REQUIRED for all authorization requests.\n\nThe frontend consent screen should call this endpoint after the user approves,\nthen 
redirect the user to the returned `redirect_url`.\n\nRequest Body:\n- client_id: The OAuth application's client ID\n- redirect_uri: Where to redirect after authorization (must match registered URI)\n- scopes: List of permissions (e.g., \"EXECUTE_GRAPH READ_GRAPH\")\n- state: Anti-CSRF token provided by client (will be returned in redirect)\n- response_type: Must be \"code\" (for authorization code flow)\n- code_challenge: PKCE code challenge (required)\n- code_challenge_method: \"S256\" (recommended) or \"plain\"\n\nReturns:\n- redirect_url: The URL to redirect the user to (includes authorization code)\n\nError cases return a redirect_url with error parameters, or raise HTTPException\nfor critical errors (like invalid redirect_uri).", + "operationId": "postOauthAuthorize", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/AuthorizeRequest" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/AuthorizeResponse" } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/oauth/token": { + "post": { + "tags": ["oauth"], + "summary": "Token", + "description": "OAuth 2.0 Token Endpoint\n\nExchanges authorization code or refresh token for access token.\n\nGrant Types:\n1. authorization_code: Exchange authorization code for tokens\n - Required: grant_type, code, redirect_uri, client_id, client_secret\n - Optional: code_verifier (required if PKCE was used)\n\n2. 
refresh_token: Exchange refresh token for new access token\n - Required: grant_type, refresh_token, client_id, client_secret\n\nReturns:\n- access_token: Bearer token for API access (1 hour TTL)\n- token_type: \"Bearer\"\n- expires_in: Seconds until access token expires\n- refresh_token: Token for refreshing access (30 days TTL)\n- scopes: List of scopes", + "operationId": "postOauthToken", + "requestBody": { + "content": { + "application/json": { + "schema": { + "anyOf": [ + { "$ref": "#/components/schemas/TokenRequestByCode" }, + { "$ref": "#/components/schemas/TokenRequestByRefreshToken" } + ], + "title": "Request" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/TokenResponse" } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/introspect": { + "post": { + "tags": ["oauth"], + "summary": "Introspect", + "description": "OAuth 2.0 Token Introspection Endpoint (RFC 7662)\n\nAllows clients to check if a token is valid and get its metadata.\n\nReturns:\n- active: Whether the token is currently active\n- scopes: List of authorized scopes (if active)\n- client_id: The client the token was issued to (if active)\n- user_id: The user the token represents (if active)\n- exp: Expiration timestamp (if active)\n- token_type: \"access_token\" or \"refresh_token\" (if active)", + "operationId": "postOauthIntrospect", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_postOauthIntrospect" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TokenIntrospectionResult" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/revoke": { + "post": { + "tags": ["oauth"], + "summary": "Revoke", + "description": "OAuth 2.0 Token Revocation Endpoint (RFC 7009)\n\nAllows clients to revoke an access or refresh token.\n\nNote: Revoking a refresh token does NOT revoke associated access tokens.\nRevoking an access token does NOT revoke the associated refresh token.", + "operationId": "postOauthRevoke", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/Body_postOauthRevoke" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/apps/mine": { + "get": { + "tags": ["oauth"], + "summary": "List My Oauth Apps", + "description": "List all OAuth applications owned by the current user.\n\nReturns a list of OAuth applications with their details including:\n- id, name, description, logo_url\n- client_id (public identifier)\n- redirect_uris, grant_types, scopes\n- is_active status\n- created_at, updated_at timestamps\n\nNote: client_secret is never returned for security reasons.", + "operationId": "getOauthListMyOauthApps", + "responses": { + "200": { 
+ "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { + "$ref": "#/components/schemas/OAuthApplicationInfo" + }, + "type": "array", + "title": "Response Getoauthlistmyoauthapps" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/oauth/apps/{app_id}/status": { + "patch": { + "tags": ["oauth"], + "summary": "Update App Status", + "description": "Enable or disable an OAuth application.\n\nOnly the application owner can update the status.\nWhen disabled, the application cannot be used for new authorizations\nand existing access tokens will fail validation.\n\nReturns the updated application info.", + "operationId": "patchOauthUpdateAppStatus", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "app_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "App Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_patchOauthUpdateAppStatus" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OAuthApplicationInfo" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + } + } + }, + "/api/oauth/apps/{app_id}/logo": { + "patch": { + "tags": ["oauth"], + "summary": "Update App Logo", + "description": "Update the logo URL for an OAuth application.\n\nOnly the application owner can update the logo.\nThe logo should be uploaded first using the media upload endpoint,\nthen this endpoint is called with the resulting URL.\n\nLogo requirements:\n- Must be square (1:1 aspect ratio)\n- Minimum 512x512 pixels\n- Maximum 2048x2048 pixels\n\nReturns the updated application info.", + "operationId": "patchOauthUpdateAppLogo", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "app_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "App Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/UpdateAppLogoRequest" } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OAuthApplicationInfo" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + } + } + }, + "/api/oauth/apps/{app_id}/logo/upload": { + "post": { + "tags": ["oauth"], + "summary": "Upload App Logo", + "description": "Upload a logo image for an OAuth application.\n\nRequirements:\n- Image must be square (1:1 aspect ratio)\n- Minimum 512x512 pixels\n- Maximum 2048x2048 pixels\n- Allowed formats: JPEG, PNG, WebP\n- Maximum file size: 3MB\n\nThe image is uploaded to cloud storage and the app's logoUrl is updated.\nReturns the updated application info.", + "operationId": "postOauthUploadAppLogo", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "app_id", + "in": "path", + 
"required": true, + "schema": { "type": "string", "title": "App Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "multipart/form-data": { + "schema": { + "$ref": "#/components/schemas/Body_postOauthUploadAppLogo" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OAuthApplicationInfo" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + } + } + }, "/health": { "get": { "tags": ["health"], @@ -5418,29 +5781,30 @@ }, "APIKeyInfo": { "properties": { - "id": { "type": "string", "title": "Id" }, - "name": { "type": "string", "title": "Name" }, - "head": { - "type": "string", - "title": "Head", - "description": "The first 8 characters of the key" - }, - "tail": { - "type": "string", - "title": "Tail", - "description": "The last 8 characters of the key" - }, - "status": { "$ref": "#/components/schemas/APIKeyStatus" }, - "permissions": { + "user_id": { "type": "string", "title": "User Id" }, + "scopes": { "items": { "$ref": "#/components/schemas/APIKeyPermission" }, "type": "array", - "title": "Permissions" + "title": "Scopes" + }, + "type": { + "type": "string", + "const": "api_key", + "title": "Type", + "default": "api_key" }, "created_at": { "type": "string", "format": "date-time", "title": "Created At" }, + "expires_at": { + "anyOf": [ + { "type": "string", "format": "date-time" }, + { "type": "null" } + ], + "title": "Expires At" + }, "last_used_at": { "anyOf": [ { "type": "string", "format": "date-time" }, @@ -5455,28 +5819,41 @@ ], "title": "Revoked At" }, + "id": { "type": "string", "title": "Id" }, + "name": { "type": "string", "title": "Name" }, + "head": { + "type": "string", + "title": "Head", + "description": "The first 8 characters of the key" + }, + "tail": { + "type": "string", + "title": "Tail", + "description": "The last 8 characters of the key" + }, + "status": { "$ref": "#/components/schemas/APIKeyStatus" }, "description": { "anyOf": [{ "type": "string" }, { "type": "null" }], "title": "Description" - }, - "user_id": { "type": "string", "title": "User Id" } + } }, "type": "object", "required": [ + "user_id", + "scopes", + "created_at", "id", "name", "head", "tail", - "status", - "permissions", - "created_at", - "user_id" + "status" ], "title": "APIKeyInfo" }, "APIKeyPermission": { "type": "string", "enum": [ + "IDENTITY", "EXECUTE_GRAPH", "READ_GRAPH", "EXECUTE_BLOCK", @@ -5614,6 +5991,72 @@ "required": ["answer", "documents", "success"], "title": "ApiResponse" }, + "AuthorizeRequest": { + "properties": { + "client_id": { + "type": "string", + "title": "Client Id", + "description": "Client identifier" + }, + "redirect_uri": { + "type": "string", + "title": "Redirect Uri", + "description": "Redirect URI" + }, + "scopes": { + "items": { "type": "string" }, + "type": "array", + "title": "Scopes", + "description": "List of scopes" + }, + "state": { + "type": "string", + "title": "State", + "description": "Anti-CSRF token from client" + }, + "response_type": { + "type": "string", + "title": "Response Type", + "description": "Must be 'code' for authorization code flow", + "default": "code" + }, + "code_challenge": { + "type": "string", + "title": "Code Challenge", + "description": "PKCE code challenge (required)" + }, + 
"code_challenge_method": { + "type": "string", + "enum": ["S256", "plain"], + "title": "Code Challenge Method", + "description": "PKCE code challenge method (S256 recommended)", + "default": "S256" + } + }, + "type": "object", + "required": [ + "client_id", + "redirect_uri", + "scopes", + "state", + "code_challenge" + ], + "title": "AuthorizeRequest", + "description": "OAuth 2.0 authorization request" + }, + "AuthorizeResponse": { + "properties": { + "redirect_url": { + "type": "string", + "title": "Redirect Url", + "description": "URL to redirect the user to" + } + }, + "type": "object", + "required": ["redirect_url"], + "title": "AuthorizeResponse", + "description": "OAuth 2.0 authorization response with redirect URL" + }, "AutoTopUpConfig": { "properties": { "amount": { "type": "integer", "title": "Amount" }, @@ -5863,6 +6306,86 @@ "required": ["blocks", "pagination"], "title": "BlockResponse" }, + "Body_patchOauthUpdateAppStatus": { + "properties": { + "is_active": { + "type": "boolean", + "title": "Is Active", + "description": "Whether the app should be active" + } + }, + "type": "object", + "required": ["is_active"], + "title": "Body_patchOauthUpdateAppStatus" + }, + "Body_postOauthIntrospect": { + "properties": { + "token": { + "type": "string", + "title": "Token", + "description": "Token to introspect" + }, + "token_type_hint": { + "anyOf": [ + { "type": "string", "enum": ["access_token", "refresh_token"] }, + { "type": "null" } + ], + "title": "Token Type Hint", + "description": "Hint about token type ('access_token' or 'refresh_token')" + }, + "client_id": { + "type": "string", + "title": "Client Id", + "description": "Client identifier" + }, + "client_secret": { + "type": "string", + "title": "Client Secret", + "description": "Client secret" + } + }, + "type": "object", + "required": ["token", "client_id", "client_secret"], + "title": "Body_postOauthIntrospect" + }, + "Body_postOauthRevoke": { + "properties": { + "token": { + "type": "string", + "title": "Token", + "description": "Token to revoke" + }, + "token_type_hint": { + "anyOf": [ + { "type": "string", "enum": ["access_token", "refresh_token"] }, + { "type": "null" } + ], + "title": "Token Type Hint", + "description": "Hint about token type ('access_token' or 'refresh_token')" + }, + "client_id": { + "type": "string", + "title": "Client Id", + "description": "Client identifier" + }, + "client_secret": { + "type": "string", + "title": "Client Secret", + "description": "Client secret" + } + }, + "type": "object", + "required": ["token", "client_id", "client_secret"], + "title": "Body_postOauthRevoke" + }, + "Body_postOauthUploadAppLogo": { + "properties": { + "file": { "type": "string", "format": "binary", "title": "File" } + }, + "type": "object", + "required": ["file"], + "title": "Body_postOauthUploadAppLogo" + }, "Body_postV1Exchange_oauth_code_for_tokens": { "properties": { "code": { @@ -7855,6 +8378,85 @@ "required": ["provider", "access_token", "scopes"], "title": "OAuth2Credentials" }, + "OAuthApplicationInfo": { + "properties": { + "id": { "type": "string", "title": "Id" }, + "name": { "type": "string", "title": "Name" }, + "description": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Description" + }, + "logo_url": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Logo Url" + }, + "client_id": { "type": "string", "title": "Client Id" }, + "redirect_uris": { + "items": { "type": "string" }, + "type": "array", + "title": "Redirect Uris" + }, + "grant_types": { + "items": { 
"type": "string" }, + "type": "array", + "title": "Grant Types" + }, + "scopes": { + "items": { "$ref": "#/components/schemas/APIKeyPermission" }, + "type": "array", + "title": "Scopes" + }, + "owner_id": { "type": "string", "title": "Owner Id" }, + "is_active": { "type": "boolean", "title": "Is Active" }, + "created_at": { + "type": "string", + "format": "date-time", + "title": "Created At" + }, + "updated_at": { + "type": "string", + "format": "date-time", + "title": "Updated At" + } + }, + "type": "object", + "required": [ + "id", + "name", + "client_id", + "redirect_uris", + "grant_types", + "scopes", + "owner_id", + "is_active", + "created_at", + "updated_at" + ], + "title": "OAuthApplicationInfo", + "description": "OAuth application information (without client secret hash)" + }, + "OAuthApplicationPublicInfo": { + "properties": { + "name": { "type": "string", "title": "Name" }, + "description": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Description" + }, + "logo_url": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Logo Url" + }, + "scopes": { + "items": { "type": "string" }, + "type": "array", + "title": "Scopes" + } + }, + "type": "object", + "required": ["name", "scopes"], + "title": "OAuthApplicationPublicInfo", + "description": "Public information about an OAuth application (for consent screen)" + }, "OnboardingStep": { "type": "string", "enum": [ @@ -9892,6 +10494,134 @@ "required": ["timezone"], "title": "TimezoneResponse" }, + "TokenIntrospectionResult": { + "properties": { + "active": { "type": "boolean", "title": "Active" }, + "scopes": { + "anyOf": [ + { "items": { "type": "string" }, "type": "array" }, + { "type": "null" } + ], + "title": "Scopes" + }, + "client_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Client Id" + }, + "user_id": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "User Id" + }, + "exp": { + "anyOf": [{ "type": "integer" }, { "type": "null" }], + "title": "Exp" + }, + "token_type": { + "anyOf": [ + { "type": "string", "enum": ["access_token", "refresh_token"] }, + { "type": "null" } + ], + "title": "Token Type" + } + }, + "type": "object", + "required": ["active"], + "title": "TokenIntrospectionResult", + "description": "Result of token introspection (RFC 7662)" + }, + "TokenRequestByCode": { + "properties": { + "grant_type": { + "type": "string", + "const": "authorization_code", + "title": "Grant Type" + }, + "code": { + "type": "string", + "title": "Code", + "description": "Authorization code" + }, + "redirect_uri": { + "type": "string", + "title": "Redirect Uri", + "description": "Redirect URI (must match authorization request)" + }, + "client_id": { "type": "string", "title": "Client Id" }, + "client_secret": { "type": "string", "title": "Client Secret" }, + "code_verifier": { + "type": "string", + "title": "Code Verifier", + "description": "PKCE code verifier" + } + }, + "type": "object", + "required": [ + "grant_type", + "code", + "redirect_uri", + "client_id", + "client_secret", + "code_verifier" + ], + "title": "TokenRequestByCode" + }, + "TokenRequestByRefreshToken": { + "properties": { + "grant_type": { + "type": "string", + "const": "refresh_token", + "title": "Grant Type" + }, + "refresh_token": { "type": "string", "title": "Refresh Token" }, + "client_id": { "type": "string", "title": "Client Id" }, + "client_secret": { "type": "string", "title": "Client Secret" } + }, + "type": "object", + "required": [ + "grant_type", + "refresh_token", + 
"client_id", + "client_secret" + ], + "title": "TokenRequestByRefreshToken" + }, + "TokenResponse": { + "properties": { + "token_type": { + "type": "string", + "const": "Bearer", + "title": "Token Type", + "default": "Bearer" + }, + "access_token": { "type": "string", "title": "Access Token" }, + "access_token_expires_at": { + "type": "string", + "format": "date-time", + "title": "Access Token Expires At" + }, + "refresh_token": { "type": "string", "title": "Refresh Token" }, + "refresh_token_expires_at": { + "type": "string", + "format": "date-time", + "title": "Refresh Token Expires At" + }, + "scopes": { + "items": { "type": "string" }, + "type": "array", + "title": "Scopes" + } + }, + "type": "object", + "required": [ + "access_token", + "access_token_expires_at", + "refresh_token", + "refresh_token_expires_at", + "scopes" + ], + "title": "TokenResponse", + "description": "OAuth 2.0 token response" + }, "TransactionHistory": { "properties": { "transactions": { @@ -9938,6 +10668,18 @@ "required": ["name", "graph_id", "graph_version", "trigger_config"], "title": "TriggeredPresetSetupRequest" }, + "UpdateAppLogoRequest": { + "properties": { + "logo_url": { + "type": "string", + "title": "Logo Url", + "description": "URL of the uploaded logo image" + } + }, + "type": "object", + "required": ["logo_url"], + "title": "UpdateAppLogoRequest" + }, "UpdatePermissionsRequest": { "properties": { "permissions": { diff --git a/autogpt_platform/frontend/src/components/molecules/ErrorCard/ErrorCard.tsx b/autogpt_platform/frontend/src/components/molecules/ErrorCard/ErrorCard.tsx index 4269ae5415..843330b085 100644 --- a/autogpt_platform/frontend/src/components/molecules/ErrorCard/ErrorCard.tsx +++ b/autogpt_platform/frontend/src/components/molecules/ErrorCard/ErrorCard.tsx @@ -7,6 +7,7 @@ import { ActionButtons } from "./components/ActionButtons"; export interface ErrorCardProps { isSuccess?: boolean; + isOurProblem?: boolean; responseError?: { detail?: Array<{ msg: string }> | string; message?: string; @@ -17,15 +18,18 @@ export interface ErrorCardProps { message?: string; }; context?: string; + hint?: string; onRetry?: () => void; className?: string; } export function ErrorCard({ isSuccess = false, + isOurProblem = true, responseError, httpError, context = "data", + hint, onRetry, className = "", }: ErrorCardProps) { @@ -50,13 +54,19 @@ export function ErrorCard({
- - + {isOurProblem && ( + + )}
); diff --git a/autogpt_platform/frontend/src/components/molecules/ErrorCard/components/ErrorMessage.tsx b/autogpt_platform/frontend/src/components/molecules/ErrorCard/components/ErrorMessage.tsx index f232e6ff3f..bfb3726de1 100644 --- a/autogpt_platform/frontend/src/components/molecules/ErrorCard/components/ErrorMessage.tsx +++ b/autogpt_platform/frontend/src/components/molecules/ErrorCard/components/ErrorMessage.tsx @@ -4,9 +4,10 @@ import { Text } from "@/components/atoms/Text/Text"; interface Props { errorMessage: string; context: string; + hint?: string; } -export function ErrorMessage({ errorMessage, context }: Props) { +export function ErrorMessage({ errorMessage, context, hint }: Props) { return (
@@ -17,6 +18,13 @@ export function ErrorMessage({ errorMessage, context }: Props) { {errorMessage}
+ {hint && ( +
+ + {hint} + +
+ )}
); } diff --git a/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts b/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts index 0d8be1df5d..2f27ef126d 100644 --- a/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts +++ b/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts @@ -912,7 +912,7 @@ export interface APIKey { prefix: string; postfix: string; status: APIKeyStatus; - permissions: APIKeyPermission[]; + scopes: APIKeyPermission[]; created_at: string; last_used_at?: string; revoked_at?: string; diff --git a/autogpt_platform/frontend/src/lib/supabase/helpers.ts b/autogpt_platform/frontend/src/lib/supabase/helpers.ts index 7b2d36a0fd..f41c8c2f0f 100644 --- a/autogpt_platform/frontend/src/lib/supabase/helpers.ts +++ b/autogpt_platform/frontend/src/lib/supabase/helpers.ts @@ -4,6 +4,8 @@ import { type CookieOptions } from "@supabase/ssr"; import { SupabaseClient } from "@supabase/supabase-js"; export const PROTECTED_PAGES = [ + "/auth/authorize", + "/auth/integrations", "/monitor", "/build", "/onboarding", @@ -59,14 +61,15 @@ export function hasWebSocketDisconnectIntent(): boolean { // Redirect utilities export function getRedirectPath( - pathname: string, + path: string, // including query strings userRole?: string, ): string | null { - if (shouldRedirectOnLogout(pathname)) { - return "/login"; + if (shouldRedirectOnLogout(path)) { + // Preserve the original path as a 'next' parameter so user can return after login + return `/login?next=${encodeURIComponent(path)}`; } - if (isAdminPage(pathname) && userRole !== "admin") { + if (isAdminPage(path) && userRole !== "admin") { return "/marketplace"; } diff --git a/autogpt_platform/frontend/src/lib/supabase/hooks/helpers.ts b/autogpt_platform/frontend/src/lib/supabase/hooks/helpers.ts index cce4f7a769..95b9e8bbca 100644 --- a/autogpt_platform/frontend/src/lib/supabase/hooks/helpers.ts +++ b/autogpt_platform/frontend/src/lib/supabase/hooks/helpers.ts @@ -77,7 +77,7 @@ export async function fetchUser(): Promise { } interface ValidateSessionParams { - pathname: string; + path: string; currentUser: User | null; } @@ -92,7 +92,7 @@ export async function validateSession( params: ValidateSessionParams, ): Promise { try { - const result = await validateSessionAction(params.pathname); + const result = await validateSessionAction(params.path); if (!result.isValid) { return { @@ -118,7 +118,7 @@ export async function validateSession( }; } catch (error) { console.error("Session validation error:", error); - const redirectPath = getRedirectPath(params.pathname); + const redirectPath = getRedirectPath(params.path); return { isValid: false, redirectPath, @@ -146,7 +146,7 @@ interface StorageEventHandlerParams { event: StorageEvent; api: BackendAPI | null; router: AppRouterInstance | null; - pathname: string; + path: string; } interface StorageEventHandlerResult { @@ -167,7 +167,7 @@ export function handleStorageEvent( params.api.disconnectWebSocket(); } - const redirectPath = getRedirectPath(params.pathname); + const redirectPath = getRedirectPath(params.path); return { shouldLogout: true, diff --git a/autogpt_platform/frontend/src/lib/supabase/hooks/useSupabase.ts b/autogpt_platform/frontend/src/lib/supabase/hooks/useSupabase.ts index 41fdee25a2..5f362397f6 100644 --- a/autogpt_platform/frontend/src/lib/supabase/hooks/useSupabase.ts +++ b/autogpt_platform/frontend/src/lib/supabase/hooks/useSupabase.ts @@ -1,8 +1,8 @@ "use client"; import { useBackendAPI } from "@/lib/autogpt-server-api/context"; -import { 
usePathname, useRouter } from "next/navigation"; -import { useEffect } from "react"; +import { usePathname, useRouter, useSearchParams } from "next/navigation"; +import { useEffect, useMemo } from "react"; import { useShallow } from "zustand/react/shallow"; import type { ServerLogoutOptions } from "../actions"; import { useSupabaseStore } from "./useSupabaseStore"; @@ -10,8 +10,15 @@ import { useSupabaseStore } from "./useSupabaseStore"; export function useSupabase() { const router = useRouter(); const pathname = usePathname(); + const searchParams = useSearchParams(); const api = useBackendAPI(); + // Combine pathname and search params to get full path for redirect preservation + const fullPath = useMemo(() => { + const search = searchParams.toString(); + return search ? `${pathname}?${search}` : pathname; + }, [pathname, searchParams]); + const { user, supabase, @@ -36,9 +43,9 @@ export function useSupabase() { void initialize({ api, router, - pathname, + path: fullPath, }); - }, [api, initialize, pathname, router]); + }, [api, initialize, fullPath, router]); function handleLogout(options: ServerLogoutOptions = {}) { return logOut({ @@ -49,7 +56,7 @@ export function useSupabase() { function handleValidateSession() { return validateSession({ - pathname, + path: fullPath, router, }); } diff --git a/autogpt_platform/frontend/src/lib/supabase/hooks/useSupabaseStore.ts b/autogpt_platform/frontend/src/lib/supabase/hooks/useSupabaseStore.ts index dcc6029668..5207397ee4 100644 --- a/autogpt_platform/frontend/src/lib/supabase/hooks/useSupabaseStore.ts +++ b/autogpt_platform/frontend/src/lib/supabase/hooks/useSupabaseStore.ts @@ -21,7 +21,7 @@ import { interface InitializeParams { api: BackendAPI; router: AppRouterInstance; - pathname: string; + path: string; } interface LogOutParams { @@ -32,7 +32,7 @@ interface LogOutParams { interface ValidateParams { force?: boolean; - pathname?: string; + path?: string; router?: AppRouterInstance; } @@ -47,7 +47,7 @@ interface SupabaseStoreState { listenersCleanup: (() => void) | null; routerRef: AppRouterInstance | null; apiRef: BackendAPI | null; - currentPathname: string; + currentPath: string; initialize: (params: InitializeParams) => Promise; logOut: (params?: LogOutParams) => Promise; validateSession: (params?: ValidateParams) => Promise; @@ -60,7 +60,7 @@ export const useSupabaseStore = create((set, get) => { set({ routerRef: params.router, apiRef: params.api, - currentPathname: params.pathname, + currentPath: params.path, }); const supabaseClient = ensureSupabaseClient(); @@ -83,7 +83,7 @@ export const useSupabaseStore = create((set, get) => { // This handles race conditions after login where cookies might not be immediately available if (!result.user) { const validationResult = await validateSessionHelper({ - pathname: params.pathname, + path: params.path, currentUser: null, }); @@ -160,7 +160,7 @@ export const useSupabaseStore = create((set, get) => { params?: ValidateParams, ): Promise { const router = params?.router ?? get().routerRef; - const pathname = params?.pathname ?? get().currentPathname; + const pathname = params?.path ?? 
get().currentPath; if (!router || !pathname) return true; if (!params?.force && get().isValidating) return true; @@ -175,7 +175,7 @@ export const useSupabaseStore = create((set, get) => { try { const result = await validateSessionHelper({ - pathname, + path: pathname, currentUser: get().user, }); @@ -224,7 +224,7 @@ export const useSupabaseStore = create((set, get) => { event, api: get().apiRef, router: get().routerRef, - pathname: get().currentPathname, + path: get().currentPath, }); if (!result.shouldLogout) return; @@ -283,7 +283,7 @@ export const useSupabaseStore = create((set, get) => { listenersCleanup: null, routerRef: null, apiRef: null, - currentPathname: "", + currentPath: "", initialize, logOut, validateSession: validateSessionInternal, diff --git a/autogpt_platform/frontend/src/lib/supabase/middleware.ts b/autogpt_platform/frontend/src/lib/supabase/middleware.ts index 5e04efde67..5e4bd01e83 100644 --- a/autogpt_platform/frontend/src/lib/supabase/middleware.ts +++ b/autogpt_platform/frontend/src/lib/supabase/middleware.ts @@ -57,7 +57,9 @@ export async function updateSession(request: NextRequest) { const attemptingAdminPage = isAdminPage(pathname); if (attemptingProtectedPage || attemptingAdminPage) { + const currentDest = url.pathname + url.search; url.pathname = "/login"; + url.search = `?next=${encodeURIComponent(currentDest)}`; return NextResponse.redirect(url); } } diff --git a/autogpt_platform/frontend/src/middleware.ts b/autogpt_platform/frontend/src/middleware.ts index 65edec41d7..af1c823295 100644 --- a/autogpt_platform/frontend/src/middleware.ts +++ b/autogpt_platform/frontend/src/middleware.ts @@ -9,11 +9,15 @@ export const config = { matcher: [ /* * Match all request paths except for the ones starting with: - * - _next/static (static files) - * - _next/image (image optimization files) - * - favicon.ico (favicon file) + * - /_next/static (static files) + * - /_next/image (image optimization files) + * - /favicon.ico (favicon file) + * - /auth/callback (OAuth callback - needs to work without auth) * Feel free to modify this pattern to include more paths. + * + * Note: /auth/authorize and /auth/integrations/* ARE protected and need + * middleware to run for authentication checks. 
*/ - "/((?!_next/static|_next/image|favicon.ico|auth|.*\\.(?:svg|png|jpg|jpeg|gif|webp)$).*)", + "/((?!_next/static|_next/image|favicon.ico|auth/callback|.*\\.(?:svg|png|jpg|jpeg|gif|webp)$).*)", ], }; diff --git a/autogpt_platform/frontend/src/tests/api-keys.spec.ts b/autogpt_platform/frontend/src/tests/api-keys.spec.ts index a42ae8384e..e2a5575aed 100644 --- a/autogpt_platform/frontend/src/tests/api-keys.spec.ts +++ b/autogpt_platform/frontend/src/tests/api-keys.spec.ts @@ -19,8 +19,8 @@ test.describe("API Keys Page", () => { const page = await context.newPage(); try { - await page.goto("/profile/api_keys"); - await hasUrl(page, "/login"); + await page.goto("/profile/api-keys"); + await hasUrl(page, "/login?next=%2Fprofile%2Fapi-keys"); } finally { await page.close(); await context.close(); @@ -29,7 +29,7 @@ test.describe("API Keys Page", () => { test("should create a new API key successfully", async ({ page }) => { const { getButton, getField } = getSelectors(page); - await page.goto("/profile/api_keys"); + await page.goto("/profile/api-keys"); await getButton("Create Key").click(); await getField("Name").fill("Test Key"); @@ -45,7 +45,7 @@ test.describe("API Keys Page", () => { test("should revoke an existing API key", async ({ page }) => { const { getRole, getId } = getSelectors(page); - await page.goto("/profile/api_keys"); + await page.goto("/profile/api-keys"); const apiKeyRow = getId("api-key-row").first(); const apiKeyContent = await apiKeyRow diff --git a/autogpt_platform/frontend/src/tests/profile-form.spec.ts b/autogpt_platform/frontend/src/tests/profile-form.spec.ts index 527c5cca92..1fc1008e9c 100644 --- a/autogpt_platform/frontend/src/tests/profile-form.spec.ts +++ b/autogpt_platform/frontend/src/tests/profile-form.spec.ts @@ -24,7 +24,7 @@ test.describe("Profile Form", () => { try { await page.goto("/profile"); - await hasUrl(page, "/login"); + await hasUrl(page, "/login?next=%2Fprofile"); } finally { await page.close(); await context.close(); diff --git a/autogpt_platform/frontend/src/tests/signin.spec.ts b/autogpt_platform/frontend/src/tests/signin.spec.ts index 6e53855a8e..0f36006c4d 100644 --- a/autogpt_platform/frontend/src/tests/signin.spec.ts +++ b/autogpt_platform/frontend/src/tests/signin.spec.ts @@ -152,10 +152,10 @@ test("multi-tab logout with WebSocket cleanup", async ({ context }) => { // Check if Tab 2 has been redirected to login or refresh the page to trigger redirect try { await page2.reload(); - await hasUrl(page2, "/login"); + await hasUrl(page2, "/login?next=%2Fbuild"); } catch { // If reload fails, the page might already be redirecting - await hasUrl(page2, "/login"); + await hasUrl(page2, "/login?next=%2Fbuild"); } // Verify the profile menu is no longer visible (user is logged out) diff --git a/docs/content/platform/integrating/api-guide.md b/docs/content/platform/integrating/api-guide.md new file mode 100644 index 0000000000..19d210af91 --- /dev/null +++ b/docs/content/platform/integrating/api-guide.md @@ -0,0 +1,85 @@ +# AutoGPT Platform External API Guide + +The AutoGPT Platform provides an External API that allows you to programmatically interact with agents, blocks, the store, and more. + +## API Documentation + +Full API documentation with interactive examples is available at: + +**[https://backend.agpt.co/external-api/docs](https://backend.agpt.co/external-api/docs)** + +This Swagger UI documentation includes all available endpoints, request/response schemas, and allows you to try out API calls directly. 
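+
+For quick experimentation outside the Swagger UI, the same endpoints can be called from any HTTP client. Below is a minimal TypeScript sketch that lists blocks with an API key; the base URL and `X-API-Key` header match the examples in this guide, while the `AGPT_API_KEY` environment variable name is only an illustrative placeholder:
+
+```typescript
+// Minimal sketch: list the available blocks using an API key.
+// Assumes Node 18+ (global fetch) and an API key stored in AGPT_API_KEY.
+const BASE_URL = "https://backend.agpt.co/external-api/v1";
+
+async function listBlocks(): Promise<unknown> {
+  const response = await fetch(`${BASE_URL}/blocks`, {
+    headers: { "X-API-Key": process.env.AGPT_API_KEY ?? "" },
+  });
+  if (!response.ok) {
+    throw new Error(`Request failed with status ${response.status}`);
+  }
+  return response.json();
+}
+
+listBlocks().then((blocks) => console.log(blocks));
+```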
+ +## Authentication Methods + +The External API supports two authentication methods: + +### 1. API Keys + +API keys are the simplest way to authenticate. Generate an API key from your AutoGPT Platform account settings and include it in your requests: + +```http +GET /external-api/v1/blocks +X-API-Key: your_api_key_here +``` + +API keys are ideal for: +- Server-to-server integrations +- Personal scripts and automation +- Backend services + +### 2. OAuth 2.0 (Single Sign-On) + +For applications that need to act on behalf of users, use OAuth 2.0. This allows users to authorize your application to access their AutoGPT resources. + +OAuth is ideal for: +- Third-party applications +- "Sign in with AutoGPT" (SSO, Single Sign-On) functionality +- Applications that need user-specific permissions + +See the [SSO Integration Guide](sso-guide.md) for complete OAuth implementation details. + +## Available Scopes + +When using OAuth, request only the scopes your application needs: + +| Scope | Description | +|-------|-------------| +| `IDENTITY` | Read user ID, e-mail, and timezone | +| `EXECUTE_GRAPH` | Run agents | +| `READ_GRAPH` | Read agent run results | +| `EXECUTE_BLOCK` | Run individual blocks | +| `READ_BLOCK` | Read block definitions | +| `READ_STORE` | Access the agent store | +| `USE_TOOLS` | Use platform tools | +| `MANAGE_INTEGRATIONS` | Create and update user integrations | +| `READ_INTEGRATIONS` | Read user integration status | +| `DELETE_INTEGRATIONS` | Remove user integrations | + +## Quick Start + +### Using an API Key + +```bash +# List available blocks +curl -H "X-API-Key: YOUR_API_KEY" \ + https://backend.agpt.co/external-api/v1/blocks +``` + +### Using OAuth + +1. Register an OAuth application (contact platform administrator) +2. Implement the OAuth flow as described in the [SSO Guide](sso-guide.md) +3. Use the obtained access token: + +```bash +curl -H "Authorization: Bearer agpt_xt_..." \ + https://backend.agpt.co/external-api/v1/blocks +``` + +## Support + +For issues or questions about API integration: + +- Open an issue on [GitHub](https://github.com/Significant-Gravitas/AutoGPT) +- Check the [Swagger documentation](https://backend.agpt.co/external-api/docs) diff --git a/docs/content/platform/integrating/oauth-guide.md b/docs/content/platform/integrating/oauth-guide.md new file mode 100644 index 0000000000..d88ef385e1 --- /dev/null +++ b/docs/content/platform/integrating/oauth-guide.md @@ -0,0 +1,440 @@ +# AutoGPT Platform OAuth Integration Guide + +This guide explains how to integrate your application with AutoGPT Platform using OAuth 2.0. OAuth can be used for API access, Single Sign-On (SSO), or both. + +For general API information and endpoint documentation, see the [API Guide](api-guide.md) and the [Swagger documentation](https://backend.agpt.co/external-api/docs). + +## Overview + +AutoGPT Platform's OAuth implementation supports multiple use cases: + +### OAuth for API Access + +Use OAuth when your application needs to call AutoGPT APIs on behalf of users. This is the most common use case for third-party integrations. + +**When to use:** + +- Your app needs to run agents, access the store, or manage integrations for users +- You want user-specific permissions rather than a single API key +- Users should be able to revoke access to your app + +### SSO: "Sign in with AutoGPT" + +Use SSO when you want users to sign in to your app through their AutoGPT account. Request the `IDENTITY` scope to get user information. 
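+
+For example, once the OAuth flow described below has completed, a token carrying the `IDENTITY` scope can identify the signed-in user with a single request. A minimal TypeScript sketch follows; the `/external-api/v1/me` endpoint and response shape match Step 4 of the flow below, and the base URL follows the API Guide:
+
+```typescript
+// Minimal SSO sketch: resolve the AutoGPT Platform user behind an access token.
+// Assumes an access token obtained via the OAuth flow in this guide,
+// with the IDENTITY scope granted.
+interface AutoGPTUser {
+  id: string;
+  name: string;
+  email: string;
+  timezone: string;
+}
+
+async function getAutoGPTUser(accessToken: string): Promise<AutoGPTUser> {
+  const response = await fetch("https://backend.agpt.co/external-api/v1/me", {
+    headers: { Authorization: `Bearer ${accessToken}` },
+  });
+  if (!response.ok) {
+    throw new Error(`Failed to fetch user info: ${response.status}`);
+  }
+  return (await response.json()) as AutoGPTUser;
+}
+```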
+ +**When to use:** + +- You want to use AutoGPT as an identity provider +- Users already have AutoGPT accounts and you want seamless login +- You need to identify users without managing passwords + +**Note:** SSO and API access can be combined. Request `IDENTITY` along with other scopes to both authenticate users and access APIs on their behalf. + +### Integration Setup Wizard + +A separate flow that guides users through connecting third-party services (GitHub, Google, etc.) to their AutoGPT account. See [Integration Setup Wizard](#integration-setup-wizard) below. + +## Prerequisites + +Before integrating, you need an OAuth application registered with AutoGPT Platform. Contact the platform administrator to obtain: + +- **Client ID** - Public identifier for your application +- **Client Secret** - Secret key for authenticating your application (keep this secure!) +- **Registered Redirect URIs** - URLs where users will be redirected after authorization + +## OAuth Flow + +The OAuth flow is technically the same whether you're using it for API access, SSO, or both. The main difference is which scopes you request. + +### Step 1: Redirect User to Authorization + +Redirect the user to the AutoGPT authorization page with the required parameters: + +```url +https://platform.agpt.co/auth/authorize? + client_id={YOUR_CLIENT_ID}& + redirect_uri=https://yourapp.com/callback& + scope=EXECUTE_GRAPH READ_GRAPH& + state={RANDOM_STATE_TOKEN}& + code_challenge={PKCE_CHALLENGE}& + code_challenge_method=S256& + response_type=code +``` + +#### Parameters + +| Parameter | Required | Description | +|-----------|----------|-------------| +| `client_id` | Yes | Your OAuth application's client ID | +| `redirect_uri` | Yes | URL to redirect after authorization (must match registered URI) | +| `scope` | Yes | Space-separated list of permissions (see [Available Scopes](api-guide.md#available-scopes)) | +| `state` | Yes | Random string to prevent CSRF attacks (store and verify on callback) | +| `code_challenge` | Yes | PKCE code challenge (see [PKCE](#pkce-implementation)) | +| `code_challenge_method` | Yes | Must be `S256` | +| `response_type` | Yes | Must be `code` | + +### Step 2: Handle the Callback + +After the user approves (or denies) access, they'll be redirected to your `redirect_uri`: + +**Success:** + +```url +https://yourapp.com/callback?code=AUTHORIZATION_CODE&state=RANDOM_STATE_TOKEN +``` + +**Error:** + +```url +https://yourapp.com/callback?error=access_denied&error_description=User%20denied%20access&state=RANDOM_STATE_TOKEN +``` + +Always verify the `state` parameter matches what you sent in Step 1. + +### Step 3: Exchange Code for Tokens + +Exchange the authorization code for access and refresh tokens: + +```http +POST /api/oauth/token +Content-Type: application/json + +{ + "grant_type": "authorization_code", + "code": "{AUTHORIZATION_CODE}", + "redirect_uri": "https://yourapp.com/callback", + "client_id": "{YOUR_CLIENT_ID}", + "client_secret": "{YOUR_CLIENT_SECRET}", + "code_verifier": "{PKCE_VERIFIER}" +} +``` + +**Response:** + +```json +{ + "token_type": "Bearer", + "access_token": "agpt_xt_...", + "access_token_expires_at": "2025-01-15T12:00:00Z", + "refresh_token": "agpt_rt_...", + "refresh_token_expires_at": "2025-02-14T12:00:00Z", + "scopes": ["EXECUTE_GRAPH", "READ_GRAPH"] +} +``` + +### Step 4: Use the Access Token + +Include the access token in API requests: + +```http +GET /external-api/v1/blocks +Authorization: Bearer agpt_xt_... 
+``` + +**For SSO:** If you requested the `IDENTITY` scope, fetch user info to identify the user: + +```http +GET /external-api/v1/me +Authorization: Bearer agpt_xt_... +``` + +**Response:** + +```json +{ + "id": "user-uuid", + "name": "John Doe", + "email": "john@example.com", + "timezone": "Europe/Amsterdam" +} +``` + +See the [Swagger documentation](https://backend.agpt.co/external-api/docs) for all available endpoints. + +### Step 5: Refresh Tokens + +Access tokens expire after 1 hour. Use the refresh token to get new tokens: + +```http +POST /api/oauth/token +Content-Type: application/json + +{ + "grant_type": "refresh_token", + "refresh_token": "agpt_rt_...", + "client_id": "{YOUR_CLIENT_ID}", + "client_secret": "{YOUR_CLIENT_SECRET}" +} +``` + +**Response:** + +```json +{ + "token_type": "Bearer", + "access_token": "agpt_xt_...", + "access_token_expires_at": "2025-01-15T13:00:00Z", + "refresh_token": "agpt_rt_...", + "refresh_token_expires_at": "2025-02-14T12:00:00Z", + "scopes": ["EXECUTE_GRAPH", "READ_GRAPH"] +} +``` + +## Integration Setup Wizard + +The Integration Setup Wizard guides users through connecting third-party services (like GitHub, Google, etc.) to their AutoGPT account. This is useful when your application needs users to have specific integrations configured. + +### Redirect to the Wizard + +```url +https://platform.agpt.co/auth/integrations/setup-wizard? + client_id={YOUR_CLIENT_ID}& + providers={BASE64_ENCODED_PROVIDERS}& + redirect_uri=https://yourapp.com/callback& + state={RANDOM_STATE_TOKEN} +``` + +#### Parameters + +| Parameter | Required | Description | +|-----------|----------|-------------| +| `client_id` | Yes | Your OAuth application's client ID | +| `providers` | Yes | Base64-encoded JSON array of provider configurations | +| `redirect_uri` | Yes | URL to redirect after setup completes | +| `state` | Yes | Random string to prevent CSRF attacks | + +#### Provider Configuration + +The `providers` parameter is a Base64-encoded JSON array: + +```javascript +const providers = [ + { provider: 'github', scopes: ['repo', 'read:user'] }, + { provider: 'google', scopes: ['https://www.googleapis.com/auth/calendar'] }, + { provider: 'slack' } // Uses default scopes +]; + +const providersBase64 = btoa(JSON.stringify(providers)); +``` + +### Handle the Callback + +After setup completes: + +**Success:** + +```url +https://yourapp.com/callback?success=true&state=RANDOM_STATE_TOKEN +``` + +**Failure/Cancelled:** + +```url +https://yourapp.com/callback?success=false&state=RANDOM_STATE_TOKEN +``` + +## Provider Scopes Reference + +When using the Integration Setup Wizard, you need to specify which scopes to request from each provider. 
Here are common providers and their scopes: + +### GitHub + +Documentation: https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/scopes-for-oauth-apps + +| Scope | Description | +|-------|-------------| +| `repo` | Full control of private repositories | +| `read:user` | Read user profile data | +| `user:email` | Access user email addresses | +| `gist` | Create and manage gists | +| `workflow` | Update GitHub Actions workflows | + +**Example:** + +```javascript +{ provider: 'github', scopes: ['repo', 'read:user'] } +``` + +### Google + +Documentation: https://developers.google.com/identity/protocols/oauth2/scopes + +| Scope | Description | +|-------|-------------| +| `email` | View email address (default) | +| `profile` | View basic profile info (default) | +| `openid` | OpenID Connect (default) | +| `https://www.googleapis.com/auth/calendar` | Google Calendar access | +| `https://www.googleapis.com/auth/drive` | Google Drive access | +| `https://www.googleapis.com/auth/gmail.readonly` | Read Gmail messages | + +**Example:** + +```javascript +{ provider: 'google', scopes: ['https://www.googleapis.com/auth/calendar'] } +// Or use defaults (email, profile, openid): +{ provider: 'google' } +``` + +### Notion + +Documentation: https://developers.notion.com/reference/capabilities + +Notion uses a single OAuth scope that grants access based on pages the user selects during authorization. + +### Linear + +Documentation: https://developers.linear.app/docs/oauth/authentication + +| Scope | Description | +|-------|-------------| +| `read` | Read access to Linear data | +| `write` | Write access to Linear data | +| `issues:create` | Create issues | + +## PKCE Implementation + +PKCE (Proof Key for Code Exchange) is required for all authorization requests. 
Here's how to implement it: + +### JavaScript Example + +```javascript +async function generatePkce() { + // Generate a random code verifier + const array = new Uint8Array(32); + crypto.getRandomValues(array); + const verifier = Array.from(array, b => b.toString(16).padStart(2, '0')).join(''); + + // Create SHA-256 hash and base64url encode it + const hash = await crypto.subtle.digest('SHA-256', new TextEncoder().encode(verifier)); + const challenge = btoa(String.fromCharCode(...new Uint8Array(hash))) + .replace(/\+/g, '-') + .replace(/\//g, '_') + .replace(/=+$/, ''); + + return { verifier, challenge }; +} + +// Usage: +const pkce = await generatePkce(); +// Store pkce.verifier securely (e.g., in session storage) +// Use pkce.challenge in the authorization URL +``` + +### Python Example + +```python +import hashlib +import base64 +import secrets + +def generate_pkce(): + # Generate a random code verifier + verifier = secrets.token_urlsafe(32) + + # Create SHA-256 hash and base64url encode it + digest = hashlib.sha256(verifier.encode()).digest() + challenge = base64.urlsafe_b64encode(digest).decode().rstrip('=') + + return verifier, challenge + +# Usage: +verifier, challenge = generate_pkce() +# Store verifier securely in session +# Use challenge in the authorization URL +``` + +## Token Management + +### Token Lifetimes + +| Token Type | Lifetime | +|------------|----------| +| Access Token | 1 hour | +| Refresh Token | 30 days | +| Authorization Code | 10 minutes | + +### Token Introspection + +Check if a token is valid: + +```http +POST /api/oauth/introspect +Content-Type: application/json + +{ + "token": "agpt_xt_...", + "token_type_hint": "access_token", + "client_id": "{YOUR_CLIENT_ID}", + "client_secret": "{YOUR_CLIENT_SECRET}" +} +``` + +**Response:** + +```json +{ + "active": true, + "scopes": ["EXECUTE_GRAPH", "READ_GRAPH"], + "client_id": "agpt_client_...", + "user_id": "user-uuid", + "exp": 1705320000, + "token_type": "access_token" +} +``` + +### Token Revocation + +Revoke a token when the user logs out: + +```http +POST /api/oauth/revoke +Content-Type: application/json + +{ + "token": "agpt_xt_...", + "token_type_hint": "access_token", + "client_id": "{YOUR_CLIENT_ID}", + "client_secret": "{YOUR_CLIENT_SECRET}" +} +``` + +## Security Best Practices + +1. **Store client secrets securely** - Never expose them in client-side code or version control +2. **Always use PKCE** - Required for all authorization requests +3. **Validate state parameters** - Prevents CSRF attacks +4. **Use HTTPS** - All production redirect URIs must use HTTPS +5. **Request minimal scopes** - Only request the permissions your app needs +6. **Handle token expiration** - Implement automatic token refresh +7. 
**Revoke tokens on logout** - Clean up when users disconnect your app + +## Error Handling + +### Common OAuth Errors + +| Error | Description | Solution | +|-------|-------------|----------| +| `invalid_client` | Client ID not found or inactive | Verify client ID is correct | +| `invalid_redirect_uri` | Redirect URI not registered | Register URI with platform admin | +| `invalid_scope` | Requested scope not allowed | Check allowed scopes for your app | +| `invalid_grant` | Code expired or already used | Authorization codes are single-use | +| `access_denied` | User denied authorization | Handle gracefully in your UI | + +### HTTP Status Codes + +| Code | Meaning | +|------|---------| +| 200 | Success | +| 400 | Bad request (invalid parameters) | +| 401 | Unauthorized (invalid/expired token) | +| 403 | Forbidden (insufficient scope) | +| 404 | Resource not found | + +## Support + +For issues or questions about OAuth integration: + +- Open an issue on [GitHub](https://github.com/Significant-Gravitas/AutoGPT) +- See the [API Guide](api-guide.md) for general API information +- Check the [Swagger documentation](https://backend.agpt.co/external-api/docs) for endpoint details diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index ebf987f34b..876467633e 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -7,14 +7,14 @@ docs_dir: content nav: - Home: index.md - - The AutoGPT Platform 🆕: - - Getting Started: + - The AutoGPT Platform 🆕: + - Getting Started: - Setup AutoGPT (Local-Host): platform/getting-started.md - Edit an Agent: platform/edit-agent.md - Delete an Agent: platform/delete-agent.md - - Download & Import and Agent: platform/download-agent-from-marketplace-local.md + - Download & Import and Agent: platform/download-agent-from-marketplace-local.md - Create a Basic Agent: platform/create-basic-agent.md - - Submit an Agent to the Marketplace: platform/submit-agent-to-marketplace.md + - Submit an Agent to the Marketplace: platform/submit-agent-to-marketplace.md - Advanced Setup: platform/advanced_setup.md - Agent Blocks: platform/agent-blocks.md - Build your own Blocks: platform/new_blocks.md @@ -23,6 +23,9 @@ nav: - Using AI/ML API: platform/aimlapi.md - Using D-ID: platform/d_id.md - Blocks: platform/blocks/blocks.md + - API: + - Introduction: platform/integrating/api-guide.md + - OAuth & SSO: platform/integrating/oauth-guide.md - Contributing: - Tests: platform/contributing/tests.md - OAuth Flows: platform/contributing/oauth-integration-flow.md From 217e3718d7ea0ecdca27221aad58b738944e3229 Mon Sep 17 00:00:00 2001 From: Zamil Majdy Date: Sat, 20 Dec 2025 16:52:51 +0100 Subject: [PATCH 256/260] feat(platform): implement HITL UI redesign with improved review flow (#11529) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary • Redesigned Human-in-the-Loop review interface with yellow warning scheme • Implemented separate approved_data/rejected_data output pins for human_in_the_loop block • Added real-time execution status tracking to legacy flow for review detection • Fixed button loading states and improved UI consistency across flows • Standardized Tailwind CSS usage removing custom values image image image ## Changes Made ### Backend Changes - Modified `human_in_the_loop.py` block to output separate `approved_data` and `rejected_data` pins instead of single reviewed_data with status - Updated block output schema to support better data flow in graph builder ### Frontend UI Changes - Redesigned PendingReviewsList with yellow warning color 
scheme (replacing orange) - Fixed button loading states to show spinner only on clicked button - Improved FloatingReviewsPanel layout removing redundant headers - Added real-time status tracking to legacy flow using useFlowRealtime hook - Fixed AgentActivityDropdown text overflow and layout issues - Enhanced Safe Mode toggle positioning and toast timing - Standardized all custom Tailwind values to use standard classes ### Design System Updates - Added yellow design tokens (25, 150, 600) for warning states - Unified REVIEW status handling across all components - Improved component composition patterns ## Test Plan - [x] Verify HITL blocks create separate output pins for approved/rejected data - [x] Test review flow works in both new and legacy flow builders - [x] Confirm button loading states work correctly (only clicked button shows spinner) - [x] Validate AgentActivityDropdown properly displays review status - [x] Check Safe Mode toggle positioning matches old flow - [x] Ensure real-time status updates work in legacy flow - [x] Verify yellow warning colors are consistent throughout 🤖 Generated with [Claude Code](https://claude.ai/code) --------- Co-authored-by: Lluis Agusti --- .../components/FloatingSafeModeToogle.tsx | 86 +++++++++++ .../build/components/FlowEditor/Flow/Flow.tsx | 5 +- .../components/NodeExecutionBadge.tsx | 2 +- .../FlowEditor/nodes/CustomNode/helpers.ts | 2 +- .../components/legacy-builder/Flow/Flow.tsx | 9 +- .../NewAgentLibraryView.tsx | 57 +++++-- .../CredentialRow/CredentialRow.tsx | 2 +- .../components/other/AgentSettingsButton.tsx | 29 ++++ .../selected-views/AnchorLinksWrap.tsx | 14 -- .../selected-views/LoadingSelectedContent.tsx | 12 +- .../SelectedRunView/SelectedRunView.tsx | 61 ++++---- .../components/RunStatusBadge.tsx | 8 +- .../SelectedRunView/components/RunSummary.tsx | 2 +- .../components/SafeModeToggle.tsx | 52 +++++++ .../SelectedRunActions/SelectedRunActions.tsx | 4 +- .../SelectedScheduleView.tsx | 12 +- .../SelectedSettingsView.tsx | 67 ++++++++ .../SelectedTemplateView.tsx | 4 +- .../SelectedTriggerView.tsx | 4 +- .../selected-views/SelectedViewLayout.tsx | 32 +++- .../components/TaskListItem.tsx | 4 +- .../useNewAgentLibraryView.ts | 14 +- .../components/agent-run-status-chip.tsx | 4 +- .../monitoring/components/AgentFlowList.tsx | 2 +- .../AgentActivityDropdown.tsx | 3 +- .../ActivityDropdown/ActivityDropdown.tsx | 2 +- .../components/ActivityItem.tsx | 111 ++++++------- .../molecules/Breadcrumbs/Breadcrumbs.tsx | 20 ++- .../FloatingReviewsPanel.tsx | 35 +++-- .../PendingReviewCard/PendingReviewCard.tsx | 133 +++++++++------- .../PendingReviewsList/PendingReviewsList.tsx | 146 ++++++++---------- .../frontend/src/components/styles/colors.ts | 4 +- .../useAgentSafeMode.ts} | 86 ++--------- .../frontend/src/hooks/useExecutionEvents.ts | 23 ++- 34 files changed, 648 insertions(+), 403 deletions(-) create mode 100644 autogpt_platform/frontend/src/app/(platform)/build/components/FloatingSafeModeToogle.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton.tsx delete mode 100644 autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/AnchorLinksWrap.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SafeModeToggle.tsx create mode 100644 
autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedSettingsView/SelectedSettingsView.tsx rename autogpt_platform/frontend/src/{components/molecules/FloatingSafeModeToggle/FloatingSafeModeToggle.tsx => hooks/useAgentSafeMode.ts} (65%) diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FloatingSafeModeToogle.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/FloatingSafeModeToogle.tsx new file mode 100644 index 0000000000..c1a7ef3b35 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FloatingSafeModeToogle.tsx @@ -0,0 +1,86 @@ +import { GraphModel } from "@/app/api/__generated__/models/graphModel"; +import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; +import { Button } from "@/components/atoms/Button/Button"; +import { Graph } from "@/lib/autogpt-server-api/types"; +import { cn } from "@/lib/utils"; +import { ShieldCheckIcon, ShieldIcon } from "@phosphor-icons/react"; +import { Text } from "@/components/atoms/Text/Text"; +import { useAgentSafeMode } from "@/hooks/useAgentSafeMode"; +import { + Tooltip, + TooltipContent, + TooltipTrigger, +} from "@/components/atoms/Tooltip/BaseTooltip"; + +interface Props { + graph: GraphModel | LibraryAgent | Graph; + className?: string; + fullWidth?: boolean; +} + +export function FloatingSafeModeToggle({ + graph, + className, + fullWidth = false, +}: Props) { + const { + currentSafeMode, + isPending, + shouldShowToggle, + isStateUndetermined, + handleToggle, + } = useAgentSafeMode(graph); + + if (!shouldShowToggle || isStateUndetermined || isPending) { + return null; + } + + return ( +
+ + + + + +
+
+ Safe Mode: {currentSafeMode! ? "ON" : "OFF"} +
+
+ {currentSafeMode! + ? "Human in the loop blocks require manual review" + : "Human in the loop blocks proceed automatically"} +
+
+
+
+
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx index 13268fc816..d312fd487d 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx @@ -16,12 +16,12 @@ import { useCopyPaste } from "./useCopyPaste"; import { FloatingReviewsPanel } from "@/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel"; import { parseAsString, useQueryStates } from "nuqs"; import { CustomControls } from "./components/CustomControl"; -import { FloatingSafeModeToggle } from "@/components/molecules/FloatingSafeModeToggle/FloatingSafeModeToggle"; import { useGetV1GetSpecificGraph } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { GraphModel } from "@/app/api/__generated__/models/graphModel"; import { okData } from "@/app/api/helpers"; import { TriggerAgentBanner } from "./components/TriggerAgentBanner"; import { resolveCollisions } from "./helpers/resolve-collision"; +import { FloatingSafeModeToggle } from "../../FloatingSafeModeToogle"; export const Flow = () => { const [{ flowID, flowExecutionID }] = useQueryStates({ @@ -113,8 +113,7 @@ export const Flow = () => { {graph && ( )} diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/components/NodeExecutionBadge.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/components/NodeExecutionBadge.tsx index acc0c26156..5571274ffb 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/components/NodeExecutionBadge.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/components/NodeExecutionBadge.tsx @@ -9,7 +9,7 @@ const statusStyles: Record = { INCOMPLETE: "text-slate-700 border-slate-400", QUEUED: "text-blue-700 border-blue-400", RUNNING: "text-amber-700 border-amber-400", - REVIEW: "text-orange-700 border-orange-400 bg-orange-50", + REVIEW: "text-yellow-700 border-yellow-400 bg-yellow-50", COMPLETED: "text-green-700 border-green-400", TERMINATED: "text-orange-700 border-orange-400", FAILED: "text-red-700 border-red-400", diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/helpers.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/helpers.ts index 2093fed40f..8d228d0cd0 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/helpers.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/nodes/CustomNode/helpers.ts @@ -4,7 +4,7 @@ export const nodeStyleBasedOnStatus: Record = { INCOMPLETE: "ring-slate-300 bg-slate-300", QUEUED: " ring-blue-300 bg-blue-300", RUNNING: "ring-amber-300 bg-amber-300", - REVIEW: "ring-orange-300 bg-orange-300", + REVIEW: "ring-yellow-300 bg-yellow-300", COMPLETED: "ring-green-300 bg-green-300", TERMINATED: "ring-orange-300 bg-orange-300 ", FAILED: "ring-red-300 bg-red-300", diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/Flow/Flow.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/Flow/Flow.tsx index 7e9b54d626..80a6fb022d 100644 --- 
a/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/Flow/Flow.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/Flow/Flow.tsx @@ -65,7 +65,8 @@ import NewControlPanel from "@/app/(platform)/build/components/NewControlPanel/N import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag"; import { BuildActionBar } from "../BuildActionBar"; import { FloatingReviewsPanel } from "@/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel"; -import { FloatingSafeModeToggle } from "@/components/molecules/FloatingSafeModeToggle/FloatingSafeModeToggle"; +import { useFlowRealtime } from "@/app/(platform)/build/components/FlowEditor/Flow/useFlowRealtime"; +import { FloatingSafeModeToggle } from "../../FloatingSafeModeToogle"; // This is for the history, this is the minimum distance a block must move before it is logged // It helps to prevent spamming the history with small movements especially when pressing on a input in a block @@ -153,6 +154,9 @@ const FlowEditor: React.FC<{ Record >(Object.fromEntries(nodes.map((node) => [node.id, node.position]))); + // Add realtime execution status tracking for FloatingReviewsPanel + useFlowRealtime(); + const router = useRouter(); const pathname = usePathname(); const params = useSearchParams(); @@ -924,8 +928,7 @@ const FlowEditor: React.FC<{ {savedAgent && ( )} {isNewBlockEnabled ? ( diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/NewAgentLibraryView.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/NewAgentLibraryView.tsx index 2831d6cdba..2d7a1b30f4 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/NewAgentLibraryView.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/NewAgentLibraryView.tsx @@ -5,6 +5,7 @@ import { Breadcrumbs } from "@/components/molecules/Breadcrumbs/Breadcrumbs"; import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard"; import { cn } from "@/lib/utils"; import { PlusIcon } from "@phosphor-icons/react"; +import { useEffect } from "react"; import { RunAgentModal } from "./components/modals/RunAgentModal/RunAgentModal"; import { AgentRunsLoading } from "./components/other/AgentRunsLoading"; import { EmptySchedules } from "./components/other/EmptySchedules"; @@ -17,6 +18,7 @@ import { SelectedRunView } from "./components/selected-views/SelectedRunView/Sel import { SelectedScheduleView } from "./components/selected-views/SelectedScheduleView/SelectedScheduleView"; import { SelectedTemplateView } from "./components/selected-views/SelectedTemplateView/SelectedTemplateView"; import { SelectedTriggerView } from "./components/selected-views/SelectedTriggerView/SelectedTriggerView"; +import { SelectedSettingsView } from "./components/selected-views/SelectedSettingsView/SelectedSettingsView"; import { SelectedViewLayout } from "./components/selected-views/SelectedViewLayout"; import { SidebarRunsList } from "./components/sidebar/SidebarRunsList/SidebarRunsList"; import { AGENT_LIBRARY_SECTION_PADDING_X } from "./helpers"; @@ -24,7 +26,6 @@ import { useNewAgentLibraryView } from "./useNewAgentLibraryView"; export function NewAgentLibraryView() { const { - agentId, agent, ready, activeTemplate, @@ -39,10 +40,17 @@ export function NewAgentLibraryView() { handleCountsChange, handleClearSelectedRun, onRunInitiated, + 
handleSelectSettings, onTriggerSetup, onScheduleCreated, } = useNewAgentLibraryView(); + useEffect(() => { + if (agent) { + document.title = `${agent.name} - Library - AutoGPT Platform`; + } + }, [agent]); + if (error) { return (
- +
+ +
{activeItem ? ( - activeTab === "scheduled" ? ( + activeItem === "settings" ? ( + + ) : activeTab === "scheduled" ? ( ) ) : sidebarLoading ? ( - + ) : activeTab === "scheduled" ? ( - + ) : activeTab === "templates" ? ( - + ) : activeTab === "triggers" ? ( - + ) : ( - + {"*".repeat(MASKED_KEY_LENGTH)} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton.tsx new file mode 100644 index 0000000000..bc710ebc4e --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton.tsx @@ -0,0 +1,29 @@ +import { Button } from "@/components/atoms/Button/Button"; +import { GearIcon } from "@phosphor-icons/react"; +import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; +import { useAgentSafeMode } from "@/hooks/useAgentSafeMode"; + +interface Props { + agent: LibraryAgent; + onSelectSettings: () => void; +} + +export function AgentSettingsButton({ agent, onSelectSettings }: Props) { + const { hasHITLBlocks } = useAgentSafeMode(agent); + + if (!hasHITLBlocks) { + return null; + } + + return ( + + ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/AnchorLinksWrap.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/AnchorLinksWrap.tsx deleted file mode 100644 index 6dae969142..0000000000 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/AnchorLinksWrap.tsx +++ /dev/null @@ -1,14 +0,0 @@ -import { cn } from "@/lib/utils"; -import { AGENT_LIBRARY_SECTION_PADDING_X } from "../../helpers"; - -type Props = { - children: React.ReactNode; -}; - -export function AnchorLinksWrap({ children }: Props) { - return ( -
- -
- ); -} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/LoadingSelectedContent.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/LoadingSelectedContent.tsx index d239f57e31..dc2bb7cac2 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/LoadingSelectedContent.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/LoadingSelectedContent.tsx @@ -1,16 +1,22 @@ import { Skeleton } from "@/components/__legacy__/ui/skeleton"; import { cn } from "@/lib/utils"; +import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; import { AGENT_LIBRARY_SECTION_PADDING_X } from "../../helpers"; import { SelectedViewLayout } from "./SelectedViewLayout"; interface Props { - agentName: string; - agentId: string; + agent: LibraryAgent; + onSelectSettings?: () => void; + selectedSettings?: boolean; } export function LoadingSelectedContent(props: Props) { return ( - +
diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/SelectedRunView.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/SelectedRunView.tsx index ff9a4e5809..9e470139ff 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/SelectedRunView.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/SelectedRunView.tsx @@ -32,6 +32,8 @@ interface Props { runId: string; onSelectRun?: (id: string) => void; onClearSelectedRun?: () => void; + onSelectSettings?: () => void; + selectedSettings?: boolean; } export function SelectedRunView({ @@ -39,6 +41,7 @@ export function SelectedRunView({ runId, onSelectRun, onClearSelectedRun, + onSelectSettings, }: Props) { const { run, preset, isLoading, responseError, httpError } = useSelectedRunView(agent.graph_id, runId); @@ -72,13 +75,13 @@ export function SelectedRunView({ } if (isLoading && !run) { - return ; + return ; } return (
- +
@@ -106,6 +109,11 @@ export function SelectedRunView({ className="-mt-2 flex flex-col" > + {withReviews && ( + + Reviews ({pendingReviews.length}) + + )} {withSummary && ( Summary @@ -117,13 +125,29 @@ export function SelectedRunView({ Your input - {withReviews && ( - - Reviews ({pendingReviews.length}) - - )}
+ {/* Human-in-the-Loop Reviews Section */} + {withReviews && ( + +
+ {reviewsLoading ? ( + + ) : pendingReviews.length > 0 ? ( + + ) : ( + + No pending reviews for this execution + + )} +
+
+ )} + {/* Summary Section */} {withSummary && ( @@ -186,29 +210,6 @@ export function SelectedRunView({
- - {/* Reviews Section */} - {withReviews && ( - -
- - {reviewsLoading ? ( - - ) : pendingReviews.length > 0 ? ( - - ) : ( - - No pending reviews for this execution - - )} - -
-
- )}
diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunStatusBadge.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunStatusBadge.tsx index cf92280c86..3781e724ad 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunStatusBadge.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunStatusBadge.tsx @@ -2,10 +2,10 @@ import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecut import { CheckCircleIcon, ClockIcon, - EyeIcon, PauseCircleIcon, StopCircleIcon, WarningCircleIcon, + WarningIcon, XCircleIcon, } from "@phosphor-icons/react"; import { Text } from "@/components/atoms/Text/Text"; @@ -38,9 +38,9 @@ const statusIconMap: Record = { textColor: "!text-yellow-700", }, REVIEW: { - icon: , - bgColor: "bg-orange-50", - textColor: "!text-orange-700", + icon: , + bgColor: "bg-yellow-50", + textColor: "!text-yellow-700", }, COMPLETED: { icon: ( diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunSummary.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunSummary.tsx index aea9bae7f9..c887ae969d 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunSummary.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunSummary.tsx @@ -25,7 +25,7 @@ export function RunSummary({ run }: Props) {

{typeof correctnessScore === "number" && ( -
+
Success Estimate: diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SafeModeToggle.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SafeModeToggle.tsx new file mode 100644 index 0000000000..9ba37d8d17 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SafeModeToggle.tsx @@ -0,0 +1,52 @@ +import { GraphModel } from "@/app/api/__generated__/models/graphModel"; +import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; +import { Button } from "@/components/atoms/Button/Button"; +import { Graph } from "@/lib/autogpt-server-api/types"; +import { cn } from "@/lib/utils"; +import { ShieldCheckIcon, ShieldIcon } from "@phosphor-icons/react"; +import { useAgentSafeMode } from "@/hooks/useAgentSafeMode"; + +interface Props { + graph: GraphModel | LibraryAgent | Graph; + className?: string; + fullWidth?: boolean; +} + +export function SafeModeToggle({ graph }: Props) { + const { + currentSafeMode, + isPending, + shouldShowToggle, + isStateUndetermined, + handleToggle, + } = useAgentSafeMode(graph); + + if (!shouldShowToggle || isStateUndetermined) { + return null; + } + + return ( + + ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/SelectedRunActions.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/SelectedRunActions.tsx index 92db3e0b37..cb821b2ecd 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/SelectedRunActions.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/SelectedRunActions.tsx @@ -2,7 +2,6 @@ import { GraphExecution } from "@/app/api/__generated__/models/graphExecution"; import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; import { Button } from "@/components/atoms/Button/Button"; import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner"; -import { FloatingSafeModeToggle } from "@/components/molecules/FloatingSafeModeToggle/FloatingSafeModeToggle"; import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag"; import { ArrowBendLeftUpIcon, @@ -16,6 +15,7 @@ import { SelectedActionsWrap } from "../../../SelectedActionsWrap"; import { ShareRunButton } from "../../../ShareRunButton/ShareRunButton"; import { CreateTemplateModal } from "../CreateTemplateModal/CreateTemplateModal"; import { useSelectedRunActions } from "./useSelectedRunActions"; +import { SafeModeToggle } from "../SafeModeToggle"; type Props = { agent: LibraryAgent; @@ -113,7 +113,7 @@ export function SelectedRunActions({ shareToken={run.share_token} /> )} - + {canRunManually && ( <> + Agent Settings +
+ +
+ {!hasHITLBlocks ? ( +
+ + This agent doesn't have any human-in-the-loop blocks, so + there are no settings to configure. + +
+ ) : ( +
+
+
+ Require human approval + + The agent will pause and wait for your review before + continuing + +
+ +
+
+ )} +
+
+ + ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/SelectedTemplateView.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/SelectedTemplateView.tsx index ead985457e..71f7d582bd 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/SelectedTemplateView.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/SelectedTemplateView.tsx @@ -87,7 +87,7 @@ export function SelectedTemplateView({ } if (isLoading && !template) { - return ; + return ; } if (!template) { @@ -100,7 +100,7 @@ export function SelectedTemplateView({ return (
- +
diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/SelectedTriggerView.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/SelectedTriggerView.tsx index 64d4430e78..f8ec392f09 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/SelectedTriggerView.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/SelectedTriggerView.tsx @@ -81,7 +81,7 @@ export function SelectedTriggerView({ } if (isLoading && !trigger) { - return ; + return ; } if (!trigger) { @@ -93,7 +93,7 @@ export function SelectedTriggerView({ return (
- +
diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedViewLayout.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedViewLayout.tsx index 242430ba6a..df549bea58 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedViewLayout.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedViewLayout.tsx @@ -1,11 +1,15 @@ import { Breadcrumbs } from "@/components/molecules/Breadcrumbs/Breadcrumbs"; +import { AgentSettingsButton } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton"; +import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; import { AGENT_LIBRARY_SECTION_PADDING_X } from "../../helpers"; import { SectionWrap } from "../other/SectionWrap"; interface Props { - agentName: string; - agentId: string; + agent: LibraryAgent; children: React.ReactNode; + additionalBreadcrumb?: { name: string; link?: string }; + onSelectSettings?: () => void; + selectedSettings?: boolean; } export function SelectedViewLayout(props: Props) { @@ -14,12 +18,24 @@ export function SelectedViewLayout(props: Props) {
- +
+ + {props.agent && props.onSelectSettings && ( +
+ +
+ )} +
{props.children} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/components/TaskListItem.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/components/TaskListItem.tsx index 22adc54e4f..2e2871e15e 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/components/TaskListItem.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/components/TaskListItem.tsx @@ -34,8 +34,8 @@ const statusIconMap: Record = { ), REVIEW: ( - - + + ), COMPLETED: ( diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/useNewAgentLibraryView.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/useNewAgentLibraryView.ts index b280400401..394edb1a6d 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/useNewAgentLibraryView.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/useNewAgentLibraryView.ts @@ -89,10 +89,8 @@ export function useNewAgentLibraryView() { [sidebarCounts], ); - // Show sidebar layout while loading or when there are items - const showSidebarLayout = sidebarLoading || hasAnyItems; - - useEffect(() => { + // Show sidebar layout while loading or when there are items or settings is selected + const showSidebarLayout = useEffect(() => { if (agent) { document.title = `${agent.name} - Library - AutoGPT Platform`; } @@ -134,6 +132,13 @@ export function useNewAgentLibraryView() { }); } + function handleSelectSettings() { + setQueryStates({ + activeItem: "settings", + activeTab: "runs", // Reset to runs tab when going to settings + }); + } + const handleCountsChange = useCallback( (counts: { runsCount: number; @@ -205,6 +210,7 @@ export function useNewAgentLibraryView() { handleCountsChange, handleSelectRun, onRunInitiated, + handleSelectSettings, onTriggerSetup, onScheduleCreated, }; diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-run-status-chip.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-run-status-chip.tsx index 24b2864359..58f1ee8381 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-run-status-chip.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-run-status-chip.tsx @@ -38,7 +38,7 @@ const statusData: Record< draft: { label: "Draft", variant: "secondary" }, stopped: { label: "Stopped", variant: "secondary" }, scheduled: { label: "Scheduled", variant: "secondary" }, - review: { label: "In Review", variant: "orange" }, + review: { label: "In Review", variant: "warning" }, }; const statusStyles = { @@ -47,8 +47,6 @@ const statusStyles = { destructive: "bg-red-100 text-red-800 hover:bg-red-100 hover:text-red-800", warning: "bg-yellow-100 text-yellow-800 hover:bg-yellow-100 hover:text-yellow-800", - orange: - "bg-orange-100 text-orange-800 hover:bg-orange-100 hover:text-orange-800", info: "bg-blue-100 text-blue-800 hover:bg-blue-100 
hover:text-blue-800", secondary: "bg-slate-100 text-slate-800 hover:bg-slate-100 hover:text-slate-800", diff --git a/autogpt_platform/frontend/src/app/(platform)/monitoring/components/AgentFlowList.tsx b/autogpt_platform/frontend/src/app/(platform)/monitoring/components/AgentFlowList.tsx index 1080a355cd..d4cb6fc649 100644 --- a/autogpt_platform/frontend/src/app/(platform)/monitoring/components/AgentFlowList.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/monitoring/components/AgentFlowList.tsx @@ -54,7 +54,7 @@ export const AgentFlowList = ({
{/* Split "Create" button */} - diff --git a/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/AgentActivityDropdown.tsx b/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/AgentActivityDropdown.tsx index 6dcea33d51..1d120c3b09 100644 --- a/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/AgentActivityDropdown.tsx +++ b/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/AgentActivityDropdown.tsx @@ -48,8 +48,7 @@ export function AgentActivityDropdown() { className="absolute bottom-[-2.5rem] left-1/2 z-50 hidden -translate-x-1/2 transform whitespace-nowrap rounded-small bg-white px-4 py-2 shadow-md group-hover:block" > - {activeCount} running agent - {activeCount > 1 ? "s" : ""} + {activeCount} active agent{activeCount > 1 ? "s" : ""}
diff --git a/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/components/ActivityDropdown/ActivityDropdown.tsx b/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/components/ActivityDropdown/ActivityDropdown.tsx index 4eafee8f2e..263453b327 100644 --- a/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/components/ActivityDropdown/ActivityDropdown.tsx +++ b/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/components/ActivityDropdown/ActivityDropdown.tsx @@ -130,7 +130,7 @@ export function ActivityDropdown({ {filteredExecutions.length > 0 ? ( ; + return ; case AgentExecutionStatus.RUNNING: return ( - + ); case AgentExecutionStatus.COMPLETED: - return ( - - ); + return ; case AgentExecutionStatus.FAILED: - return ; + return ; case AgentExecutionStatus.TERMINATED: - return ( - - ); + return ; case AgentExecutionStatus.INCOMPLETE: - return ; + return ; case AgentExecutionStatus.REVIEW: - return ; + return ; default: return null; } } - function getTimeDisplay() { + function getItemDisplay() { + // Handle active statuses (running/queued) const isActiveStatus = execution.status === AgentExecutionStatus.RUNNING || - execution.status === AgentExecutionStatus.QUEUED || - execution.status === AgentExecutionStatus.REVIEW; + execution.status === AgentExecutionStatus.QUEUED; if (isActiveStatus) { const timeAgo = formatTimeAgo(execution.started_at.toString()); - let statusText = "running"; - if (execution.status === AgentExecutionStatus.QUEUED) { - statusText = "queued"; - } - return `Started ${timeAgo}, ${getExecutionDuration(execution)} ${statusText}`; + const statusText = + execution.status === AgentExecutionStatus.QUEUED ? "queued" : "running"; + return [ + `Started ${timeAgo}, ${getExecutionDuration(execution)} ${statusText}`, + ]; } - if (execution.ended_at) { - const timeAgo = formatTimeAgo(execution.ended_at.toString()); - switch (execution.status) { - case AgentExecutionStatus.COMPLETED: - return `Completed ${timeAgo}`; - case AgentExecutionStatus.FAILED: - return `Failed ${timeAgo}`; - case AgentExecutionStatus.TERMINATED: - return `Stopped ${timeAgo}`; - case AgentExecutionStatus.INCOMPLETE: - return `Incomplete ${timeAgo}`; - case AgentExecutionStatus.REVIEW: - return `In review ${timeAgo}`; - default: - return `Ended ${timeAgo}`; - } + // Handle all other statuses with time display + const timeAgo = execution.ended_at + ? formatTimeAgo(execution.ended_at.toString()) + : formatTimeAgo(execution.started_at.toString()); + + let statusText = "ended"; + switch (execution.status) { + case AgentExecutionStatus.COMPLETED: + statusText = "completed"; + break; + case AgentExecutionStatus.FAILED: + statusText = "failed"; + break; + case AgentExecutionStatus.TERMINATED: + statusText = "stopped"; + break; + case AgentExecutionStatus.INCOMPLETE: + statusText = "incomplete"; + break; + case AgentExecutionStatus.REVIEW: + statusText = "awaiting approval"; + break; } - return "Unknown"; + return [ + `${statusText.charAt(0).toUpperCase() + statusText.slice(1)} ${timeAgo}`, + ]; } // Determine the tab based on execution status @@ -101,20 +100,22 @@ export function ActivityItem({ execution }: Props) { {/* Icon + Agent Name */}
{getStatusIcon()} - + {execution.agent_name}
{/* Agent Message - Indented */}
- {/* Time - Indented */} - - {getTimeDisplay()} - + {getItemDisplay().map((line, index) => ( + + {line} + + ))}
); diff --git a/autogpt_platform/frontend/src/components/molecules/Breadcrumbs/Breadcrumbs.tsx b/autogpt_platform/frontend/src/components/molecules/Breadcrumbs/Breadcrumbs.tsx index 1fb69763fe..f990970a6c 100644 --- a/autogpt_platform/frontend/src/components/molecules/Breadcrumbs/Breadcrumbs.tsx +++ b/autogpt_platform/frontend/src/components/molecules/Breadcrumbs/Breadcrumbs.tsx @@ -4,7 +4,7 @@ import * as React from "react"; interface BreadcrumbItem { name: string; - link: string; + link?: string; } interface Props { @@ -16,12 +16,18 @@ export function Breadcrumbs({ items }: Props) {
{items.map((item, index) => ( - - {item.name} - + {item.link ? ( + + {item.name} + + ) : ( + + {item.name} + + )} {index < items.length - 1 && ( / diff --git a/autogpt_platform/frontend/src/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel.tsx b/autogpt_platform/frontend/src/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel.tsx index 183fd8599e..12014e50fe 100644 --- a/autogpt_platform/frontend/src/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel.tsx +++ b/autogpt_platform/frontend/src/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel.tsx @@ -7,6 +7,8 @@ import { cn } from "@/lib/utils"; import { Text } from "@/components/atoms/Text/Text"; import { useGetV1GetExecutionDetails } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecutionStatus"; +import { useGraphStore } from "@/app/(platform)/build/stores/graphStore"; +import { useShallow } from "zustand/react/shallow"; interface FloatingReviewsPanelProps { executionId?: string; @@ -34,6 +36,11 @@ export function FloatingReviewsPanel({ const executionStatus = executionDetails?.status === 200 ? executionDetails.data.status : undefined; + // Get graph execution status from the store (updated via WebSocket) + const graphExecutionStatus = useGraphStore( + useShallow((state) => state.graphExecutionStatus), + ); + const { pendingReviews, isLoading, refetch } = usePendingReviewsForExecution( executionId || "", ); @@ -44,6 +51,13 @@ export function FloatingReviewsPanel({ } }, [executionStatus, executionId, refetch]); + // Refetch when graph execution status changes to REVIEW + useEffect(() => { + if (graphExecutionStatus === AgentExecutionStatus.REVIEW && executionId) { + refetch(); + } + }, [graphExecutionStatus, executionId, refetch]); + if ( !executionId || (!isLoading && @@ -73,18 +87,17 @@ export function FloatingReviewsPanel({ )} {isOpen && ( -
-
-
- - Pending Reviews -
- -
+
+ -
+
{isLoading ? (
diff --git a/autogpt_platform/frontend/src/components/organisms/PendingReviewCard/PendingReviewCard.tsx b/autogpt_platform/frontend/src/components/organisms/PendingReviewCard/PendingReviewCard.tsx index b5094dd4cd..3ac636060c 100644 --- a/autogpt_platform/frontend/src/components/organisms/PendingReviewCard/PendingReviewCard.tsx +++ b/autogpt_platform/frontend/src/components/organisms/PendingReviewCard/PendingReviewCard.tsx @@ -40,18 +40,18 @@ function extractReviewData(payload: unknown): { interface PendingReviewCardProps { review: PendingHumanReviewModel; onReviewDataChange: (nodeExecId: string, data: string) => void; - reviewMessage: string; - onReviewMessageChange: (nodeExecId: string, message: string) => void; - isDisabled: boolean; - onToggleDisabled: (nodeExecId: string) => void; + reviewMessage?: string; + onReviewMessageChange?: (nodeExecId: string, message: string) => void; + isDisabled?: boolean; + onToggleDisabled?: (nodeExecId: string) => void; } export function PendingReviewCard({ review, onReviewDataChange, - reviewMessage, + reviewMessage = "", onReviewMessageChange, - isDisabled, + isDisabled = false, onToggleDisabled, }: PendingReviewCardProps) { const extractedData = extractReviewData(review.payload); @@ -65,9 +65,12 @@ export function PendingReviewCard({ }; const handleMessageChange = (newMessage: string) => { - onReviewMessageChange(review.node_exec_id, newMessage); + onReviewMessageChange?.(review.node_exec_id, newMessage); }; + // Show simplified view when no toggle functionality is provided (Screenshot 1 mode) + const showSimplified = !onToggleDisabled; + const renderDataInput = () => { const data = currentData; @@ -134,60 +137,80 @@ export function PendingReviewCard({ } }; - return ( -
-
-
- {isDisabled && ( - - This item will be rejected - - )} -
- -
+ // Helper function to get proper field label + const getFieldLabel = (instructions?: string) => { + if (instructions) + return instructions.charAt(0).toUpperCase() + instructions.slice(1); + return "Data to Review"; + }; - {instructions && ( -
- - Instructions: - - {instructions} + // Use the existing HITL review interface + return ( +
+ {!showSimplified && ( +
+
+ {isDisabled && ( + + This item will be rejected + + )} +
+
)} -
- - Data to Review: - {!isDataEditable && ( - - (Read-only) - + {/* Show instructions as field label */} + {instructions && ( +
+ + {getFieldLabel(instructions)} + + {isDataEditable && !isDisabled ? ( + renderDataInput() + ) : ( +
+ + {JSON.stringify(currentData, null, 2)} + +
)} - - {isDataEditable && !isDisabled ? ( - renderDataInput() - ) : ( -
- - {JSON.stringify(currentData, null, 2)} - -
- )} -
+
+ )} - {isDisabled && ( + {/* If no instructions, show data directly */} + {!instructions && ( +
+ + Data to Review + {!isDataEditable && ( + + (Read-only) + + )} + + {isDataEditable && !isDisabled ? ( + renderDataInput() + ) : ( +
+ + {JSON.stringify(currentData, null, 2)} + +
+ )} +
+ )} + + {!showSimplified && isDisabled && (
Rejection Reason (Optional): diff --git a/autogpt_platform/frontend/src/components/organisms/PendingReviewsList/PendingReviewsList.tsx b/autogpt_platform/frontend/src/components/organisms/PendingReviewsList/PendingReviewsList.tsx index 320d84e91f..ddc9bab972 100644 --- a/autogpt_platform/frontend/src/components/organisms/PendingReviewsList/PendingReviewsList.tsx +++ b/autogpt_platform/frontend/src/components/organisms/PendingReviewsList/PendingReviewsList.tsx @@ -4,7 +4,7 @@ import { PendingReviewCard } from "@/components/organisms/PendingReviewCard/Pend import { Text } from "@/components/atoms/Text/Text"; import { Button } from "@/components/atoms/Button/Button"; import { useToast } from "@/components/molecules/Toast/use-toast"; -import { ClockIcon, PlayIcon, XIcon, CheckIcon } from "@phosphor-icons/react"; +import { ClockIcon, WarningIcon } from "@phosphor-icons/react"; import { usePostV2ProcessReviewAction } from "@/app/api/__generated__/endpoints/executions/executions"; interface PendingReviewsListProps { @@ -35,9 +35,10 @@ export function PendingReviewsList({ const [reviewMessageMap, setReviewMessageMap] = useState< Record >({}); - const [disabledReviews, setDisabledReviews] = useState>( - new Set(), - ); + + const [pendingAction, setPendingAction] = useState< + "approve" | "reject" | null + >(null); const { toast } = useToast(); @@ -69,9 +70,11 @@ export function PendingReviewsList({ }); } + setPendingAction(null); onReviewComplete?.(); }, onError: (error: Error) => { + setPendingAction(null); toast({ title: "Failed to process reviews", description: error.message || "An error occurred", @@ -89,28 +92,7 @@ export function PendingReviewsList({ setReviewMessageMap((prev) => ({ ...prev, [nodeExecId]: message })); } - function handleToggleDisabled(nodeExecId: string) { - setDisabledReviews((prev) => { - const newSet = new Set(prev); - if (newSet.has(nodeExecId)) { - newSet.delete(nodeExecId); - } else { - newSet.add(nodeExecId); - } - return newSet; - }); - } - - function handleApproveAll() { - setDisabledReviews(new Set()); - } - - function handleRejectAll() { - const allReviewIds = reviews.map((review) => review.node_exec_id); - setDisabledReviews(new Set(allReviewIds)); - } - - function handleContinue() { + function processReviews(approved: boolean) { if (reviews.length === 0) { toast({ title: "No reviews to process", @@ -120,34 +102,34 @@ export function PendingReviewsList({ return; } + setPendingAction(approved ? "approve" : "reject"); const reviewItems = []; for (const review of reviews) { - const isApproved = !disabledReviews.has(review.node_exec_id); const reviewData = reviewDataMap[review.node_exec_id]; const reviewMessage = reviewMessageMap[review.node_exec_id]; - let parsedData; - if (isApproved && review.editable && reviewData) { + let parsedData: any = review.payload; // Default to original payload + + // Parse edited data if available and editable + if (review.editable && reviewData) { try { parsedData = JSON.parse(reviewData); - if (JSON.stringify(parsedData) === JSON.stringify(review.payload)) { - parsedData = undefined; - } } catch (error) { toast({ title: "Invalid JSON", description: `Please fix the JSON format in review for node ${review.node_exec_id}: ${error instanceof Error ? error.message : "Invalid syntax"}`, variant: "destructive", }); + setPendingAction(null); return; } } reviewItems.push({ node_exec_id: review.node_exec_id, - approved: isApproved, - reviewed_data: isApproved ? 
parsedData : undefined, + approved, + reviewed_data: parsedData, message: reviewMessage || undefined, }); } @@ -175,71 +157,67 @@ export function PendingReviewsList({ } return ( -
-
+
+ {/* Warning Box Header */} +
+
+ + + Your review is needed + +
+ + This task is paused until you approve the changes below. Please review + and edit if needed. + +
+ +
{reviews.map((review) => ( ))}
-
-
- - -
+
+ + Note: Changes you make here apply only to this task + -
-
- - {disabledReviews.size > 0 ? ( - <> - Approve {reviews.length - disabledReviews.size}, reject{" "} - {disabledReviews.size} of {reviews.length} items - - ) : ( - <>Approve all {reviews.length} items - )} - -
+
+
diff --git a/autogpt_platform/frontend/src/components/styles/colors.ts b/autogpt_platform/frontend/src/components/styles/colors.ts index 4e37b9bdd5..cfb25ac107 100644 --- a/autogpt_platform/frontend/src/components/styles/colors.ts +++ b/autogpt_platform/frontend/src/components/styles/colors.ts @@ -48,13 +48,15 @@ export const colors = { 900: "#6b3900", }, yellow: { + 25: "#FFFCF3", 50: "#fef9e6", 100: "#fcebb0", + 150: "#FDEFBF", 200: "#fae28a", 300: "#f8d554", 400: "#f7cd33", 500: "#f5c000", - 600: "#dfaf00", + 600: "#DFAF00", 700: "#ae8800", 800: "#876a00", 900: "#675100", diff --git a/autogpt_platform/frontend/src/components/molecules/FloatingSafeModeToggle/FloatingSafeModeToggle.tsx b/autogpt_platform/frontend/src/hooks/useAgentSafeMode.ts similarity index 65% rename from autogpt_platform/frontend/src/components/molecules/FloatingSafeModeToggle/FloatingSafeModeToggle.tsx rename to autogpt_platform/frontend/src/hooks/useAgentSafeMode.ts index a10367177f..654ef858b6 100644 --- a/autogpt_platform/frontend/src/components/molecules/FloatingSafeModeToggle/FloatingSafeModeToggle.tsx +++ b/autogpt_platform/frontend/src/hooks/useAgentSafeMode.ts @@ -1,22 +1,14 @@ +import { useCallback, useState, useEffect } from "react"; import { usePatchV1UpdateGraphSettings } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { getGetV2GetLibraryAgentQueryOptions, useGetV2GetLibraryAgentByGraphId, } from "@/app/api/__generated__/endpoints/library/library"; +import { useToast } from "@/components/molecules/Toast/use-toast"; import { GraphModel } from "@/app/api/__generated__/models/graphModel"; import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; -import { Button } from "@/components/atoms/Button/Button"; -import { - Tooltip, - TooltipContent, - TooltipTrigger, -} from "@/components/atoms/Tooltip/BaseTooltip"; -import { useToast } from "@/components/molecules/Toast/use-toast"; -import { Graph } from "@/lib/autogpt-server-api/types"; -import { cn } from "@/lib/utils"; -import { ShieldCheckIcon, ShieldIcon } from "@phosphor-icons/react"; import { useQueryClient } from "@tanstack/react-query"; -import { useCallback, useEffect, useState } from "react"; +import { Graph } from "@/lib/autogpt-server-api/types"; function getGraphId(graph: GraphModel | LibraryAgent | Graph): string { if ("graph_id" in graph) return graph.graph_id || ""; @@ -41,19 +33,7 @@ function isLibraryAgent( return "graph_id" in graph && "settings" in graph; } -interface FloatingSafeModeToggleProps { - graph: GraphModel | LibraryAgent | Graph; - className?: string; - fullWidth?: boolean; - variant?: "white" | "black"; -} - -export function FloatingSafeModeToggle({ - graph, - className, - fullWidth = false, - variant = "white", -}: FloatingSafeModeToggleProps) { +export function useAgentSafeMode(graph: GraphModel | LibraryAgent | Graph) { const { toast } = useToast(); const queryClient = useQueryClient(); @@ -120,6 +100,7 @@ export function FloatingSafeModeToggle({ description: newSafeMode ? "Human-in-the-loop blocks will require manual review" : "Human-in-the-loop blocks will proceed automatically", + duration: 2000, }); } catch (error) { const isNotFoundError = @@ -154,53 +135,12 @@ export function FloatingSafeModeToggle({ toast, ]); - if (!shouldShowToggle || isStateUndetermined) { - return null; - } - - return ( -
- - - - - -
-
- Safe Mode: {currentSafeMode! ? "ON" : "OFF"} -
-
- {currentSafeMode! - ? "HITL blocks require manual review" - : "HITL blocks proceed automatically"} -
-
-
-
-
- ); + return { + currentSafeMode, + isPending, + shouldShowToggle, + isStateUndetermined, + handleToggle, + hasHITLBlocks: shouldShowToggle, + }; } diff --git a/autogpt_platform/frontend/src/hooks/useExecutionEvents.ts b/autogpt_platform/frontend/src/hooks/useExecutionEvents.ts index 9af2b8aead..ac24396a20 100644 --- a/autogpt_platform/frontend/src/hooks/useExecutionEvents.ts +++ b/autogpt_platform/frontend/src/hooks/useExecutionEvents.ts @@ -63,21 +63,16 @@ export function useExecutionEvents({ if (subscribedIds.has(id)) return; subscribedIds.add(id); - api - .subscribeToGraphExecutions(id as GraphID) - .then(() => { - console.debug(`Subscribed to execution updates for graph ${id}`); - }) - .catch((error) => { - console.error( - `Failed to subscribe to execution updates for graph ${id}:`, - error, - ); - Sentry.captureException(error, { - tags: { graphId: id }, - }); - subscribedIds.delete(id); + api.subscribeToGraphExecutions(id as GraphID).catch((error) => { + console.error( + `Failed to subscribe to execution updates for graph ${id}:`, + error, + ); + Sentry.captureException(error, { + tags: { graphId: id }, }); + subscribedIds.delete(id); + }); }); }); From de78d062a9e938bea2efed7505bf6f586c86919a Mon Sep 17 00:00:00 2001 From: Reinier van der Leer Date: Sat, 20 Dec 2025 21:33:10 +0100 Subject: [PATCH 257/260] refactor(backend/api): Clean up API file structure (#11629) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit We'll soon be needing a more feature-complete external API. To make way for this, I'm moving some files around so: - We can more easily create new versions of our external API - The file structure of our internal API is more homogeneous These changes are quite opinionated, but IMO in any case they're better than the chaotic structure we have now. 
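One of the changes listed below replaces absolute imports of sibling modules with relative imports. As a small illustrative sketch (the before/after lines mirror the `backend/api/features/builder/routes.py` hunk further down in this patch):

```python
# backend/api/features/builder/routes.py (illustrative excerpt)

# Before the move, sibling modules were imported by their absolute path:
#   import backend.server.v2.builder.db as builder_db
#   import backend.server.v2.builder.model as builder_model

# After the move, the same modules are imported relative to the feature package:
from . import db as builder_db
from . import model as builder_model
```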
### Changes 🏗️

- Move `backend/server` -> `backend/api`
- Move `backend/server/routers` + `backend/server/v2` -> `backend/api/features`
- Change absolute sibling imports to relative imports
- Move `backend/server/v2/AutoMod` -> `backend/executor/automod`
- Combine `backend/server/routers/analytics_*test.py` -> `backend/api/features/analytics_test.py`
- Sort OpenAPI spec file

### Checklist 📋

#### For code changes:

- [x] I have clearly listed my changes in the PR description
- [x] I have made a test plan
- [x] I have tested my changes according to the test plan:
  - CI tests
  - [x] Clicking around in the app -> no obvious breakage

---
 .../autogpt_libs/autogpt_libs/auth/helpers.py | 12 +-
 autogpt_platform/backend/TESTING.md | 4 +-
 .../backend/{server => api}/__init__.py | 0
 .../backend/{server => api}/conftest.py | 0
 .../backend/{server => api}/conn_manager.py | 2 +-
 .../{server => api}/conn_manager_test.py | 4 +-
 .../backend/api/external/fastapi_app.py | 25 +
 .../{server => api}/external/middleware.py | 0
 .../routes => api/external/v1}/__init__.py | 0
 .../external/v1}/integrations.py | 4 +-
 .../v1.py => api/external/v1/routes.py} | 12 +-
 .../routes => api/external/v1}/tools.py | 8 +-
 .../postmark => api/features}/__init__.py | 0
 .../features/admin}/__init__.py | 0
 .../features}/admin/credit_admin_routes.py | 3 +-
 .../admin/credit_admin_routes_test.py | 33 +-
 .../admin/execution_analytics_routes.py | 0
 .../v2 => api/features}/admin/model.py | 0
 .../features}/admin/store_admin_routes.py | 24 +-
 .../routers => api/features}/analytics.py | 3 +-
 .../backend/api/features/analytics_test.py | 340 +
 .../features/builder}/__init__.py | 0
 .../{server/v2 => api/features}/builder/db.py | 15 +-
 .../v2 => api/features}/builder/model.py | 4 +-
 .../v2 => api/features}/builder/routes.py | 5 +-
 .../backend/api/features/chat/__init__.py | 0
 .../v2 => api/features}/chat/config.py | 0
 .../{server/v2 => api/features}/chat/model.py | 3 +-
 .../v2 => api/features}/chat/model_test.py | 2 +-
 .../features}/chat/prompts/chat_system.md | 0
 .../features}/chat/response_model.py | 0
 .../v2 => api/features}/chat/routes.py | 5 +-
 .../v2 => api/features}/chat/service.py | 13 +-
 .../v2 => api/features}/chat/service_test.py | 4 +-
 .../features}/chat/tools/__init__.py | 4 +-
 .../features}/chat/tools/_test_data.py | 4 +-
 .../v2 => api/features}/chat/tools/base.py | 4 +-
 .../features}/chat/tools/find_agent.py | 11 +-
 .../v2 => api/features}/chat/tools/models.py | 0
 .../features}/chat/tools/run_agent.py | 23 +-
 .../features}/chat/tools/run_agent_test.py | 4 +-
 .../v2 => api/features}/chat/tools/utils.py | 6 +-
 .../api/features/executions/__init__.py | 0
 .../features/executions/review/__init__.py | 0
 .../features}/executions/review/model.py | 0
 .../executions/review/review_routes_test.py | 49 +-
 .../features}/executions/review/routes.py | 7 +-
 .../api/features/integrations/__init__.py | 0
 .../features}/integrations/models.py | 0
 .../features}/integrations/router.py | 11 +-
 .../backend/api/features/library/__init__.py | 0
 .../{server/v2 => api/features}/library/db.py | 10 +-
 .../v2 => api/features}/library/db_test.py | 17 +-
 .../v2 => api/features}/library/model.py | 0
 .../v2 => api/features}/library/model_test.py | 2 +-
 .../features}/library/routes/__init__.py | 0
 .../features}/library/routes/agents.py | 7 +-
 .../features}/library/routes/presets.py | 5 +-
 .../features}/library/routes_test.py | 19 +-
 .../{server/routers => api/features}/oauth.py | 4 +-
 .../routers => api/features}/oauth_test.py | 2 +-
.../backend/api/features/otto/__init__.py | 0 .../v2 => api/features}/otto/models.py | 0 .../v2 => api/features}/otto/routes.py | 0 .../v2 => api/features}/otto/routes_test.py | 6 +- .../v2 => api/features}/otto/service.py | 0 .../backend/api/features/postmark/__init__.py | 0 .../features}/postmark/models.py | 0 .../features}/postmark/postmark.py | 7 +- .../v2 => api/features}/store/README.md | 0 .../backend/api/features/store/__init__.py | 0 .../v2 => api/features}/store/cache.py | 13 +- .../{server/v2 => api/features}/store/db.py | 173 +- .../v2 => api/features}/store/db_test.py | 4 +- .../v2 => api/features}/store/exceptions.py | 0 .../v2 => api/features}/store/image_gen.py | 0 .../v2 => api/features}/store/media.py | 45 +- .../v2 => api/features}/store/media_test.py | 50 +- .../v2 => api/features}/store/model.py | 0 .../v2 => api/features}/store/model_test.py | 34 +- .../v2 => api/features}/store/routes.py | 86 +- .../v2 => api/features}/store/routes_test.py | 154 +- .../features}/store/test_cache_delete.py | 11 +- .../{server/routers => api/features}/v1.py | 155 +- .../routers => api/features}/v1_test.py | 60 +- .../{server => api}/middleware/security.py | 0 .../middleware/security_test.py | 2 +- .../backend/backend/{server => api}/model.py | 0 .../backend/{server => api}/rest_api.py | 122 +- .../backend/{server => api}/test_helpers.py | 0 .../{server => api}/utils/api_key_auth.py | 0 .../utils/api_key_auth_test.py | 2 +- .../backend/{server => api}/utils/cors.py | 0 .../{server => api}/utils/cors_test.py | 2 +- .../backend/backend/api/utils/openapi.py | 41 + .../backend/backend/{server => api}/ws_api.py | 16 +- .../backend/{server => api}/ws_api_test.py | 26 +- autogpt_platform/backend/backend/app.py | 4 +- .../blocks/test/test_smart_decision_maker.py | 4 +- autogpt_platform/backend/backend/cli.py | 6 +- .../backend/cli/generate_openapi_json.py | 4 +- .../backend/backend/data/__init__.py | 2 +- .../backend/backend/data/credit.py | 2 +- autogpt_platform/backend/backend/data/db.py | 2 +- .../backend/backend/data/graph_test.py | 4 +- .../backend/backend/data/human_review.py | 2 +- .../backend/backend/data/integrations.py | 6 +- .../backend/backend/data/notification_bus.py | 2 +- .../backend/backend/data/onboarding.py | 4 +- .../AutoMod => executor/automod}/__init__.py | 0 .../AutoMod => executor/automod}/manager.py | 7 +- .../v2/AutoMod => executor/automod}/models.py | 0 .../backend/backend/executor/database.py | 7 +- .../backend/backend/executor/manager.py | 36 +- .../backend/backend/executor/manager_test.py | 18 +- .../backend/executor/scheduler_test.py | 2 +- .../backend/integrations/webhooks/utils.py | 4 +- autogpt_platform/backend/backend/rest.py | 2 +- .../backend/backend/server/external/api.py | 29 - .../server/routers/analytics_improved_test.py | 150 - .../routers/analytics_parametrized_test.py | 115 - .../backend/server/routers/analytics_test.py | 284 - autogpt_platform/backend/backend/util/test.py | 2 +- .../backend/backend/util/virus_scanner.py | 2 +- .../backend/util/virus_scanner_test.py | 2 +- autogpt_platform/backend/backend/ws.py | 2 +- .../backend/test/e2e_test_data.py | 12 +- .../app/(platform)/auth/authorize/page.tsx | 2 +- .../frontend/src/app/api/openapi.json | 8817 +++++++++-------- 129 files changed, 5527 insertions(+), 5731 deletions(-) rename autogpt_platform/backend/backend/{server => api}/__init__.py (100%) rename autogpt_platform/backend/backend/{server => api}/conftest.py (100%) rename autogpt_platform/backend/backend/{server => api}/conn_manager.py (98%) 
rename autogpt_platform/backend/backend/{server => api}/conn_manager_test.py (98%) create mode 100644 autogpt_platform/backend/backend/api/external/fastapi_app.py rename autogpt_platform/backend/backend/{server => api}/external/middleware.py (100%) rename autogpt_platform/backend/backend/{server/external/routes => api/external/v1}/__init__.py (100%) rename autogpt_platform/backend/backend/{server/external/routes => api/external/v1}/integrations.py (99%) rename autogpt_platform/backend/backend/{server/external/routes/v1.py => api/external/v1/routes.py} (96%) rename autogpt_platform/backend/backend/{server/external/routes => api/external/v1}/tools.py (94%) rename autogpt_platform/backend/backend/{server/routers/postmark => api/features}/__init__.py (100%) rename autogpt_platform/backend/backend/{server/v2/library => api/features/admin}/__init__.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/admin/credit_admin_routes.py (96%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/admin/credit_admin_routes_test.py (90%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/admin/execution_analytics_routes.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/admin/model.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/admin/store_admin_routes.py (84%) rename autogpt_platform/backend/backend/{server/routers => api/features}/analytics.py (94%) create mode 100644 autogpt_platform/backend/backend/api/features/analytics_test.py rename autogpt_platform/backend/backend/{server/v2/store => api/features/builder}/__init__.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/builder/db.py (98%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/builder/model.py (93%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/builder/routes.py (98%) create mode 100644 autogpt_platform/backend/backend/api/features/chat/__init__.py rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/config.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/model.py (99%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/model_test.py (97%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/prompts/chat_system.md (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/response_model.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/routes.py (98%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/service.py (98%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/service_test.py (95%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/tools/__init__.py (87%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/tools/_test_data.py (99%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/tools/base.py (96%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/tools/find_agent.py (95%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/tools/models.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/tools/run_agent.py (98%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/tools/run_agent_test.py (99%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/chat/tools/utils.py (98%) create mode 100644 
autogpt_platform/backend/backend/api/features/executions/__init__.py create mode 100644 autogpt_platform/backend/backend/api/features/executions/review/__init__.py rename autogpt_platform/backend/backend/{server/v2 => api/features}/executions/review/model.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/executions/review/review_routes_test.py (87%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/executions/review/routes.py (98%) create mode 100644 autogpt_platform/backend/backend/api/features/integrations/__init__.py rename autogpt_platform/backend/backend/{server => api/features}/integrations/models.py (100%) rename autogpt_platform/backend/backend/{server => api/features}/integrations/router.py (99%) create mode 100644 autogpt_platform/backend/backend/api/features/library/__init__.py rename autogpt_platform/backend/backend/{server/v2 => api/features}/library/db.py (99%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/library/db_test.py (94%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/library/model.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/library/model_test.py (95%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/library/routes/__init__.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/library/routes/agents.py (98%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/library/routes/presets.py (99%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/library/routes_test.py (93%) rename autogpt_platform/backend/backend/{server/routers => api/features}/oauth.py (99%) rename autogpt_platform/backend/backend/{server/routers => api/features}/oauth_test.py (99%) create mode 100644 autogpt_platform/backend/backend/api/features/otto/__init__.py rename autogpt_platform/backend/backend/{server/v2 => api/features}/otto/models.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/otto/routes.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/otto/routes_test.py (97%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/otto/service.py (100%) create mode 100644 autogpt_platform/backend/backend/api/features/postmark/__init__.py rename autogpt_platform/backend/backend/{server/routers => api/features}/postmark/models.py (100%) rename autogpt_platform/backend/backend/{server/routers => api/features}/postmark/postmark.py (96%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/README.md (100%) create mode 100644 autogpt_platform/backend/backend/api/features/store/__init__.py rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/cache.py (85%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/db.py (92%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/db_test.py (99%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/exceptions.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/image_gen.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/media.py (81%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/media_test.py (75%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/model.py (100%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/model_test.py (83%) rename 
autogpt_platform/backend/backend/{server/v2 => api/features}/store/routes.py (87%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/routes_test.py (76%) rename autogpt_platform/backend/backend/{server/v2 => api/features}/store/test_cache_delete.py (96%) rename autogpt_platform/backend/backend/{server/routers => api/features}/v1.py (98%) rename autogpt_platform/backend/backend/{server/routers => api/features}/v1_test.py (91%) rename autogpt_platform/backend/backend/{server => api}/middleware/security.py (100%) rename autogpt_platform/backend/backend/{server => api}/middleware/security_test.py (98%) rename autogpt_platform/backend/backend/{server => api}/model.py (100%) rename autogpt_platform/backend/backend/{server => api}/rest_api.py (78%) rename autogpt_platform/backend/backend/{server => api}/test_helpers.py (100%) rename autogpt_platform/backend/backend/{server => api}/utils/api_key_auth.py (100%) rename autogpt_platform/backend/backend/{server => api}/utils/api_key_auth_test.py (99%) rename autogpt_platform/backend/backend/{server => api}/utils/cors.py (100%) rename autogpt_platform/backend/backend/{server => api}/utils/cors_test.py (97%) create mode 100644 autogpt_platform/backend/backend/api/utils/openapi.py rename autogpt_platform/backend/backend/{server => api}/ws_api.py (98%) rename autogpt_platform/backend/backend/{server => api}/ws_api_test.py (92%) rename autogpt_platform/backend/backend/{server/v2/AutoMod => executor/automod}/__init__.py (100%) rename autogpt_platform/backend/backend/{server/v2/AutoMod => executor/automod}/manager.py (99%) rename autogpt_platform/backend/backend/{server/v2/AutoMod => executor/automod}/models.py (100%) delete mode 100644 autogpt_platform/backend/backend/server/external/api.py delete mode 100644 autogpt_platform/backend/backend/server/routers/analytics_improved_test.py delete mode 100644 autogpt_platform/backend/backend/server/routers/analytics_parametrized_test.py delete mode 100644 autogpt_platform/backend/backend/server/routers/analytics_test.py diff --git a/autogpt_platform/autogpt_libs/autogpt_libs/auth/helpers.py b/autogpt_platform/autogpt_libs/autogpt_libs/auth/helpers.py index d3d571d73c..10101778e7 100644 --- a/autogpt_platform/autogpt_libs/autogpt_libs/auth/helpers.py +++ b/autogpt_platform/autogpt_libs/autogpt_libs/auth/helpers.py @@ -1,29 +1,25 @@ from fastapi import FastAPI -from fastapi.openapi.utils import get_openapi from .jwt_utils import bearer_jwt_auth def add_auth_responses_to_openapi(app: FastAPI) -> None: """ - Set up custom OpenAPI schema generation that adds 401 responses + Patch a FastAPI instance's `openapi()` method to add 401 responses to all authenticated endpoints. This is needed when using HTTPBearer with auto_error=False to get proper 401 responses instead of 403, but FastAPI only automatically adds security responses when auto_error=True. 
""" + # Wrap current method to allow stacking OpenAPI schema modifiers like this + wrapped_openapi = app.openapi def custom_openapi(): if app.openapi_schema: return app.openapi_schema - openapi_schema = get_openapi( - title=app.title, - version=app.version, - description=app.description, - routes=app.routes, - ) + openapi_schema = wrapped_openapi() # Add 401 response to all endpoints that have security requirements for path, methods in openapi_schema["paths"].items(): diff --git a/autogpt_platform/backend/TESTING.md b/autogpt_platform/backend/TESTING.md index 39fe4611b4..a3a5db68ef 100644 --- a/autogpt_platform/backend/TESTING.md +++ b/autogpt_platform/backend/TESTING.md @@ -108,7 +108,7 @@ import fastapi.testclient import pytest from pytest_snapshot.plugin import Snapshot -from backend.server.v2.myroute import router +from backend.api.features.myroute import router app = fastapi.FastAPI() app.include_router(router) @@ -149,7 +149,7 @@ These provide the easiest way to set up authentication mocking in test modules: import fastapi import fastapi.testclient import pytest -from backend.server.v2.myroute import router +from backend.api.features.myroute import router app = fastapi.FastAPI() app.include_router(router) diff --git a/autogpt_platform/backend/backend/server/__init__.py b/autogpt_platform/backend/backend/api/__init__.py similarity index 100% rename from autogpt_platform/backend/backend/server/__init__.py rename to autogpt_platform/backend/backend/api/__init__.py diff --git a/autogpt_platform/backend/backend/server/conftest.py b/autogpt_platform/backend/backend/api/conftest.py similarity index 100% rename from autogpt_platform/backend/backend/server/conftest.py rename to autogpt_platform/backend/backend/api/conftest.py diff --git a/autogpt_platform/backend/backend/server/conn_manager.py b/autogpt_platform/backend/backend/api/conn_manager.py similarity index 98% rename from autogpt_platform/backend/backend/server/conn_manager.py rename to autogpt_platform/backend/backend/api/conn_manager.py index 8d65117564..52e0f50f69 100644 --- a/autogpt_platform/backend/backend/server/conn_manager.py +++ b/autogpt_platform/backend/backend/api/conn_manager.py @@ -3,12 +3,12 @@ from typing import Dict, Set from fastapi import WebSocket +from backend.api.model import NotificationPayload, WSMessage, WSMethod from backend.data.execution import ( ExecutionEventType, GraphExecutionEvent, NodeExecutionEvent, ) -from backend.server.model import NotificationPayload, WSMessage, WSMethod _EVENT_TYPE_TO_METHOD_MAP: dict[ExecutionEventType, WSMethod] = { ExecutionEventType.GRAPH_EXEC_UPDATE: WSMethod.GRAPH_EXECUTION_EVENT, diff --git a/autogpt_platform/backend/backend/server/conn_manager_test.py b/autogpt_platform/backend/backend/api/conn_manager_test.py similarity index 98% rename from autogpt_platform/backend/backend/server/conn_manager_test.py rename to autogpt_platform/backend/backend/api/conn_manager_test.py index 379928fae7..71dbc0ffee 100644 --- a/autogpt_platform/backend/backend/server/conn_manager_test.py +++ b/autogpt_platform/backend/backend/api/conn_manager_test.py @@ -4,13 +4,13 @@ from unittest.mock import AsyncMock import pytest from fastapi import WebSocket +from backend.api.conn_manager import ConnectionManager +from backend.api.model import NotificationPayload, WSMessage, WSMethod from backend.data.execution import ( ExecutionStatus, GraphExecutionEvent, NodeExecutionEvent, ) -from backend.server.conn_manager import ConnectionManager -from backend.server.model import NotificationPayload, 
WSMessage, WSMethod @pytest.fixture diff --git a/autogpt_platform/backend/backend/api/external/fastapi_app.py b/autogpt_platform/backend/backend/api/external/fastapi_app.py new file mode 100644 index 0000000000..b55c918a74 --- /dev/null +++ b/autogpt_platform/backend/backend/api/external/fastapi_app.py @@ -0,0 +1,25 @@ +from fastapi import FastAPI + +from backend.api.middleware.security import SecurityHeadersMiddleware +from backend.monitoring.instrumentation import instrument_fastapi + +from .v1.routes import v1_router + +external_api = FastAPI( + title="AutoGPT External API", + description="External API for AutoGPT integrations", + docs_url="/docs", + version="1.0", +) + +external_api.add_middleware(SecurityHeadersMiddleware) +external_api.include_router(v1_router, prefix="/v1") + +# Add Prometheus instrumentation +instrument_fastapi( + external_api, + service_name="external-api", + expose_endpoint=True, + endpoint="/metrics", + include_in_schema=True, +) diff --git a/autogpt_platform/backend/backend/server/external/middleware.py b/autogpt_platform/backend/backend/api/external/middleware.py similarity index 100% rename from autogpt_platform/backend/backend/server/external/middleware.py rename to autogpt_platform/backend/backend/api/external/middleware.py diff --git a/autogpt_platform/backend/backend/server/external/routes/__init__.py b/autogpt_platform/backend/backend/api/external/v1/__init__.py similarity index 100% rename from autogpt_platform/backend/backend/server/external/routes/__init__.py rename to autogpt_platform/backend/backend/api/external/v1/__init__.py diff --git a/autogpt_platform/backend/backend/server/external/routes/integrations.py b/autogpt_platform/backend/backend/api/external/v1/integrations.py similarity index 99% rename from autogpt_platform/backend/backend/server/external/routes/integrations.py rename to autogpt_platform/backend/backend/api/external/v1/integrations.py index f9a8875ada..a3df481a67 100644 --- a/autogpt_platform/backend/backend/server/external/routes/integrations.py +++ b/autogpt_platform/backend/backend/api/external/v1/integrations.py @@ -16,6 +16,8 @@ from fastapi import APIRouter, Body, HTTPException, Path, Security, status from prisma.enums import APIKeyPermission from pydantic import BaseModel, Field, SecretStr +from backend.api.external.middleware import require_permission +from backend.api.features.integrations.models import get_all_provider_names from backend.data.auth.base import APIAuthorizationInfo from backend.data.model import ( APIKeyCredentials, @@ -28,8 +30,6 @@ from backend.data.model import ( from backend.integrations.creds_manager import IntegrationCredentialsManager from backend.integrations.oauth import CREDENTIALS_BY_PROVIDER, HANDLERS_BY_NAME from backend.integrations.providers import ProviderName -from backend.server.external.middleware import require_permission -from backend.server.integrations.models import get_all_provider_names from backend.util.settings import Settings if TYPE_CHECKING: diff --git a/autogpt_platform/backend/backend/server/external/routes/v1.py b/autogpt_platform/backend/backend/api/external/v1/routes.py similarity index 96% rename from autogpt_platform/backend/backend/server/external/routes/v1.py rename to autogpt_platform/backend/backend/api/external/v1/routes.py index f83673465a..58e15dc6a3 100644 --- a/autogpt_platform/backend/backend/server/external/routes/v1.py +++ b/autogpt_platform/backend/backend/api/external/v1/routes.py @@ -8,23 +8,29 @@ from prisma.enums import AgentExecutionStatus, 
APIKeyPermission from pydantic import BaseModel, Field from typing_extensions import TypedDict +import backend.api.features.store.cache as store_cache +import backend.api.features.store.model as store_model import backend.data.block -import backend.server.v2.store.cache as store_cache -import backend.server.v2.store.model as store_model +from backend.api.external.middleware import require_permission from backend.data import execution as execution_db from backend.data import graph as graph_db from backend.data import user as user_db from backend.data.auth.base import APIAuthorizationInfo from backend.data.block import BlockInput, CompletedBlockOutput from backend.executor.utils import add_graph_execution -from backend.server.external.middleware import require_permission from backend.util.settings import Settings +from .integrations import integrations_router +from .tools import tools_router + settings = Settings() logger = logging.getLogger(__name__) v1_router = APIRouter() +v1_router.include_router(integrations_router) +v1_router.include_router(tools_router) + class UserInfoResponse(BaseModel): id: str diff --git a/autogpt_platform/backend/backend/server/external/routes/tools.py b/autogpt_platform/backend/backend/api/external/v1/tools.py similarity index 94% rename from autogpt_platform/backend/backend/server/external/routes/tools.py rename to autogpt_platform/backend/backend/api/external/v1/tools.py index 8e3f4cbfdb..9e362fb32c 100644 --- a/autogpt_platform/backend/backend/server/external/routes/tools.py +++ b/autogpt_platform/backend/backend/api/external/v1/tools.py @@ -14,11 +14,11 @@ from fastapi import APIRouter, Security from prisma.enums import APIKeyPermission from pydantic import BaseModel, Field +from backend.api.external.middleware import require_permission +from backend.api.features.chat.model import ChatSession +from backend.api.features.chat.tools import find_agent_tool, run_agent_tool +from backend.api.features.chat.tools.models import ToolResponseBase from backend.data.auth.base import APIAuthorizationInfo -from backend.server.external.middleware import require_permission -from backend.server.v2.chat.model import ChatSession -from backend.server.v2.chat.tools import find_agent_tool, run_agent_tool -from backend.server.v2.chat.tools.models import ToolResponseBase logger = logging.getLogger(__name__) diff --git a/autogpt_platform/backend/backend/server/routers/postmark/__init__.py b/autogpt_platform/backend/backend/api/features/__init__.py similarity index 100% rename from autogpt_platform/backend/backend/server/routers/postmark/__init__.py rename to autogpt_platform/backend/backend/api/features/__init__.py diff --git a/autogpt_platform/backend/backend/server/v2/library/__init__.py b/autogpt_platform/backend/backend/api/features/admin/__init__.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/library/__init__.py rename to autogpt_platform/backend/backend/api/features/admin/__init__.py diff --git a/autogpt_platform/backend/backend/server/v2/admin/credit_admin_routes.py b/autogpt_platform/backend/backend/api/features/admin/credit_admin_routes.py similarity index 96% rename from autogpt_platform/backend/backend/server/v2/admin/credit_admin_routes.py rename to autogpt_platform/backend/backend/api/features/admin/credit_admin_routes.py index e4ea2c7f32..8930172c7f 100644 --- a/autogpt_platform/backend/backend/server/v2/admin/credit_admin_routes.py +++ b/autogpt_platform/backend/backend/api/features/admin/credit_admin_routes.py @@ -6,9 +6,10 @@ from 
fastapi import APIRouter, Body, Security from prisma.enums import CreditTransactionType from backend.data.credit import admin_get_user_history, get_user_credit_model -from backend.server.v2.admin.model import AddUserCreditsResponse, UserHistoryResponse from backend.util.json import SafeJson +from .model import AddUserCreditsResponse, UserHistoryResponse + logger = logging.getLogger(__name__) diff --git a/autogpt_platform/backend/backend/server/v2/admin/credit_admin_routes_test.py b/autogpt_platform/backend/backend/api/features/admin/credit_admin_routes_test.py similarity index 90% rename from autogpt_platform/backend/backend/server/v2/admin/credit_admin_routes_test.py rename to autogpt_platform/backend/backend/api/features/admin/credit_admin_routes_test.py index 0248da352f..db2d3cb41a 100644 --- a/autogpt_platform/backend/backend/server/v2/admin/credit_admin_routes_test.py +++ b/autogpt_platform/backend/backend/api/features/admin/credit_admin_routes_test.py @@ -9,14 +9,15 @@ import pytest_mock from autogpt_libs.auth.jwt_utils import get_jwt_payload from pytest_snapshot.plugin import Snapshot -import backend.server.v2.admin.credit_admin_routes as credit_admin_routes -import backend.server.v2.admin.model as admin_model from backend.data.model import UserTransaction from backend.util.json import SafeJson from backend.util.models import Pagination +from .credit_admin_routes import router as credit_admin_router +from .model import UserHistoryResponse + app = fastapi.FastAPI() -app.include_router(credit_admin_routes.router) +app.include_router(credit_admin_router) client = fastapi.testclient.TestClient(app) @@ -30,7 +31,7 @@ def setup_app_admin_auth(mock_jwt_admin): def test_add_user_credits_success( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, configured_snapshot: Snapshot, admin_user_id: str, target_user_id: str, @@ -42,7 +43,7 @@ def test_add_user_credits_success( return_value=(1500, "transaction-123-uuid") ) mocker.patch( - "backend.server.v2.admin.credit_admin_routes.get_user_credit_model", + "backend.api.features.admin.credit_admin_routes.get_user_credit_model", return_value=mock_credit_model, ) @@ -84,7 +85,7 @@ def test_add_user_credits_success( def test_add_user_credits_negative_amount( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, snapshot: Snapshot, ) -> None: """Test credit deduction by admin (negative amount)""" @@ -94,7 +95,7 @@ def test_add_user_credits_negative_amount( return_value=(200, "transaction-456-uuid") ) mocker.patch( - "backend.server.v2.admin.credit_admin_routes.get_user_credit_model", + "backend.api.features.admin.credit_admin_routes.get_user_credit_model", return_value=mock_credit_model, ) @@ -119,12 +120,12 @@ def test_add_user_credits_negative_amount( def test_get_user_history_success( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, snapshot: Snapshot, ) -> None: """Test successful retrieval of user credit history""" # Mock the admin_get_user_history function - mock_history_response = admin_model.UserHistoryResponse( + mock_history_response = UserHistoryResponse( history=[ UserTransaction( user_id="user-1", @@ -150,7 +151,7 @@ def test_get_user_history_success( ) mocker.patch( - "backend.server.v2.admin.credit_admin_routes.admin_get_user_history", + "backend.api.features.admin.credit_admin_routes.admin_get_user_history", return_value=mock_history_response, ) @@ -170,12 +171,12 @@ def test_get_user_history_success( def test_get_user_history_with_filters( - mocker: pytest_mock.MockFixture, + 
mocker: pytest_mock.MockerFixture, snapshot: Snapshot, ) -> None: """Test user credit history with search and filter parameters""" # Mock the admin_get_user_history function - mock_history_response = admin_model.UserHistoryResponse( + mock_history_response = UserHistoryResponse( history=[ UserTransaction( user_id="user-3", @@ -194,7 +195,7 @@ def test_get_user_history_with_filters( ) mock_get_history = mocker.patch( - "backend.server.v2.admin.credit_admin_routes.admin_get_user_history", + "backend.api.features.admin.credit_admin_routes.admin_get_user_history", return_value=mock_history_response, ) @@ -230,12 +231,12 @@ def test_get_user_history_with_filters( def test_get_user_history_empty_results( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, snapshot: Snapshot, ) -> None: """Test user credit history with no results""" # Mock empty history response - mock_history_response = admin_model.UserHistoryResponse( + mock_history_response = UserHistoryResponse( history=[], pagination=Pagination( total_items=0, @@ -246,7 +247,7 @@ def test_get_user_history_empty_results( ) mocker.patch( - "backend.server.v2.admin.credit_admin_routes.admin_get_user_history", + "backend.api.features.admin.credit_admin_routes.admin_get_user_history", return_value=mock_history_response, ) diff --git a/autogpt_platform/backend/backend/server/v2/admin/execution_analytics_routes.py b/autogpt_platform/backend/backend/api/features/admin/execution_analytics_routes.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/admin/execution_analytics_routes.py rename to autogpt_platform/backend/backend/api/features/admin/execution_analytics_routes.py diff --git a/autogpt_platform/backend/backend/server/v2/admin/model.py b/autogpt_platform/backend/backend/api/features/admin/model.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/admin/model.py rename to autogpt_platform/backend/backend/api/features/admin/model.py diff --git a/autogpt_platform/backend/backend/server/v2/admin/store_admin_routes.py b/autogpt_platform/backend/backend/api/features/admin/store_admin_routes.py similarity index 84% rename from autogpt_platform/backend/backend/server/v2/admin/store_admin_routes.py rename to autogpt_platform/backend/backend/api/features/admin/store_admin_routes.py index c611c43f5a..9c4b89fee6 100644 --- a/autogpt_platform/backend/backend/server/v2/admin/store_admin_routes.py +++ b/autogpt_platform/backend/backend/api/features/admin/store_admin_routes.py @@ -7,9 +7,9 @@ import fastapi import fastapi.responses import prisma.enums -import backend.server.v2.store.cache as store_cache -import backend.server.v2.store.db -import backend.server.v2.store.model +import backend.api.features.store.cache as store_cache +import backend.api.features.store.db as store_db +import backend.api.features.store.model as store_model import backend.util.json logger = logging.getLogger(__name__) @@ -24,7 +24,7 @@ router = fastapi.APIRouter( @router.get( "/listings", summary="Get Admin Listings History", - response_model=backend.server.v2.store.model.StoreListingsWithVersionsResponse, + response_model=store_model.StoreListingsWithVersionsResponse, ) async def get_admin_listings_with_versions( status: typing.Optional[prisma.enums.SubmissionStatus] = None, @@ -48,7 +48,7 @@ async def get_admin_listings_with_versions( StoreListingsWithVersionsResponse with listings and their versions """ try: - listings = await backend.server.v2.store.db.get_admin_listings_with_versions( + listings = await 
store_db.get_admin_listings_with_versions( status=status, search_query=search, page=page, @@ -68,11 +68,11 @@ async def get_admin_listings_with_versions( @router.post( "/submissions/{store_listing_version_id}/review", summary="Review Store Submission", - response_model=backend.server.v2.store.model.StoreSubmission, + response_model=store_model.StoreSubmission, ) async def review_submission( store_listing_version_id: str, - request: backend.server.v2.store.model.ReviewSubmissionRequest, + request: store_model.ReviewSubmissionRequest, user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id), ): """ @@ -87,12 +87,10 @@ async def review_submission( StoreSubmission with updated review information """ try: - already_approved = ( - await backend.server.v2.store.db.check_submission_already_approved( - store_listing_version_id=store_listing_version_id, - ) + already_approved = await store_db.check_submission_already_approved( + store_listing_version_id=store_listing_version_id, ) - submission = await backend.server.v2.store.db.review_store_submission( + submission = await store_db.review_store_submission( store_listing_version_id=store_listing_version_id, is_approved=request.is_approved, external_comments=request.comments, @@ -136,7 +134,7 @@ async def admin_download_agent_file( Raises: HTTPException: If the agent is not found or an unexpected error occurs. """ - graph_data = await backend.server.v2.store.db.get_agent_as_admin( + graph_data = await store_db.get_agent_as_admin( user_id=user_id, store_listing_version_id=store_listing_version_id, ) diff --git a/autogpt_platform/backend/backend/server/routers/analytics.py b/autogpt_platform/backend/backend/api/features/analytics.py similarity index 94% rename from autogpt_platform/backend/backend/server/routers/analytics.py rename to autogpt_platform/backend/backend/api/features/analytics.py index 98c2dd8e96..73a4590dcb 100644 --- a/autogpt_platform/backend/backend/server/routers/analytics.py +++ b/autogpt_platform/backend/backend/api/features/analytics.py @@ -6,10 +6,11 @@ from typing import Annotated import fastapi import pydantic from autogpt_libs.auth import get_user_id +from autogpt_libs.auth.dependencies import requires_user import backend.data.analytics -router = fastapi.APIRouter() +router = fastapi.APIRouter(dependencies=[fastapi.Security(requires_user)]) logger = logging.getLogger(__name__) diff --git a/autogpt_platform/backend/backend/api/features/analytics_test.py b/autogpt_platform/backend/backend/api/features/analytics_test.py new file mode 100644 index 0000000000..2493bdb7e4 --- /dev/null +++ b/autogpt_platform/backend/backend/api/features/analytics_test.py @@ -0,0 +1,340 @@ +"""Tests for analytics API endpoints.""" + +import json +from unittest.mock import AsyncMock, Mock + +import fastapi +import fastapi.testclient +import pytest +import pytest_mock +from pytest_snapshot.plugin import Snapshot + +from .analytics import router as analytics_router + +app = fastapi.FastAPI() +app.include_router(analytics_router) + +client = fastapi.testclient.TestClient(app) + + +@pytest.fixture(autouse=True) +def setup_app_auth(mock_jwt_user): + """Setup auth overrides for all tests in this module.""" + from autogpt_libs.auth.jwt_utils import get_jwt_payload + + app.dependency_overrides[get_jwt_payload] = mock_jwt_user["get_jwt_payload"] + yield + app.dependency_overrides.clear() + + +# ============================================================================= +# /log_raw_metric endpoint tests +# 
============================================================================= + + +def test_log_raw_metric_success( + mocker: pytest_mock.MockFixture, + configured_snapshot: Snapshot, + test_user_id: str, +) -> None: + """Test successful raw metric logging.""" + mock_result = Mock(id="metric-123-uuid") + mock_log_metric = mocker.patch( + "backend.data.analytics.log_raw_metric", + new_callable=AsyncMock, + return_value=mock_result, + ) + + request_data = { + "metric_name": "page_load_time", + "metric_value": 2.5, + "data_string": "/dashboard", + } + + response = client.post("/log_raw_metric", json=request_data) + + assert response.status_code == 200, f"Unexpected response: {response.text}" + assert response.json() == "metric-123-uuid" + + mock_log_metric.assert_called_once_with( + user_id=test_user_id, + metric_name="page_load_time", + metric_value=2.5, + data_string="/dashboard", + ) + + configured_snapshot.assert_match( + json.dumps({"metric_id": response.json()}, indent=2, sort_keys=True), + "analytics_log_metric_success", + ) + + +@pytest.mark.parametrize( + "metric_value,metric_name,data_string,test_id", + [ + (100, "api_calls_count", "external_api", "integer_value"), + (0, "error_count", "no_errors", "zero_value"), + (-5.2, "temperature_delta", "cooling", "negative_value"), + (1.23456789, "precision_test", "float_precision", "float_precision"), + (999999999, "large_number", "max_value", "large_number"), + (0.0000001, "tiny_number", "min_value", "tiny_number"), + ], +) +def test_log_raw_metric_various_values( + mocker: pytest_mock.MockFixture, + configured_snapshot: Snapshot, + metric_value: float, + metric_name: str, + data_string: str, + test_id: str, +) -> None: + """Test raw metric logging with various metric values.""" + mock_result = Mock(id=f"metric-{test_id}-uuid") + mocker.patch( + "backend.data.analytics.log_raw_metric", + new_callable=AsyncMock, + return_value=mock_result, + ) + + request_data = { + "metric_name": metric_name, + "metric_value": metric_value, + "data_string": data_string, + } + + response = client.post("/log_raw_metric", json=request_data) + + assert response.status_code == 200, f"Failed for {test_id}: {response.text}" + + configured_snapshot.assert_match( + json.dumps( + {"metric_id": response.json(), "test_case": test_id}, + indent=2, + sort_keys=True, + ), + f"analytics_metric_{test_id}", + ) + + +@pytest.mark.parametrize( + "invalid_data,expected_error", + [ + ({}, "Field required"), + ({"metric_name": "test"}, "Field required"), + ( + {"metric_name": "test", "metric_value": "not_a_number", "data_string": "x"}, + "Input should be a valid number", + ), + ( + {"metric_name": "", "metric_value": 1.0, "data_string": "test"}, + "String should have at least 1 character", + ), + ( + {"metric_name": "test", "metric_value": 1.0, "data_string": ""}, + "String should have at least 1 character", + ), + ], + ids=[ + "empty_request", + "missing_metric_value_and_data_string", + "invalid_metric_value_type", + "empty_metric_name", + "empty_data_string", + ], +) +def test_log_raw_metric_validation_errors( + invalid_data: dict, + expected_error: str, +) -> None: + """Test validation errors for invalid metric requests.""" + response = client.post("/log_raw_metric", json=invalid_data) + + assert response.status_code == 422 + error_detail = response.json() + assert "detail" in error_detail, f"Missing 'detail' in error: {error_detail}" + + error_text = json.dumps(error_detail) + assert ( + expected_error in error_text + ), f"Expected '{expected_error}' in error response: 
{error_text}" + + +def test_log_raw_metric_service_error( + mocker: pytest_mock.MockFixture, + test_user_id: str, +) -> None: + """Test error handling when analytics service fails.""" + mocker.patch( + "backend.data.analytics.log_raw_metric", + new_callable=AsyncMock, + side_effect=Exception("Database connection failed"), + ) + + request_data = { + "metric_name": "test_metric", + "metric_value": 1.0, + "data_string": "test", + } + + response = client.post("/log_raw_metric", json=request_data) + + assert response.status_code == 500 + error_detail = response.json()["detail"] + assert "Database connection failed" in error_detail["message"] + assert "hint" in error_detail + + +# ============================================================================= +# /log_raw_analytics endpoint tests +# ============================================================================= + + +def test_log_raw_analytics_success( + mocker: pytest_mock.MockFixture, + configured_snapshot: Snapshot, + test_user_id: str, +) -> None: + """Test successful raw analytics logging.""" + mock_result = Mock(id="analytics-789-uuid") + mock_log_analytics = mocker.patch( + "backend.data.analytics.log_raw_analytics", + new_callable=AsyncMock, + return_value=mock_result, + ) + + request_data = { + "type": "user_action", + "data": { + "action": "button_click", + "button_id": "submit_form", + "timestamp": "2023-01-01T00:00:00Z", + "metadata": {"form_type": "registration", "fields_filled": 5}, + }, + "data_index": "button_click_submit_form", + } + + response = client.post("/log_raw_analytics", json=request_data) + + assert response.status_code == 200, f"Unexpected response: {response.text}" + assert response.json() == "analytics-789-uuid" + + mock_log_analytics.assert_called_once_with( + test_user_id, + "user_action", + request_data["data"], + "button_click_submit_form", + ) + + configured_snapshot.assert_match( + json.dumps({"analytics_id": response.json()}, indent=2, sort_keys=True), + "analytics_log_analytics_success", + ) + + +def test_log_raw_analytics_complex_data( + mocker: pytest_mock.MockFixture, + configured_snapshot: Snapshot, +) -> None: + """Test raw analytics logging with complex nested data structures.""" + mock_result = Mock(id="analytics-complex-uuid") + mocker.patch( + "backend.data.analytics.log_raw_analytics", + new_callable=AsyncMock, + return_value=mock_result, + ) + + request_data = { + "type": "agent_execution", + "data": { + "agent_id": "agent_123", + "execution_id": "exec_456", + "status": "completed", + "duration_ms": 3500, + "nodes_executed": 15, + "blocks_used": [ + {"block_id": "llm_block", "count": 3}, + {"block_id": "http_block", "count": 5}, + {"block_id": "code_block", "count": 2}, + ], + "errors": [], + "metadata": { + "trigger": "manual", + "user_tier": "premium", + "environment": "production", + }, + }, + "data_index": "agent_123_exec_456", + } + + response = client.post("/log_raw_analytics", json=request_data) + + assert response.status_code == 200 + + configured_snapshot.assert_match( + json.dumps( + {"analytics_id": response.json(), "logged_data": request_data["data"]}, + indent=2, + sort_keys=True, + ), + "analytics_log_analytics_complex_data", + ) + + +@pytest.mark.parametrize( + "invalid_data,expected_error", + [ + ({}, "Field required"), + ({"type": "test"}, "Field required"), + ( + {"type": "test", "data": "not_a_dict", "data_index": "test"}, + "Input should be a valid dictionary", + ), + ({"type": "test", "data": {"key": "value"}}, "Field required"), + ], + ids=[ + "empty_request", + 
"missing_data_and_data_index", + "invalid_data_type", + "missing_data_index", + ], +) +def test_log_raw_analytics_validation_errors( + invalid_data: dict, + expected_error: str, +) -> None: + """Test validation errors for invalid analytics requests.""" + response = client.post("/log_raw_analytics", json=invalid_data) + + assert response.status_code == 422 + error_detail = response.json() + assert "detail" in error_detail, f"Missing 'detail' in error: {error_detail}" + + error_text = json.dumps(error_detail) + assert ( + expected_error in error_text + ), f"Expected '{expected_error}' in error response: {error_text}" + + +def test_log_raw_analytics_service_error( + mocker: pytest_mock.MockFixture, + test_user_id: str, +) -> None: + """Test error handling when analytics service fails.""" + mocker.patch( + "backend.data.analytics.log_raw_analytics", + new_callable=AsyncMock, + side_effect=Exception("Analytics DB unreachable"), + ) + + request_data = { + "type": "test_event", + "data": {"key": "value"}, + "data_index": "test_index", + } + + response = client.post("/log_raw_analytics", json=request_data) + + assert response.status_code == 500 + error_detail = response.json()["detail"] + assert "Analytics DB unreachable" in error_detail["message"] + assert "hint" in error_detail diff --git a/autogpt_platform/backend/backend/server/v2/store/__init__.py b/autogpt_platform/backend/backend/api/features/builder/__init__.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/store/__init__.py rename to autogpt_platform/backend/backend/api/features/builder/__init__.py diff --git a/autogpt_platform/backend/backend/server/v2/builder/db.py b/autogpt_platform/backend/backend/api/features/builder/db.py similarity index 98% rename from autogpt_platform/backend/backend/server/v2/builder/db.py rename to autogpt_platform/backend/backend/api/features/builder/db.py index 9856d53c0e..7177fa4dc6 100644 --- a/autogpt_platform/backend/backend/server/v2/builder/db.py +++ b/autogpt_platform/backend/backend/api/features/builder/db.py @@ -6,17 +6,20 @@ from typing import Sequence import prisma +import backend.api.features.library.db as library_db +import backend.api.features.library.model as library_model +import backend.api.features.store.db as store_db +import backend.api.features.store.model as store_model import backend.data.block -import backend.server.v2.library.db as library_db -import backend.server.v2.library.model as library_model -import backend.server.v2.store.db as store_db -import backend.server.v2.store.model as store_model from backend.blocks import load_all_blocks from backend.blocks.llm import LlmModel from backend.data.block import AnyBlockSchema, BlockCategory, BlockInfo, BlockSchema from backend.data.db import query_raw_with_schema from backend.integrations.providers import ProviderName -from backend.server.v2.builder.model import ( +from backend.util.cache import cached +from backend.util.models import Pagination + +from .model import ( BlockCategoryResponse, BlockResponse, BlockType, @@ -26,8 +29,6 @@ from backend.server.v2.builder.model import ( ProviderResponse, SearchEntry, ) -from backend.util.cache import cached -from backend.util.models import Pagination logger = logging.getLogger(__name__) llm_models = [name.name.lower().replace("_", " ") for name in LlmModel] diff --git a/autogpt_platform/backend/backend/server/v2/builder/model.py b/autogpt_platform/backend/backend/api/features/builder/model.py similarity index 93% rename from 
autogpt_platform/backend/backend/server/v2/builder/model.py rename to autogpt_platform/backend/backend/api/features/builder/model.py index 4a1de595d1..fcd19dba94 100644 --- a/autogpt_platform/backend/backend/server/v2/builder/model.py +++ b/autogpt_platform/backend/backend/api/features/builder/model.py @@ -2,8 +2,8 @@ from typing import Literal from pydantic import BaseModel -import backend.server.v2.library.model as library_model -import backend.server.v2.store.model as store_model +import backend.api.features.library.model as library_model +import backend.api.features.store.model as store_model from backend.data.block import BlockInfo from backend.integrations.providers import ProviderName from backend.util.models import Pagination diff --git a/autogpt_platform/backend/backend/server/v2/builder/routes.py b/autogpt_platform/backend/backend/api/features/builder/routes.py similarity index 98% rename from autogpt_platform/backend/backend/server/v2/builder/routes.py rename to autogpt_platform/backend/backend/api/features/builder/routes.py index b87bf8ca1a..7fe9cab189 100644 --- a/autogpt_platform/backend/backend/server/v2/builder/routes.py +++ b/autogpt_platform/backend/backend/api/features/builder/routes.py @@ -4,11 +4,12 @@ from typing import Annotated, Sequence import fastapi from autogpt_libs.auth.dependencies import get_user_id, requires_user -import backend.server.v2.builder.db as builder_db -import backend.server.v2.builder.model as builder_model from backend.integrations.providers import ProviderName from backend.util.models import Pagination +from . import db as builder_db +from . import model as builder_model + logger = logging.getLogger(__name__) router = fastapi.APIRouter( diff --git a/autogpt_platform/backend/backend/api/features/chat/__init__.py b/autogpt_platform/backend/backend/api/features/chat/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/autogpt_platform/backend/backend/server/v2/chat/config.py b/autogpt_platform/backend/backend/api/features/chat/config.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/chat/config.py rename to autogpt_platform/backend/backend/api/features/chat/config.py diff --git a/autogpt_platform/backend/backend/server/v2/chat/model.py b/autogpt_platform/backend/backend/api/features/chat/model.py similarity index 99% rename from autogpt_platform/backend/backend/server/v2/chat/model.py rename to autogpt_platform/backend/backend/api/features/chat/model.py index 0f06ddab00..b8aea5a334 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/model.py +++ b/autogpt_platform/backend/backend/api/features/chat/model.py @@ -19,9 +19,10 @@ from openai.types.chat.chat_completion_message_tool_call_param import ( from pydantic import BaseModel from backend.data.redis_client import get_redis_async -from backend.server.v2.chat.config import ChatConfig from backend.util.exceptions import RedisError +from .config import ChatConfig + logger = logging.getLogger(__name__) config = ChatConfig() diff --git a/autogpt_platform/backend/backend/server/v2/chat/model_test.py b/autogpt_platform/backend/backend/api/features/chat/model_test.py similarity index 97% rename from autogpt_platform/backend/backend/server/v2/chat/model_test.py rename to autogpt_platform/backend/backend/api/features/chat/model_test.py index f9c79b331b..b7f4c8a493 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/model_test.py +++ b/autogpt_platform/backend/backend/api/features/chat/model_test.py @@ -1,6 +1,6 @@ import pytest -from 
backend.server.v2.chat.model import ( +from .model import ( ChatMessage, ChatSession, Usage, diff --git a/autogpt_platform/backend/backend/server/v2/chat/prompts/chat_system.md b/autogpt_platform/backend/backend/api/features/chat/prompts/chat_system.md similarity index 100% rename from autogpt_platform/backend/backend/server/v2/chat/prompts/chat_system.md rename to autogpt_platform/backend/backend/api/features/chat/prompts/chat_system.md diff --git a/autogpt_platform/backend/backend/server/v2/chat/response_model.py b/autogpt_platform/backend/backend/api/features/chat/response_model.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/chat/response_model.py rename to autogpt_platform/backend/backend/api/features/chat/response_model.py diff --git a/autogpt_platform/backend/backend/server/v2/chat/routes.py b/autogpt_platform/backend/backend/api/features/chat/routes.py similarity index 98% rename from autogpt_platform/backend/backend/server/v2/chat/routes.py rename to autogpt_platform/backend/backend/api/features/chat/routes.py index 86bcf861c0..667335d048 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/routes.py +++ b/autogpt_platform/backend/backend/api/features/chat/routes.py @@ -9,10 +9,11 @@ from fastapi import APIRouter, Depends, Query, Security from fastapi.responses import StreamingResponse from pydantic import BaseModel -import backend.server.v2.chat.service as chat_service -from backend.server.v2.chat.config import ChatConfig from backend.util.exceptions import NotFoundError +from . import service as chat_service +from .config import ChatConfig + config = ChatConfig() diff --git a/autogpt_platform/backend/backend/server/v2/chat/service.py b/autogpt_platform/backend/backend/api/features/chat/service.py similarity index 98% rename from autogpt_platform/backend/backend/server/v2/chat/service.py rename to autogpt_platform/backend/backend/api/features/chat/service.py index 4328deb016..2d96d4abcd 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/service.py +++ b/autogpt_platform/backend/backend/api/features/chat/service.py @@ -7,15 +7,17 @@ import orjson from openai import AsyncOpenAI from openai.types.chat import ChatCompletionChunk, ChatCompletionToolParam -import backend.server.v2.chat.config -from backend.server.v2.chat.model import ( +from backend.util.exceptions import NotFoundError + +from .config import ChatConfig +from .model import ( ChatMessage, ChatSession, Usage, get_chat_session, upsert_chat_session, ) -from backend.server.v2.chat.response_model import ( +from .response_model import ( StreamBaseResponse, StreamEnd, StreamError, @@ -26,12 +28,11 @@ from backend.server.v2.chat.response_model import ( StreamToolExecutionResult, StreamUsage, ) -from backend.server.v2.chat.tools import execute_tool, tools -from backend.util.exceptions import NotFoundError +from .tools import execute_tool, tools logger = logging.getLogger(__name__) -config = backend.server.v2.chat.config.ChatConfig() +config = ChatConfig() client = AsyncOpenAI(api_key=config.api_key, base_url=config.base_url) diff --git a/autogpt_platform/backend/backend/server/v2/chat/service_test.py b/autogpt_platform/backend/backend/api/features/chat/service_test.py similarity index 95% rename from autogpt_platform/backend/backend/server/v2/chat/service_test.py rename to autogpt_platform/backend/backend/api/features/chat/service_test.py index df3d64976e..d1af22a71a 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/service_test.py +++ 
b/autogpt_platform/backend/backend/api/features/chat/service_test.py @@ -3,8 +3,8 @@ from os import getenv import pytest -import backend.server.v2.chat.service as chat_service -from backend.server.v2.chat.response_model import ( +from . import service as chat_service +from .response_model import ( StreamEnd, StreamError, StreamTextChunk, diff --git a/autogpt_platform/backend/backend/server/v2/chat/tools/__init__.py b/autogpt_platform/backend/backend/api/features/chat/tools/__init__.py similarity index 87% rename from autogpt_platform/backend/backend/server/v2/chat/tools/__init__.py rename to autogpt_platform/backend/backend/api/features/chat/tools/__init__.py index 50f0d9892b..5b9b8649a8 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/tools/__init__.py +++ b/autogpt_platform/backend/backend/api/features/chat/tools/__init__.py @@ -2,14 +2,14 @@ from typing import TYPE_CHECKING, Any from openai.types.chat import ChatCompletionToolParam -from backend.server.v2.chat.model import ChatSession +from backend.api.features.chat.model import ChatSession from .base import BaseTool from .find_agent import FindAgentTool from .run_agent import RunAgentTool if TYPE_CHECKING: - from backend.server.v2.chat.response_model import StreamToolExecutionResult + from backend.api.features.chat.response_model import StreamToolExecutionResult # Initialize tool instances find_agent_tool = FindAgentTool() diff --git a/autogpt_platform/backend/backend/server/v2/chat/tools/_test_data.py b/autogpt_platform/backend/backend/api/features/chat/tools/_test_data.py similarity index 99% rename from autogpt_platform/backend/backend/server/v2/chat/tools/_test_data.py rename to autogpt_platform/backend/backend/api/features/chat/tools/_test_data.py index 162894f5f1..f75b7bb0d0 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/tools/_test_data.py +++ b/autogpt_platform/backend/backend/api/features/chat/tools/_test_data.py @@ -5,6 +5,8 @@ from os import getenv import pytest from pydantic import SecretStr +from backend.api.features.chat.model import ChatSession +from backend.api.features.store import db as store_db from backend.blocks.firecrawl.scrape import FirecrawlScrapeBlock from backend.blocks.io import AgentInputBlock, AgentOutputBlock from backend.blocks.llm import AITextGeneratorBlock @@ -13,8 +15,6 @@ from backend.data.graph import Graph, Link, Node, create_graph from backend.data.model import APIKeyCredentials from backend.data.user import get_or_create_user from backend.integrations.credentials_store import IntegrationCredentialsStore -from backend.server.v2.chat.model import ChatSession -from backend.server.v2.store import db as store_db def make_session(user_id: str | None = None): diff --git a/autogpt_platform/backend/backend/server/v2/chat/tools/base.py b/autogpt_platform/backend/backend/api/features/chat/tools/base.py similarity index 96% rename from autogpt_platform/backend/backend/server/v2/chat/tools/base.py rename to autogpt_platform/backend/backend/api/features/chat/tools/base.py index f6c4c06284..b4c9d8d731 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/tools/base.py +++ b/autogpt_platform/backend/backend/api/features/chat/tools/base.py @@ -5,8 +5,8 @@ from typing import Any from openai.types.chat import ChatCompletionToolParam -from backend.server.v2.chat.model import ChatSession -from backend.server.v2.chat.response_model import StreamToolExecutionResult +from backend.api.features.chat.model import ChatSession +from backend.api.features.chat.response_model import 
StreamToolExecutionResult from .models import ErrorResponse, NeedLoginResponse, ToolResponseBase diff --git a/autogpt_platform/backend/backend/server/v2/chat/tools/find_agent.py b/autogpt_platform/backend/backend/api/features/chat/tools/find_agent.py similarity index 95% rename from autogpt_platform/backend/backend/server/v2/chat/tools/find_agent.py rename to autogpt_platform/backend/backend/api/features/chat/tools/find_agent.py index 111041a8f4..3ad071f412 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/tools/find_agent.py +++ b/autogpt_platform/backend/backend/api/features/chat/tools/find_agent.py @@ -3,17 +3,18 @@ import logging from typing import Any -from backend.server.v2.chat.model import ChatSession -from backend.server.v2.chat.tools.base import BaseTool -from backend.server.v2.chat.tools.models import ( +from backend.api.features.chat.model import ChatSession +from backend.api.features.store import db as store_db +from backend.util.exceptions import DatabaseError, NotFoundError + +from .base import BaseTool +from .models import ( AgentCarouselResponse, AgentInfo, ErrorResponse, NoResultsResponse, ToolResponseBase, ) -from backend.server.v2.store import db as store_db -from backend.util.exceptions import DatabaseError, NotFoundError logger = logging.getLogger(__name__) diff --git a/autogpt_platform/backend/backend/server/v2/chat/tools/models.py b/autogpt_platform/backend/backend/api/features/chat/tools/models.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/chat/tools/models.py rename to autogpt_platform/backend/backend/api/features/chat/tools/models.py diff --git a/autogpt_platform/backend/backend/server/v2/chat/tools/run_agent.py b/autogpt_platform/backend/backend/api/features/chat/tools/run_agent.py similarity index 98% rename from autogpt_platform/backend/backend/server/v2/chat/tools/run_agent.py rename to autogpt_platform/backend/backend/api/features/chat/tools/run_agent.py index 9ba2eab893..931e075021 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/tools/run_agent.py +++ b/autogpt_platform/backend/backend/api/features/chat/tools/run_agent.py @@ -5,14 +5,21 @@ from typing import Any from pydantic import BaseModel, Field, field_validator +from backend.api.features.chat.config import ChatConfig +from backend.api.features.chat.model import ChatSession from backend.data.graph import GraphModel from backend.data.model import CredentialsMetaInput from backend.data.user import get_user_by_id from backend.executor import utils as execution_utils -from backend.server.v2.chat.config import ChatConfig -from backend.server.v2.chat.model import ChatSession -from backend.server.v2.chat.tools.base import BaseTool -from backend.server.v2.chat.tools.models import ( +from backend.util.clients import get_scheduler_client +from backend.util.exceptions import DatabaseError, NotFoundError +from backend.util.timezone_utils import ( + convert_utc_time_to_user_timezone, + get_user_timezone_or_utc, +) + +from .base import BaseTool +from .models import ( AgentDetails, AgentDetailsResponse, ErrorResponse, @@ -23,19 +30,13 @@ from backend.server.v2.chat.tools.models import ( ToolResponseBase, UserReadiness, ) -from backend.server.v2.chat.tools.utils import ( +from .utils import ( check_user_has_required_credentials, extract_credentials_from_schema, fetch_graph_from_store_slug, get_or_create_library_agent, match_user_credentials_to_graph, ) -from backend.util.clients import get_scheduler_client -from backend.util.exceptions import DatabaseError, 
NotFoundError -from backend.util.timezone_utils import ( - convert_utc_time_to_user_timezone, - get_user_timezone_or_utc, -) logger = logging.getLogger(__name__) config = ChatConfig() diff --git a/autogpt_platform/backend/backend/server/v2/chat/tools/run_agent_test.py b/autogpt_platform/backend/backend/api/features/chat/tools/run_agent_test.py similarity index 99% rename from autogpt_platform/backend/backend/server/v2/chat/tools/run_agent_test.py rename to autogpt_platform/backend/backend/api/features/chat/tools/run_agent_test.py index 3ffd4a883e..ebad1a0050 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/tools/run_agent_test.py +++ b/autogpt_platform/backend/backend/api/features/chat/tools/run_agent_test.py @@ -3,13 +3,13 @@ import uuid import orjson import pytest -from backend.server.v2.chat.tools._test_data import ( +from ._test_data import ( make_session, setup_firecrawl_test_data, setup_llm_test_data, setup_test_data, ) -from backend.server.v2.chat.tools.run_agent import RunAgentTool +from .run_agent import RunAgentTool # This is so the formatter doesn't remove the fixture imports setup_llm_test_data = setup_llm_test_data diff --git a/autogpt_platform/backend/backend/server/v2/chat/tools/utils.py b/autogpt_platform/backend/backend/api/features/chat/tools/utils.py similarity index 98% rename from autogpt_platform/backend/backend/server/v2/chat/tools/utils.py rename to autogpt_platform/backend/backend/api/features/chat/tools/utils.py index ef4bc6f272..19e092c312 100644 --- a/autogpt_platform/backend/backend/server/v2/chat/tools/utils.py +++ b/autogpt_platform/backend/backend/api/features/chat/tools/utils.py @@ -3,13 +3,13 @@ import logging from typing import Any +from backend.api.features.library import db as library_db +from backend.api.features.library import model as library_model +from backend.api.features.store import db as store_db from backend.data import graph as graph_db from backend.data.graph import GraphModel from backend.data.model import CredentialsMetaInput from backend.integrations.creds_manager import IntegrationCredentialsManager -from backend.server.v2.library import db as library_db -from backend.server.v2.library import model as library_model -from backend.server.v2.store import db as store_db from backend.util.exceptions import NotFoundError logger = logging.getLogger(__name__) diff --git a/autogpt_platform/backend/backend/api/features/executions/__init__.py b/autogpt_platform/backend/backend/api/features/executions/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/autogpt_platform/backend/backend/api/features/executions/review/__init__.py b/autogpt_platform/backend/backend/api/features/executions/review/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/autogpt_platform/backend/backend/server/v2/executions/review/model.py b/autogpt_platform/backend/backend/api/features/executions/review/model.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/executions/review/model.py rename to autogpt_platform/backend/backend/api/features/executions/review/model.py diff --git a/autogpt_platform/backend/backend/server/v2/executions/review/review_routes_test.py b/autogpt_platform/backend/backend/api/features/executions/review/review_routes_test.py similarity index 87% rename from autogpt_platform/backend/backend/server/v2/executions/review/review_routes_test.py rename to autogpt_platform/backend/backend/api/features/executions/review/review_routes_test.py index 2e62641ad3..9d1df5f999 100644 
--- a/autogpt_platform/backend/backend/server/v2/executions/review/review_routes_test.py +++ b/autogpt_platform/backend/backend/api/features/executions/review/review_routes_test.py @@ -7,9 +7,10 @@ import pytest_mock from prisma.enums import ReviewStatus from pytest_snapshot.plugin import Snapshot -from backend.server.rest_api import handle_internal_http_error -from backend.server.v2.executions.review.model import PendingHumanReviewModel -from backend.server.v2.executions.review.routes import router +from backend.api.rest_api import handle_internal_http_error + +from .model import PendingHumanReviewModel +from .routes import router # Using a fixed timestamp for reproducible tests FIXED_NOW = datetime.datetime(2023, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc) @@ -60,7 +61,7 @@ def test_get_pending_reviews_empty( ) -> None: """Test getting pending reviews when none exist""" mock_get_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_user" + "backend.api.features.executions.review.routes.get_pending_reviews_for_user" ) mock_get_reviews.return_value = [] @@ -79,7 +80,7 @@ def test_get_pending_reviews_with_data( ) -> None: """Test getting pending reviews with data""" mock_get_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_user" + "backend.api.features.executions.review.routes.get_pending_reviews_for_user" ) mock_get_reviews.return_value = [sample_pending_review] @@ -101,7 +102,7 @@ def test_get_pending_reviews_for_execution_success( ) -> None: """Test getting pending reviews for specific execution""" mock_get_graph_execution = mocker.patch( - "backend.server.v2.executions.review.routes.get_graph_execution_meta" + "backend.api.features.executions.review.routes.get_graph_execution_meta" ) mock_get_graph_execution.return_value = { "id": "test_graph_exec_456", @@ -109,7 +110,7 @@ def test_get_pending_reviews_for_execution_success( } mock_get_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution" + "backend.api.features.executions.review.routes.get_pending_reviews_for_execution" ) mock_get_reviews.return_value = [sample_pending_review] @@ -127,7 +128,7 @@ def test_get_pending_reviews_for_execution_access_denied( ) -> None: """Test access denied when user doesn't own the execution""" mock_get_graph_execution = mocker.patch( - "backend.server.v2.executions.review.routes.get_graph_execution_meta" + "backend.api.features.executions.review.routes.get_graph_execution_meta" ) mock_get_graph_execution.return_value = None @@ -146,12 +147,12 @@ def test_process_review_action_approve_success( # Mock the route functions mock_get_reviews_for_execution = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution" + "backend.api.features.executions.review.routes.get_pending_reviews_for_execution" ) mock_get_reviews_for_execution.return_value = [sample_pending_review] mock_process_all_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.process_all_reviews_for_execution" + "backend.api.features.executions.review.routes.process_all_reviews_for_execution" ) # Create approved review for return approved_review = PendingHumanReviewModel( @@ -174,11 +175,11 @@ def test_process_review_action_approve_success( mock_process_all_reviews.return_value = {"test_node_123": approved_review} mock_has_pending = mocker.patch( - "backend.server.v2.executions.review.routes.has_pending_reviews_for_graph_exec" + 
"backend.api.features.executions.review.routes.has_pending_reviews_for_graph_exec" ) mock_has_pending.return_value = False - mocker.patch("backend.server.v2.executions.review.routes.add_graph_execution") + mocker.patch("backend.api.features.executions.review.routes.add_graph_execution") request_data = { "reviews": [ @@ -210,12 +211,12 @@ def test_process_review_action_reject_success( # Mock the route functions mock_get_reviews_for_execution = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution" + "backend.api.features.executions.review.routes.get_pending_reviews_for_execution" ) mock_get_reviews_for_execution.return_value = [sample_pending_review] mock_process_all_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.process_all_reviews_for_execution" + "backend.api.features.executions.review.routes.process_all_reviews_for_execution" ) rejected_review = PendingHumanReviewModel( node_exec_id="test_node_123", @@ -237,7 +238,7 @@ def test_process_review_action_reject_success( mock_process_all_reviews.return_value = {"test_node_123": rejected_review} mock_has_pending = mocker.patch( - "backend.server.v2.executions.review.routes.has_pending_reviews_for_graph_exec" + "backend.api.features.executions.review.routes.has_pending_reviews_for_graph_exec" ) mock_has_pending.return_value = False @@ -289,12 +290,12 @@ def test_process_review_action_mixed_success( # Mock the route functions mock_get_reviews_for_execution = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution" + "backend.api.features.executions.review.routes.get_pending_reviews_for_execution" ) mock_get_reviews_for_execution.return_value = [sample_pending_review, second_review] mock_process_all_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.process_all_reviews_for_execution" + "backend.api.features.executions.review.routes.process_all_reviews_for_execution" ) # Create approved version of first review approved_review = PendingHumanReviewModel( @@ -338,7 +339,7 @@ def test_process_review_action_mixed_success( } mock_has_pending = mocker.patch( - "backend.server.v2.executions.review.routes.has_pending_reviews_for_graph_exec" + "backend.api.features.executions.review.routes.has_pending_reviews_for_graph_exec" ) mock_has_pending.return_value = False @@ -392,13 +393,13 @@ def test_process_review_action_review_not_found( """Test error when review is not found""" # Mock the functions that extract graph execution ID from the request mock_get_reviews_for_execution = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution" + "backend.api.features.executions.review.routes.get_pending_reviews_for_execution" ) mock_get_reviews_for_execution.return_value = [] # No reviews found # Mock process_all_reviews to simulate not finding reviews mock_process_all_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.process_all_reviews_for_execution" + "backend.api.features.executions.review.routes.process_all_reviews_for_execution" ) # This should raise a ValueError with "Reviews not found" message based on the data/human_review.py logic mock_process_all_reviews.side_effect = ValueError( @@ -429,13 +430,13 @@ def test_process_review_action_partial_failure( """Test handling of partial failures in review processing""" # Mock the route functions mock_get_reviews_for_execution = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution" + 
"backend.api.features.executions.review.routes.get_pending_reviews_for_execution" ) mock_get_reviews_for_execution.return_value = [sample_pending_review] # Mock partial failure in processing mock_process_all_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.process_all_reviews_for_execution" + "backend.api.features.executions.review.routes.process_all_reviews_for_execution" ) mock_process_all_reviews.side_effect = ValueError("Some reviews failed validation") @@ -463,13 +464,13 @@ def test_process_review_action_invalid_node_exec_id( """Test failure when trying to process review with invalid node execution ID""" # Mock the route functions mock_get_reviews_for_execution = mocker.patch( - "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution" + "backend.api.features.executions.review.routes.get_pending_reviews_for_execution" ) mock_get_reviews_for_execution.return_value = [sample_pending_review] # Mock validation failure - this should return 400, not 500 mock_process_all_reviews = mocker.patch( - "backend.server.v2.executions.review.routes.process_all_reviews_for_execution" + "backend.api.features.executions.review.routes.process_all_reviews_for_execution" ) mock_process_all_reviews.side_effect = ValueError( "Invalid node execution ID format" diff --git a/autogpt_platform/backend/backend/server/v2/executions/review/routes.py b/autogpt_platform/backend/backend/api/features/executions/review/routes.py similarity index 98% rename from autogpt_platform/backend/backend/server/v2/executions/review/routes.py rename to autogpt_platform/backend/backend/api/features/executions/review/routes.py index 14fb435457..4aa4fac49b 100644 --- a/autogpt_platform/backend/backend/server/v2/executions/review/routes.py +++ b/autogpt_platform/backend/backend/api/features/executions/review/routes.py @@ -13,11 +13,8 @@ from backend.data.human_review import ( process_all_reviews_for_execution, ) from backend.executor.utils import add_graph_execution -from backend.server.v2.executions.review.model import ( - PendingHumanReviewModel, - ReviewRequest, - ReviewResponse, -) + +from .model import PendingHumanReviewModel, ReviewRequest, ReviewResponse logger = logging.getLogger(__name__) diff --git a/autogpt_platform/backend/backend/api/features/integrations/__init__.py b/autogpt_platform/backend/backend/api/features/integrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/autogpt_platform/backend/backend/server/integrations/models.py b/autogpt_platform/backend/backend/api/features/integrations/models.py similarity index 100% rename from autogpt_platform/backend/backend/server/integrations/models.py rename to autogpt_platform/backend/backend/api/features/integrations/models.py diff --git a/autogpt_platform/backend/backend/server/integrations/router.py b/autogpt_platform/backend/backend/api/features/integrations/router.py similarity index 99% rename from autogpt_platform/backend/backend/server/integrations/router.py rename to autogpt_platform/backend/backend/api/features/integrations/router.py index b4227ad02a..f5dd8c092b 100644 --- a/autogpt_platform/backend/backend/server/integrations/router.py +++ b/autogpt_platform/backend/backend/api/features/integrations/router.py @@ -17,6 +17,8 @@ from fastapi import ( from pydantic import BaseModel, Field, SecretStr from starlette.status import HTTP_500_INTERNAL_SERVER_ERROR, HTTP_502_BAD_GATEWAY +from backend.api.features.library.db import set_preset_webhook, update_preset +from backend.api.features.library.model 
import LibraryAgentPreset from backend.data.graph import NodeModel, get_graph, set_node_webhook from backend.data.integrations import ( WebhookEvent, @@ -45,13 +47,6 @@ from backend.integrations.creds_manager import IntegrationCredentialsManager from backend.integrations.oauth import CREDENTIALS_BY_PROVIDER, HANDLERS_BY_NAME from backend.integrations.providers import ProviderName from backend.integrations.webhooks import get_webhook_manager -from backend.server.integrations.models import ( - ProviderConstants, - ProviderNamesResponse, - get_all_provider_names, -) -from backend.server.v2.library.db import set_preset_webhook, update_preset -from backend.server.v2.library.model import LibraryAgentPreset from backend.util.exceptions import ( GraphNotInLibraryError, MissingConfigError, @@ -60,6 +55,8 @@ from backend.util.exceptions import ( ) from backend.util.settings import Settings +from .models import ProviderConstants, ProviderNamesResponse, get_all_provider_names + if TYPE_CHECKING: from backend.integrations.oauth import BaseOAuthHandler diff --git a/autogpt_platform/backend/backend/api/features/library/__init__.py b/autogpt_platform/backend/backend/api/features/library/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/autogpt_platform/backend/backend/server/v2/library/db.py b/autogpt_platform/backend/backend/api/features/library/db.py similarity index 99% rename from autogpt_platform/backend/backend/server/v2/library/db.py rename to autogpt_platform/backend/backend/api/features/library/db.py index 17a0efa7be..ad34326700 100644 --- a/autogpt_platform/backend/backend/server/v2/library/db.py +++ b/autogpt_platform/backend/backend/api/features/library/db.py @@ -4,16 +4,14 @@ from typing import Literal, Optional import fastapi import prisma.errors -import prisma.fields import prisma.models import prisma.types +import backend.api.features.store.exceptions as store_exceptions +import backend.api.features.store.image_gen as store_image_gen +import backend.api.features.store.media as store_media import backend.data.graph as graph_db import backend.data.integrations as integrations_db -import backend.server.v2.library.model as library_model -import backend.server.v2.store.exceptions as store_exceptions -import backend.server.v2.store.image_gen as store_image_gen -import backend.server.v2.store.media as store_media from backend.data.block import BlockInput from backend.data.db import transaction from backend.data.execution import get_graph_execution @@ -28,6 +26,8 @@ from backend.util.json import SafeJson from backend.util.models import Pagination from backend.util.settings import Config +from . 
import model as library_model + logger = logging.getLogger(__name__) config = Config() integration_creds_manager = IntegrationCredentialsManager() diff --git a/autogpt_platform/backend/backend/server/v2/library/db_test.py b/autogpt_platform/backend/backend/api/features/library/db_test.py similarity index 94% rename from autogpt_platform/backend/backend/server/v2/library/db_test.py rename to autogpt_platform/backend/backend/api/features/library/db_test.py index cb0095fb39..6023177070 100644 --- a/autogpt_platform/backend/backend/server/v2/library/db_test.py +++ b/autogpt_platform/backend/backend/api/features/library/db_test.py @@ -1,16 +1,15 @@ from datetime import datetime import prisma.enums -import prisma.errors import prisma.models -import prisma.types import pytest -import backend.server.v2.library.db as db -import backend.server.v2.store.exceptions +import backend.api.features.store.exceptions from backend.data.db import connect from backend.data.includes import library_agent_include +from . import db + @pytest.mark.asyncio async def test_get_library_agents(mocker): @@ -88,7 +87,7 @@ async def test_add_agent_to_library(mocker): await connect() # Mock the transaction context - mock_transaction = mocker.patch("backend.server.v2.library.db.transaction") + mock_transaction = mocker.patch("backend.api.features.library.db.transaction") mock_transaction.return_value.__aenter__ = mocker.AsyncMock(return_value=None) mock_transaction.return_value.__aexit__ = mocker.AsyncMock(return_value=None) # Mock data @@ -151,7 +150,7 @@ async def test_add_agent_to_library(mocker): ) # Mock graph_db.get_graph function that's called to check for HITL blocks - mock_graph_db = mocker.patch("backend.server.v2.library.db.graph_db") + mock_graph_db = mocker.patch("backend.api.features.library.db.graph_db") mock_graph_model = mocker.Mock() mock_graph_model.nodes = ( [] @@ -159,7 +158,9 @@ async def test_add_agent_to_library(mocker): mock_graph_db.get_graph = mocker.AsyncMock(return_value=mock_graph_model) # Mock the model conversion - mock_from_db = mocker.patch("backend.server.v2.library.model.LibraryAgent.from_db") + mock_from_db = mocker.patch( + "backend.api.features.library.model.LibraryAgent.from_db" + ) mock_from_db.return_value = mocker.Mock() # Call function @@ -217,7 +218,7 @@ async def test_add_agent_to_library_not_found(mocker): ) # Call function and verify exception - with pytest.raises(backend.server.v2.store.exceptions.AgentNotFoundError): + with pytest.raises(backend.api.features.store.exceptions.AgentNotFoundError): await db.add_store_agent_to_library("version123", "test-user") # Verify mock called correctly diff --git a/autogpt_platform/backend/backend/server/v2/library/model.py b/autogpt_platform/backend/backend/api/features/library/model.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/library/model.py rename to autogpt_platform/backend/backend/api/features/library/model.py diff --git a/autogpt_platform/backend/backend/server/v2/library/model_test.py b/autogpt_platform/backend/backend/api/features/library/model_test.py similarity index 95% rename from autogpt_platform/backend/backend/server/v2/library/model_test.py rename to autogpt_platform/backend/backend/api/features/library/model_test.py index d90ecf6f7a..a32b19322d 100644 --- a/autogpt_platform/backend/backend/server/v2/library/model_test.py +++ b/autogpt_platform/backend/backend/api/features/library/model_test.py @@ -3,7 +3,7 @@ import datetime import prisma.models import pytest -import 
backend.server.v2.library.model as library_model +from . import model as library_model @pytest.mark.asyncio diff --git a/autogpt_platform/backend/backend/server/v2/library/routes/__init__.py b/autogpt_platform/backend/backend/api/features/library/routes/__init__.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/library/routes/__init__.py rename to autogpt_platform/backend/backend/api/features/library/routes/__init__.py diff --git a/autogpt_platform/backend/backend/server/v2/library/routes/agents.py b/autogpt_platform/backend/backend/api/features/library/routes/agents.py similarity index 98% rename from autogpt_platform/backend/backend/server/v2/library/routes/agents.py rename to autogpt_platform/backend/backend/api/features/library/routes/agents.py index 173b6ab2b4..5a043009fc 100644 --- a/autogpt_platform/backend/backend/server/v2/library/routes/agents.py +++ b/autogpt_platform/backend/backend/api/features/library/routes/agents.py @@ -6,12 +6,13 @@ from fastapi import APIRouter, Body, HTTPException, Query, Security, status from fastapi.responses import Response from prisma.enums import OnboardingStep -import backend.server.v2.library.db as library_db -import backend.server.v2.library.model as library_model -import backend.server.v2.store.exceptions as store_exceptions +import backend.api.features.store.exceptions as store_exceptions from backend.data.onboarding import complete_onboarding_step from backend.util.exceptions import DatabaseError, NotFoundError +from .. import db as library_db +from .. import model as library_model + logger = logging.getLogger(__name__) router = APIRouter( diff --git a/autogpt_platform/backend/backend/server/v2/library/routes/presets.py b/autogpt_platform/backend/backend/api/features/library/routes/presets.py similarity index 99% rename from autogpt_platform/backend/backend/server/v2/library/routes/presets.py rename to autogpt_platform/backend/backend/api/features/library/routes/presets.py index b1810395f0..cd4c04e0f2 100644 --- a/autogpt_platform/backend/backend/server/v2/library/routes/presets.py +++ b/autogpt_platform/backend/backend/api/features/library/routes/presets.py @@ -4,8 +4,6 @@ from typing import Any, Optional import autogpt_libs.auth as autogpt_auth_lib from fastapi import APIRouter, Body, HTTPException, Query, Security, status -import backend.server.v2.library.db as db -import backend.server.v2.library.model as models from backend.data.execution import GraphExecutionMeta from backend.data.graph import get_graph from backend.data.integrations import get_webhook @@ -17,6 +15,9 @@ from backend.integrations.webhooks import get_webhook_manager from backend.integrations.webhooks.utils import setup_webhook_for_block from backend.util.exceptions import NotFoundError +from .. import db +from .. 
import model as models + logger = logging.getLogger(__name__) credentials_manager = IntegrationCredentialsManager() diff --git a/autogpt_platform/backend/backend/server/v2/library/routes_test.py b/autogpt_platform/backend/backend/api/features/library/routes_test.py similarity index 93% rename from autogpt_platform/backend/backend/server/v2/library/routes_test.py rename to autogpt_platform/backend/backend/api/features/library/routes_test.py index bd5d9827de..ad28b5b6bd 100644 --- a/autogpt_platform/backend/backend/server/v2/library/routes_test.py +++ b/autogpt_platform/backend/backend/api/features/library/routes_test.py @@ -7,10 +7,11 @@ import pytest import pytest_mock from pytest_snapshot.plugin import Snapshot -import backend.server.v2.library.model as library_model -from backend.server.v2.library.routes import router as library_router from backend.util.models import Pagination +from . import model as library_model +from .routes import router as library_router + app = fastapi.FastAPI() app.include_router(library_router) @@ -86,7 +87,7 @@ async def test_get_library_agents_success( total_items=2, total_pages=1, current_page=1, page_size=50 ), ) - mock_db_call = mocker.patch("backend.server.v2.library.db.list_library_agents") + mock_db_call = mocker.patch("backend.api.features.library.db.list_library_agents") mock_db_call.return_value = mocked_value response = client.get("/agents?search_term=test") @@ -112,7 +113,7 @@ async def test_get_library_agents_success( def test_get_library_agents_error(mocker: pytest_mock.MockFixture, test_user_id: str): - mock_db_call = mocker.patch("backend.server.v2.library.db.list_library_agents") + mock_db_call = mocker.patch("backend.api.features.library.db.list_library_agents") mock_db_call.side_effect = Exception("Test error") response = client.get("/agents?search_term=test") @@ -161,7 +162,7 @@ async def test_get_favorite_library_agents_success( ), ) mock_db_call = mocker.patch( - "backend.server.v2.library.db.list_favorite_library_agents" + "backend.api.features.library.db.list_favorite_library_agents" ) mock_db_call.return_value = mocked_value @@ -184,7 +185,7 @@ def test_get_favorite_library_agents_error( mocker: pytest_mock.MockFixture, test_user_id: str ): mock_db_call = mocker.patch( - "backend.server.v2.library.db.list_favorite_library_agents" + "backend.api.features.library.db.list_favorite_library_agents" ) mock_db_call.side_effect = Exception("Test error") @@ -223,11 +224,11 @@ def test_add_agent_to_library_success( ) mock_db_call = mocker.patch( - "backend.server.v2.library.db.add_store_agent_to_library" + "backend.api.features.library.db.add_store_agent_to_library" ) mock_db_call.return_value = mock_library_agent mock_complete_onboarding = mocker.patch( - "backend.server.v2.library.routes.agents.complete_onboarding_step", + "backend.api.features.library.routes.agents.complete_onboarding_step", new_callable=AsyncMock, ) @@ -249,7 +250,7 @@ def test_add_agent_to_library_success( def test_add_agent_to_library_error(mocker: pytest_mock.MockFixture, test_user_id: str): mock_db_call = mocker.patch( - "backend.server.v2.library.db.add_store_agent_to_library" + "backend.api.features.library.db.add_store_agent_to_library" ) mock_db_call.side_effect = Exception("Test error") diff --git a/autogpt_platform/backend/backend/server/routers/oauth.py b/autogpt_platform/backend/backend/api/features/oauth.py similarity index 99% rename from autogpt_platform/backend/backend/server/routers/oauth.py rename to autogpt_platform/backend/backend/api/features/oauth.py 
index 55f591427a..023a433951 100644 --- a/autogpt_platform/backend/backend/server/routers/oauth.py +++ b/autogpt_platform/backend/backend/api/features/oauth.py @@ -5,11 +5,11 @@ Implements OAuth 2.0 Authorization Code flow with PKCE support. Flow: 1. User clicks "Login with AutoGPT" in 3rd party app -2. App redirects user to /oauth/authorize with client_id, redirect_uri, scope, state +2. App redirects user to /auth/authorize with client_id, redirect_uri, scope, state 3. User sees consent screen (if not already logged in, redirects to login first) 4. User approves → backend creates authorization code 5. User redirected back to app with code -6. App exchanges code for access/refresh tokens at /oauth/token +6. App exchanges code for access/refresh tokens at /api/oauth/token 7. App uses access token to call external API endpoints """ diff --git a/autogpt_platform/backend/backend/server/routers/oauth_test.py b/autogpt_platform/backend/backend/api/features/oauth_test.py similarity index 99% rename from autogpt_platform/backend/backend/server/routers/oauth_test.py rename to autogpt_platform/backend/backend/api/features/oauth_test.py index 8ec6911152..5f6b85a88a 100644 --- a/autogpt_platform/backend/backend/server/routers/oauth_test.py +++ b/autogpt_platform/backend/backend/api/features/oauth_test.py @@ -28,7 +28,7 @@ from prisma.models import OAuthAuthorizationCode as PrismaOAuthAuthorizationCode from prisma.models import OAuthRefreshToken as PrismaOAuthRefreshToken from prisma.models import User as PrismaUser -from backend.server.rest_api import app +from backend.api.rest_api import app keysmith = APIKeySmith() diff --git a/autogpt_platform/backend/backend/api/features/otto/__init__.py b/autogpt_platform/backend/backend/api/features/otto/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/autogpt_platform/backend/backend/server/v2/otto/models.py b/autogpt_platform/backend/backend/api/features/otto/models.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/otto/models.py rename to autogpt_platform/backend/backend/api/features/otto/models.py diff --git a/autogpt_platform/backend/backend/server/v2/otto/routes.py b/autogpt_platform/backend/backend/api/features/otto/routes.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/otto/routes.py rename to autogpt_platform/backend/backend/api/features/otto/routes.py diff --git a/autogpt_platform/backend/backend/server/v2/otto/routes_test.py b/autogpt_platform/backend/backend/api/features/otto/routes_test.py similarity index 97% rename from autogpt_platform/backend/backend/server/v2/otto/routes_test.py rename to autogpt_platform/backend/backend/api/features/otto/routes_test.py index 2641babe2b..416bcdee76 100644 --- a/autogpt_platform/backend/backend/server/v2/otto/routes_test.py +++ b/autogpt_platform/backend/backend/api/features/otto/routes_test.py @@ -6,9 +6,9 @@ import pytest import pytest_mock from pytest_snapshot.plugin import Snapshot -import backend.server.v2.otto.models as otto_models -import backend.server.v2.otto.routes as otto_routes -from backend.server.v2.otto.service import OttoService +from . import models as otto_models +from . 
import routes as otto_routes +from .service import OttoService app = fastapi.FastAPI() app.include_router(otto_routes.router) diff --git a/autogpt_platform/backend/backend/server/v2/otto/service.py b/autogpt_platform/backend/backend/api/features/otto/service.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/otto/service.py rename to autogpt_platform/backend/backend/api/features/otto/service.py diff --git a/autogpt_platform/backend/backend/api/features/postmark/__init__.py b/autogpt_platform/backend/backend/api/features/postmark/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/autogpt_platform/backend/backend/server/routers/postmark/models.py b/autogpt_platform/backend/backend/api/features/postmark/models.py similarity index 100% rename from autogpt_platform/backend/backend/server/routers/postmark/models.py rename to autogpt_platform/backend/backend/api/features/postmark/models.py diff --git a/autogpt_platform/backend/backend/server/routers/postmark/postmark.py b/autogpt_platform/backend/backend/api/features/postmark/postmark.py similarity index 96% rename from autogpt_platform/backend/backend/server/routers/postmark/postmark.py rename to autogpt_platform/backend/backend/api/features/postmark/postmark.py index 2190aa5fce..224e30fa9d 100644 --- a/autogpt_platform/backend/backend/server/routers/postmark/postmark.py +++ b/autogpt_platform/backend/backend/api/features/postmark/postmark.py @@ -4,12 +4,15 @@ from typing import Annotated from fastapi import APIRouter, Body, HTTPException, Query, Security from fastapi.responses import JSONResponse +from backend.api.utils.api_key_auth import APIKeyAuthenticator from backend.data.user import ( get_user_by_email, set_user_email_verification, unsubscribe_user_by_token, ) -from backend.server.routers.postmark.models import ( +from backend.util.settings import Settings + +from .models import ( PostmarkBounceEnum, PostmarkBounceWebhook, PostmarkClickWebhook, @@ -19,8 +22,6 @@ from backend.server.routers.postmark.models import ( PostmarkSubscriptionChangeWebhook, PostmarkWebhook, ) -from backend.server.utils.api_key_auth import APIKeyAuthenticator -from backend.util.settings import Settings logger = logging.getLogger(__name__) settings = Settings() diff --git a/autogpt_platform/backend/backend/server/v2/store/README.md b/autogpt_platform/backend/backend/api/features/store/README.md similarity index 100% rename from autogpt_platform/backend/backend/server/v2/store/README.md rename to autogpt_platform/backend/backend/api/features/store/README.md diff --git a/autogpt_platform/backend/backend/api/features/store/__init__.py b/autogpt_platform/backend/backend/api/features/store/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/autogpt_platform/backend/backend/server/v2/store/cache.py b/autogpt_platform/backend/backend/api/features/store/cache.py similarity index 85% rename from autogpt_platform/backend/backend/server/v2/store/cache.py rename to autogpt_platform/backend/backend/api/features/store/cache.py index 574403342e..7832069d49 100644 --- a/autogpt_platform/backend/backend/server/v2/store/cache.py +++ b/autogpt_platform/backend/backend/api/features/store/cache.py @@ -1,8 +1,9 @@ from typing import Literal -import backend.server.v2.store.db from backend.util.cache import cached +from . 
import db as store_db + ############################################## ############### Caches ####################### ############################################## @@ -29,7 +30,7 @@ async def _get_cached_store_agents( page_size: int, ): """Cached helper to get store agents.""" - return await backend.server.v2.store.db.get_store_agents( + return await store_db.get_store_agents( featured=featured, creators=[creator] if creator else None, sorted_by=sorted_by, @@ -44,7 +45,7 @@ async def _get_cached_store_agents( @cached(maxsize=200, ttl_seconds=300, shared_cache=True) async def _get_cached_agent_details(username: str, agent_name: str): """Cached helper to get agent details.""" - return await backend.server.v2.store.db.get_store_agent_details( + return await store_db.get_store_agent_details( username=username, agent_name=agent_name ) @@ -59,7 +60,7 @@ async def _get_cached_store_creators( page_size: int, ): """Cached helper to get store creators.""" - return await backend.server.v2.store.db.get_store_creators( + return await store_db.get_store_creators( featured=featured, search_query=search_query, sorted_by=sorted_by, @@ -72,6 +73,4 @@ async def _get_cached_store_creators( @cached(maxsize=100, ttl_seconds=300, shared_cache=True) async def _get_cached_creator_details(username: str): """Cached helper to get creator details.""" - return await backend.server.v2.store.db.get_store_creator_details( - username=username.lower() - ) + return await store_db.get_store_creator_details(username=username.lower()) diff --git a/autogpt_platform/backend/backend/server/v2/store/db.py b/autogpt_platform/backend/backend/api/features/store/db.py similarity index 92% rename from autogpt_platform/backend/backend/server/v2/store/db.py rename to autogpt_platform/backend/backend/api/features/store/db.py index 33554a9c2a..12f1783468 100644 --- a/autogpt_platform/backend/backend/server/v2/store/db.py +++ b/autogpt_platform/backend/backend/api/features/store/db.py @@ -10,8 +10,6 @@ import prisma.errors import prisma.models import prisma.types -import backend.server.v2.store.exceptions -import backend.server.v2.store.model from backend.data.db import query_raw_with_schema, transaction from backend.data.graph import ( GraphMeta, @@ -30,6 +28,9 @@ from backend.notifications.notifications import queue_notification_async from backend.util.exceptions import DatabaseError from backend.util.settings import Settings +from . import exceptions as store_exceptions +from . 
import model as store_model + logger = logging.getLogger(__name__) settings = Settings() @@ -47,7 +48,7 @@ async def get_store_agents( category: str | None = None, page: int = 1, page_size: int = 20, -) -> backend.server.v2.store.model.StoreAgentsResponse: +) -> store_model.StoreAgentsResponse: """ Get PUBLIC store agents from the StoreAgent view """ @@ -148,10 +149,10 @@ async def get_store_agents( total_pages = (total + page_size - 1) // page_size # Convert raw results to StoreAgent models - store_agents: list[backend.server.v2.store.model.StoreAgent] = [] + store_agents: list[store_model.StoreAgent] = [] for agent in agents: try: - store_agent = backend.server.v2.store.model.StoreAgent( + store_agent = store_model.StoreAgent( slug=agent["slug"], agent_name=agent["agent_name"], agent_image=( @@ -197,11 +198,11 @@ async def get_store_agents( total = await prisma.models.StoreAgent.prisma().count(where=where_clause) total_pages = (total + page_size - 1) // page_size - store_agents: list[backend.server.v2.store.model.StoreAgent] = [] + store_agents: list[store_model.StoreAgent] = [] for agent in agents: try: # Create the StoreAgent object safely - store_agent = backend.server.v2.store.model.StoreAgent( + store_agent = store_model.StoreAgent( slug=agent.slug, agent_name=agent.agent_name, agent_image=agent.agent_image[0] if agent.agent_image else "", @@ -223,9 +224,9 @@ async def get_store_agents( continue logger.debug(f"Found {len(store_agents)} agents") - return backend.server.v2.store.model.StoreAgentsResponse( + return store_model.StoreAgentsResponse( agents=store_agents, - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=page, total_items=total, total_pages=total_pages, @@ -257,7 +258,7 @@ async def log_search_term(search_query: str): async def get_store_agent_details( username: str, agent_name: str -) -> backend.server.v2.store.model.StoreAgentDetails: +) -> store_model.StoreAgentDetails: """Get PUBLIC store agent details from the StoreAgent view""" logger.debug(f"Getting store agent details for {username}/{agent_name}") @@ -268,7 +269,7 @@ async def get_store_agent_details( if not agent: logger.warning(f"Agent not found: {username}/{agent_name}") - raise backend.server.v2.store.exceptions.AgentNotFoundError( + raise store_exceptions.AgentNotFoundError( f"Agent {username}/{agent_name} not found" ) @@ -322,7 +323,7 @@ async def get_store_agent_details( recommended_schedule_cron = None logger.debug(f"Found agent details for {username}/{agent_name}") - return backend.server.v2.store.model.StoreAgentDetails( + return store_model.StoreAgentDetails( store_listing_version_id=agent.storeListingVersionId, slug=agent.slug, agent_name=agent.agent_name, @@ -342,7 +343,7 @@ async def get_store_agent_details( has_approved_version=has_approved_version, recommended_schedule_cron=recommended_schedule_cron, ) - except backend.server.v2.store.exceptions.AgentNotFoundError: + except store_exceptions.AgentNotFoundError: raise except Exception as e: logger.error(f"Error getting store agent details: {e}") @@ -378,7 +379,7 @@ async def get_available_graph(store_listing_version_id: str) -> GraphMeta: async def get_store_agent_by_version_id( store_listing_version_id: str, -) -> backend.server.v2.store.model.StoreAgentDetails: +) -> store_model.StoreAgentDetails: logger.debug(f"Getting store agent details for {store_listing_version_id}") try: @@ -388,12 +389,12 @@ async def get_store_agent_by_version_id( if not agent: logger.warning(f"Agent not found: 
{store_listing_version_id}") - raise backend.server.v2.store.exceptions.AgentNotFoundError( + raise store_exceptions.AgentNotFoundError( f"Agent {store_listing_version_id} not found" ) logger.debug(f"Found agent details for {store_listing_version_id}") - return backend.server.v2.store.model.StoreAgentDetails( + return store_model.StoreAgentDetails( store_listing_version_id=agent.storeListingVersionId, slug=agent.slug, agent_name=agent.agent_name, @@ -410,7 +411,7 @@ async def get_store_agent_by_version_id( versions=agent.versions, last_updated=agent.updated_at, ) - except backend.server.v2.store.exceptions.AgentNotFoundError: + except store_exceptions.AgentNotFoundError: raise except Exception as e: logger.error(f"Error getting store agent details: {e}") @@ -423,7 +424,7 @@ async def get_store_creators( sorted_by: Literal["agent_rating", "agent_runs", "num_agents"] | None = None, page: int = 1, page_size: int = 20, -) -> backend.server.v2.store.model.CreatorsResponse: +) -> store_model.CreatorsResponse: """Get PUBLIC store creators from the Creator view""" logger.debug( f"Getting store creators. featured={featured}, search={search_query}, sorted_by={sorted_by}, page={page}" @@ -498,7 +499,7 @@ async def get_store_creators( # Convert to response model creator_models = [ - backend.server.v2.store.model.Creator( + store_model.Creator( username=creator.username, name=creator.name, description=creator.description, @@ -512,9 +513,9 @@ async def get_store_creators( ] logger.debug(f"Found {len(creator_models)} creators") - return backend.server.v2.store.model.CreatorsResponse( + return store_model.CreatorsResponse( creators=creator_models, - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=page, total_items=total, total_pages=total_pages, @@ -528,7 +529,7 @@ async def get_store_creators( async def get_store_creator_details( username: str, -) -> backend.server.v2.store.model.CreatorDetails: +) -> store_model.CreatorDetails: logger.debug(f"Getting store creator details for {username}") try: @@ -539,12 +540,10 @@ async def get_store_creator_details( if not creator: logger.warning(f"Creator not found: {username}") - raise backend.server.v2.store.exceptions.CreatorNotFoundError( - f"Creator {username} not found" - ) + raise store_exceptions.CreatorNotFoundError(f"Creator {username} not found") logger.debug(f"Found creator details for {username}") - return backend.server.v2.store.model.CreatorDetails( + return store_model.CreatorDetails( name=creator.name, username=creator.username, description=creator.description, @@ -554,7 +553,7 @@ async def get_store_creator_details( agent_runs=creator.agent_runs, top_categories=creator.top_categories, ) - except backend.server.v2.store.exceptions.CreatorNotFoundError: + except store_exceptions.CreatorNotFoundError: raise except Exception as e: logger.error(f"Error getting store creator details: {e}") @@ -563,7 +562,7 @@ async def get_store_creator_details( async def get_store_submissions( user_id: str, page: int = 1, page_size: int = 20 -) -> backend.server.v2.store.model.StoreSubmissionsResponse: +) -> store_model.StoreSubmissionsResponse: """Get store submissions for the authenticated user -- not an admin""" logger.debug(f"Getting store submissions for user {user_id}, page={page}") @@ -588,7 +587,7 @@ async def get_store_submissions( # Convert to response models submission_models = [] for sub in submissions: - submission_model = backend.server.v2.store.model.StoreSubmission( + submission_model = 
store_model.StoreSubmission( agent_id=sub.agent_id, agent_version=sub.agent_version, name=sub.name, @@ -613,9 +612,9 @@ async def get_store_submissions( submission_models.append(submission_model) logger.debug(f"Found {len(submission_models)} submissions") - return backend.server.v2.store.model.StoreSubmissionsResponse( + return store_model.StoreSubmissionsResponse( submissions=submission_models, - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=page, total_items=total, total_pages=total_pages, @@ -626,9 +625,9 @@ async def get_store_submissions( except Exception as e: logger.error(f"Error fetching store submissions: {e}") # Return empty response rather than exposing internal errors - return backend.server.v2.store.model.StoreSubmissionsResponse( + return store_model.StoreSubmissionsResponse( submissions=[], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=page, total_items=0, total_pages=0, @@ -661,7 +660,7 @@ async def delete_store_submission( if not submission: logger.warning(f"Submission not found for user {user_id}: {submission_id}") - raise backend.server.v2.store.exceptions.SubmissionNotFoundError( + raise store_exceptions.SubmissionNotFoundError( f"Submission not found for this user. User ID: {user_id}, Submission ID: {submission_id}" ) @@ -693,7 +692,7 @@ async def create_store_submission( categories: list[str] = [], changes_summary: str | None = "Initial Submission", recommended_schedule_cron: str | None = None, -) -> backend.server.v2.store.model.StoreSubmission: +) -> store_model.StoreSubmission: """ Create the first (and only) store listing and thus submission as a normal user @@ -734,7 +733,7 @@ async def create_store_submission( logger.warning( f"Agent not found for user {user_id}: {agent_id} v{agent_version}" ) - raise backend.server.v2.store.exceptions.AgentNotFoundError( + raise store_exceptions.AgentNotFoundError( f"Agent not found for this user. User ID: {user_id}, Agent ID: {agent_id}, Version: {agent_version}" ) @@ -807,7 +806,7 @@ async def create_store_submission( logger.debug(f"Created store listing for agent {agent_id}") # Return submission details - return backend.server.v2.store.model.StoreSubmission( + return store_model.StoreSubmission( agent_id=agent_id, agent_version=agent_version, name=name, @@ -830,7 +829,7 @@ async def create_store_submission( logger.debug( f"Slug '{slug}' is already in use by another agent (agent_id: {agent_id}) for user {user_id}" ) - raise backend.server.v2.store.exceptions.SlugAlreadyInUseError( + raise store_exceptions.SlugAlreadyInUseError( f"The URL slug '{slug}' is already in use by another one of your agents. Please choose a different slug." ) from exc else: @@ -839,8 +838,8 @@ async def create_store_submission( f"Unique constraint violated (not slug): {error_str}" ) from exc except ( - backend.server.v2.store.exceptions.AgentNotFoundError, - backend.server.v2.store.exceptions.ListingExistsError, + store_exceptions.AgentNotFoundError, + store_exceptions.ListingExistsError, ): raise except prisma.errors.PrismaError as e: @@ -861,7 +860,7 @@ async def edit_store_submission( changes_summary: str | None = "Update submission", recommended_schedule_cron: str | None = None, instructions: str | None = None, -) -> backend.server.v2.store.model.StoreSubmission: +) -> store_model.StoreSubmission: """ Edit an existing store listing submission. 
@@ -903,7 +902,7 @@ async def edit_store_submission( ) if not current_version: - raise backend.server.v2.store.exceptions.SubmissionNotFoundError( + raise store_exceptions.SubmissionNotFoundError( f"Store listing version not found: {store_listing_version_id}" ) @@ -912,7 +911,7 @@ async def edit_store_submission( not current_version.StoreListing or current_version.StoreListing.owningUserId != user_id ): - raise backend.server.v2.store.exceptions.UnauthorizedError( + raise store_exceptions.UnauthorizedError( f"User {user_id} does not own submission {store_listing_version_id}" ) @@ -921,7 +920,7 @@ async def edit_store_submission( # Check if we can edit this submission if current_version.submissionStatus == prisma.enums.SubmissionStatus.REJECTED: - raise backend.server.v2.store.exceptions.InvalidOperationError( + raise store_exceptions.InvalidOperationError( "Cannot edit a rejected submission" ) @@ -970,7 +969,7 @@ async def edit_store_submission( if not updated_version: raise DatabaseError("Failed to update store listing version") - return backend.server.v2.store.model.StoreSubmission( + return store_model.StoreSubmission( agent_id=current_version.agentGraphId, agent_version=current_version.agentGraphVersion, name=name, @@ -991,16 +990,16 @@ async def edit_store_submission( ) else: - raise backend.server.v2.store.exceptions.InvalidOperationError( + raise store_exceptions.InvalidOperationError( f"Cannot edit submission with status: {current_version.submissionStatus}" ) except ( - backend.server.v2.store.exceptions.SubmissionNotFoundError, - backend.server.v2.store.exceptions.UnauthorizedError, - backend.server.v2.store.exceptions.AgentNotFoundError, - backend.server.v2.store.exceptions.ListingExistsError, - backend.server.v2.store.exceptions.InvalidOperationError, + store_exceptions.SubmissionNotFoundError, + store_exceptions.UnauthorizedError, + store_exceptions.AgentNotFoundError, + store_exceptions.ListingExistsError, + store_exceptions.InvalidOperationError, ): raise except prisma.errors.PrismaError as e: @@ -1023,7 +1022,7 @@ async def create_store_version( categories: list[str] = [], changes_summary: str | None = "Initial submission", recommended_schedule_cron: str | None = None, -) -> backend.server.v2.store.model.StoreSubmission: +) -> store_model.StoreSubmission: """ Create a new version for an existing store listing @@ -1056,7 +1055,7 @@ async def create_store_version( ) if not listing: - raise backend.server.v2.store.exceptions.ListingNotFoundError( + raise store_exceptions.ListingNotFoundError( f"Store listing not found. User ID: {user_id}, Listing ID: {store_listing_id}" ) @@ -1068,7 +1067,7 @@ async def create_store_version( ) if not agent: - raise backend.server.v2.store.exceptions.AgentNotFoundError( + raise store_exceptions.AgentNotFoundError( f"Agent not found for this user. 
User ID: {user_id}, Agent ID: {agent_id}, Version: {agent_version}" ) @@ -1103,7 +1102,7 @@ async def create_store_version( f"Created new version for listing {store_listing_id} of agent {agent_id}" ) # Return submission details - return backend.server.v2.store.model.StoreSubmission( + return store_model.StoreSubmission( agent_id=agent_id, agent_version=agent_version, name=name, @@ -1130,7 +1129,7 @@ async def create_store_review( store_listing_version_id: str, score: int, comments: str | None = None, -) -> backend.server.v2.store.model.StoreReview: +) -> store_model.StoreReview: """Create a review for a store listing as a user to detail their experience""" try: data = prisma.types.StoreListingReviewUpsertInput( @@ -1155,7 +1154,7 @@ async def create_store_review( data=data, ) - return backend.server.v2.store.model.StoreReview( + return store_model.StoreReview( score=review.score, comments=review.comments, ) @@ -1167,7 +1166,7 @@ async def create_store_review( async def get_user_profile( user_id: str, -) -> backend.server.v2.store.model.ProfileDetails | None: +) -> store_model.ProfileDetails | None: logger.debug(f"Getting user profile for {user_id}") try: @@ -1177,7 +1176,7 @@ async def get_user_profile( if not profile: return None - return backend.server.v2.store.model.ProfileDetails( + return store_model.ProfileDetails( name=profile.name, username=profile.username, description=profile.description, @@ -1190,8 +1189,8 @@ async def get_user_profile( async def update_profile( - user_id: str, profile: backend.server.v2.store.model.Profile -) -> backend.server.v2.store.model.CreatorDetails: + user_id: str, profile: store_model.Profile +) -> store_model.CreatorDetails: """ Update the store profile for a user or create a new one if it doesn't exist. Args: @@ -1214,7 +1213,7 @@ async def update_profile( where={"userId": user_id} ) if not existing_profile: - raise backend.server.v2.store.exceptions.ProfileNotFoundError( + raise store_exceptions.ProfileNotFoundError( f"Profile not found for user {user_id}. This should not be possible." 
) @@ -1250,7 +1249,7 @@ async def update_profile( logger.error(f"Failed to update profile for user {user_id}") raise DatabaseError("Failed to update profile") - return backend.server.v2.store.model.CreatorDetails( + return store_model.CreatorDetails( name=updated_profile.name, username=updated_profile.username, description=updated_profile.description, @@ -1270,7 +1269,7 @@ async def get_my_agents( user_id: str, page: int = 1, page_size: int = 20, -) -> backend.server.v2.store.model.MyAgentsResponse: +) -> store_model.MyAgentsResponse: """Get the agents for the authenticated user""" logger.debug(f"Getting my agents for user {user_id}, page={page}") @@ -1307,7 +1306,7 @@ async def get_my_agents( total_pages = (total + page_size - 1) // page_size my_agents = [ - backend.server.v2.store.model.MyAgent( + store_model.MyAgent( agent_id=graph.id, agent_version=graph.version, agent_name=graph.name or "", @@ -1320,9 +1319,9 @@ async def get_my_agents( if (graph := library_agent.AgentGraph) ] - return backend.server.v2.store.model.MyAgentsResponse( + return store_model.MyAgentsResponse( agents=my_agents, - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=page, total_items=total, total_pages=total_pages, @@ -1469,7 +1468,7 @@ async def review_store_submission( external_comments: str, internal_comments: str, reviewer_id: str, -) -> backend.server.v2.store.model.StoreSubmission: +) -> store_model.StoreSubmission: """Review a store listing submission as an admin.""" try: store_listing_version = ( @@ -1682,7 +1681,7 @@ async def review_store_submission( pass # Convert to Pydantic model for consistency - return backend.server.v2.store.model.StoreSubmission( + return store_model.StoreSubmission( agent_id=submission.agentGraphId, agent_version=submission.agentGraphVersion, name=submission.name, @@ -1717,7 +1716,7 @@ async def get_admin_listings_with_versions( search_query: str | None = None, page: int = 1, page_size: int = 20, -) -> backend.server.v2.store.model.StoreListingsWithVersionsResponse: +) -> store_model.StoreListingsWithVersionsResponse: """ Get store listings for admins with all their versions. 
@@ -1816,10 +1815,10 @@ async def get_admin_listings_with_versions( # Convert to response models listings_with_versions = [] for listing in listings: - versions: list[backend.server.v2.store.model.StoreSubmission] = [] + versions: list[store_model.StoreSubmission] = [] # If we have versions, turn them into StoreSubmission models for version in listing.Versions or []: - version_model = backend.server.v2.store.model.StoreSubmission( + version_model = store_model.StoreSubmission( agent_id=version.agentGraphId, agent_version=version.agentGraphVersion, name=version.name, @@ -1847,26 +1846,24 @@ async def get_admin_listings_with_versions( creator_email = listing.OwningUser.email if listing.OwningUser else None - listing_with_versions = ( - backend.server.v2.store.model.StoreListingWithVersions( - listing_id=listing.id, - slug=listing.slug, - agent_id=listing.agentGraphId, - agent_version=listing.agentGraphVersion, - active_version_id=listing.activeVersionId, - has_approved_version=listing.hasApprovedVersion, - creator_email=creator_email, - latest_version=latest_version, - versions=versions, - ) + listing_with_versions = store_model.StoreListingWithVersions( + listing_id=listing.id, + slug=listing.slug, + agent_id=listing.agentGraphId, + agent_version=listing.agentGraphVersion, + active_version_id=listing.activeVersionId, + has_approved_version=listing.hasApprovedVersion, + creator_email=creator_email, + latest_version=latest_version, + versions=versions, ) listings_with_versions.append(listing_with_versions) logger.debug(f"Found {len(listings_with_versions)} listings for admin") - return backend.server.v2.store.model.StoreListingsWithVersionsResponse( + return store_model.StoreListingsWithVersionsResponse( listings=listings_with_versions, - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=page, total_items=total, total_pages=total_pages, @@ -1876,9 +1873,9 @@ async def get_admin_listings_with_versions( except Exception as e: logger.error(f"Error fetching admin store listings: {e}") # Return empty response rather than exposing internal errors - return backend.server.v2.store.model.StoreListingsWithVersionsResponse( + return store_model.StoreListingsWithVersionsResponse( listings=[], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=page, total_items=0, total_pages=0, diff --git a/autogpt_platform/backend/backend/server/v2/store/db_test.py b/autogpt_platform/backend/backend/api/features/store/db_test.py similarity index 99% rename from autogpt_platform/backend/backend/server/v2/store/db_test.py rename to autogpt_platform/backend/backend/api/features/store/db_test.py index bf7cf39d6a..641f392d86 100644 --- a/autogpt_platform/backend/backend/server/v2/store/db_test.py +++ b/autogpt_platform/backend/backend/api/features/store/db_test.py @@ -6,8 +6,8 @@ import prisma.models import pytest from prisma import Prisma -import backend.server.v2.store.db as db -from backend.server.v2.store.model import Profile +from . 
import db +from .model import Profile @pytest.fixture(autouse=True) diff --git a/autogpt_platform/backend/backend/server/v2/store/exceptions.py b/autogpt_platform/backend/backend/api/features/store/exceptions.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/store/exceptions.py rename to autogpt_platform/backend/backend/api/features/store/exceptions.py diff --git a/autogpt_platform/backend/backend/server/v2/store/image_gen.py b/autogpt_platform/backend/backend/api/features/store/image_gen.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/store/image_gen.py rename to autogpt_platform/backend/backend/api/features/store/image_gen.py diff --git a/autogpt_platform/backend/backend/server/v2/store/media.py b/autogpt_platform/backend/backend/api/features/store/media.py similarity index 81% rename from autogpt_platform/backend/backend/server/v2/store/media.py rename to autogpt_platform/backend/backend/api/features/store/media.py index 88542dd2c8..cfdc71567a 100644 --- a/autogpt_platform/backend/backend/server/v2/store/media.py +++ b/autogpt_platform/backend/backend/api/features/store/media.py @@ -5,11 +5,12 @@ import uuid import fastapi from gcloud.aio import storage as async_storage -import backend.server.v2.store.exceptions from backend.util.exceptions import MissingConfigError from backend.util.settings import Settings from backend.util.virus_scanner import scan_content_safe +from . import exceptions as store_exceptions + logger = logging.getLogger(__name__) ALLOWED_IMAGE_TYPES = {"image/jpeg", "image/png", "image/gif", "image/webp"} @@ -68,61 +69,55 @@ async def upload_media( await file.seek(0) # Reset file pointer except Exception as e: logger.error(f"Error reading file content: {str(e)}") - raise backend.server.v2.store.exceptions.FileReadError( - "Failed to read file content" - ) from e + raise store_exceptions.FileReadError("Failed to read file content") from e # Validate file signature/magic bytes if file.content_type in ALLOWED_IMAGE_TYPES: # Check image file signatures if content.startswith(b"\xff\xd8\xff"): # JPEG if file.content_type != "image/jpeg": - raise backend.server.v2.store.exceptions.InvalidFileTypeError( + raise store_exceptions.InvalidFileTypeError( "File signature does not match content type" ) elif content.startswith(b"\x89PNG\r\n\x1a\n"): # PNG if file.content_type != "image/png": - raise backend.server.v2.store.exceptions.InvalidFileTypeError( + raise store_exceptions.InvalidFileTypeError( "File signature does not match content type" ) elif content.startswith(b"GIF87a") or content.startswith(b"GIF89a"): # GIF if file.content_type != "image/gif": - raise backend.server.v2.store.exceptions.InvalidFileTypeError( + raise store_exceptions.InvalidFileTypeError( "File signature does not match content type" ) elif content.startswith(b"RIFF") and content[8:12] == b"WEBP": # WebP if file.content_type != "image/webp": - raise backend.server.v2.store.exceptions.InvalidFileTypeError( + raise store_exceptions.InvalidFileTypeError( "File signature does not match content type" ) else: - raise backend.server.v2.store.exceptions.InvalidFileTypeError( - "Invalid image file signature" - ) + raise store_exceptions.InvalidFileTypeError("Invalid image file signature") elif file.content_type in ALLOWED_VIDEO_TYPES: # Check video file signatures if content.startswith(b"\x00\x00\x00") and (content[4:8] == b"ftyp"): # MP4 if file.content_type != "video/mp4": - raise backend.server.v2.store.exceptions.InvalidFileTypeError( + raise 
store_exceptions.InvalidFileTypeError( "File signature does not match content type" ) elif content.startswith(b"\x1a\x45\xdf\xa3"): # WebM if file.content_type != "video/webm": - raise backend.server.v2.store.exceptions.InvalidFileTypeError( + raise store_exceptions.InvalidFileTypeError( "File signature does not match content type" ) else: - raise backend.server.v2.store.exceptions.InvalidFileTypeError( - "Invalid video file signature" - ) + raise store_exceptions.InvalidFileTypeError("Invalid video file signature") settings = Settings() # Check required settings first before doing any file processing if not settings.config.media_gcs_bucket_name: logger.error("Missing GCS bucket name setting") - raise backend.server.v2.store.exceptions.StorageConfigError( + raise store_exceptions.StorageConfigError( "Missing storage bucket configuration" ) @@ -137,7 +132,7 @@ async def upload_media( and content_type not in ALLOWED_VIDEO_TYPES ): logger.warning(f"Invalid file type attempted: {content_type}") - raise backend.server.v2.store.exceptions.InvalidFileTypeError( + raise store_exceptions.InvalidFileTypeError( f"File type not supported. Must be jpeg, png, gif, webp, mp4 or webm. Content type: {content_type}" ) @@ -150,16 +145,14 @@ async def upload_media( file_size += len(chunk) if file_size > MAX_FILE_SIZE: logger.warning(f"File size too large: {file_size} bytes") - raise backend.server.v2.store.exceptions.FileSizeTooLargeError( + raise store_exceptions.FileSizeTooLargeError( "File too large. Maximum size is 50MB" ) - except backend.server.v2.store.exceptions.FileSizeTooLargeError: + except store_exceptions.FileSizeTooLargeError: raise except Exception as e: logger.error(f"Error reading file chunks: {str(e)}") - raise backend.server.v2.store.exceptions.FileReadError( - "Failed to read uploaded file" - ) from e + raise store_exceptions.FileReadError("Failed to read uploaded file") from e # Reset file pointer await file.seek(0) @@ -198,14 +191,14 @@ async def upload_media( except Exception as e: logger.error(f"GCS storage error: {str(e)}") - raise backend.server.v2.store.exceptions.StorageUploadError( + raise store_exceptions.StorageUploadError( "Failed to upload file to storage" ) from e - except backend.server.v2.store.exceptions.MediaUploadError: + except store_exceptions.MediaUploadError: raise except Exception as e: logger.exception("Unexpected error in upload_media") - raise backend.server.v2.store.exceptions.MediaUploadError( + raise store_exceptions.MediaUploadError( "Unexpected error during media upload" ) from e diff --git a/autogpt_platform/backend/backend/server/v2/store/media_test.py b/autogpt_platform/backend/backend/api/features/store/media_test.py similarity index 75% rename from autogpt_platform/backend/backend/server/v2/store/media_test.py rename to autogpt_platform/backend/backend/api/features/store/media_test.py index 3722d2fdc3..7f3899c8a5 100644 --- a/autogpt_platform/backend/backend/server/v2/store/media_test.py +++ b/autogpt_platform/backend/backend/api/features/store/media_test.py @@ -6,17 +6,18 @@ import fastapi import pytest import starlette.datastructures -import backend.server.v2.store.exceptions -import backend.server.v2.store.media from backend.util.settings import Settings +from . import exceptions as store_exceptions +from . 
import media as store_media + @pytest.fixture def mock_settings(monkeypatch): settings = Settings() settings.config.media_gcs_bucket_name = "test-bucket" settings.config.google_application_credentials = "test-credentials" - monkeypatch.setattr("backend.server.v2.store.media.Settings", lambda: settings) + monkeypatch.setattr("backend.api.features.store.media.Settings", lambda: settings) return settings @@ -32,12 +33,13 @@ def mock_storage_client(mocker): # Mock the constructor to return our mock client mocker.patch( - "backend.server.v2.store.media.async_storage.Storage", return_value=mock_client + "backend.api.features.store.media.async_storage.Storage", + return_value=mock_client, ) # Mock virus scanner to avoid actual scanning mocker.patch( - "backend.server.v2.store.media.scan_content_safe", new_callable=AsyncMock + "backend.api.features.store.media.scan_content_safe", new_callable=AsyncMock ) return mock_client @@ -53,7 +55,7 @@ async def test_upload_media_success(mock_settings, mock_storage_client): headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}), ) - result = await backend.server.v2.store.media.upload_media("test-user", test_file) + result = await store_media.upload_media("test-user", test_file) assert result.startswith( "https://storage.googleapis.com/test-bucket/users/test-user/images/" @@ -69,8 +71,8 @@ async def test_upload_media_invalid_type(mock_settings, mock_storage_client): headers=starlette.datastructures.Headers({"content-type": "text/plain"}), ) - with pytest.raises(backend.server.v2.store.exceptions.InvalidFileTypeError): - await backend.server.v2.store.media.upload_media("test-user", test_file) + with pytest.raises(store_exceptions.InvalidFileTypeError): + await store_media.upload_media("test-user", test_file) mock_storage_client.upload.assert_not_called() @@ -79,7 +81,7 @@ async def test_upload_media_missing_credentials(monkeypatch): settings = Settings() settings.config.media_gcs_bucket_name = "" settings.config.google_application_credentials = "" - monkeypatch.setattr("backend.server.v2.store.media.Settings", lambda: settings) + monkeypatch.setattr("backend.api.features.store.media.Settings", lambda: settings) test_file = fastapi.UploadFile( filename="laptop.jpeg", @@ -87,8 +89,8 @@ async def test_upload_media_missing_credentials(monkeypatch): headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}), ) - with pytest.raises(backend.server.v2.store.exceptions.StorageConfigError): - await backend.server.v2.store.media.upload_media("test-user", test_file) + with pytest.raises(store_exceptions.StorageConfigError): + await store_media.upload_media("test-user", test_file) async def test_upload_media_video_type(mock_settings, mock_storage_client): @@ -98,7 +100,7 @@ async def test_upload_media_video_type(mock_settings, mock_storage_client): headers=starlette.datastructures.Headers({"content-type": "video/mp4"}), ) - result = await backend.server.v2.store.media.upload_media("test-user", test_file) + result = await store_media.upload_media("test-user", test_file) assert result.startswith( "https://storage.googleapis.com/test-bucket/users/test-user/videos/" @@ -117,8 +119,8 @@ async def test_upload_media_file_too_large(mock_settings, mock_storage_client): headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}), ) - with pytest.raises(backend.server.v2.store.exceptions.FileSizeTooLargeError): - await backend.server.v2.store.media.upload_media("test-user", test_file) + with 
pytest.raises(store_exceptions.FileSizeTooLargeError): + await store_media.upload_media("test-user", test_file) async def test_upload_media_file_read_error(mock_settings, mock_storage_client): @@ -129,8 +131,8 @@ async def test_upload_media_file_read_error(mock_settings, mock_storage_client): ) test_file.read = unittest.mock.AsyncMock(side_effect=Exception("Read error")) - with pytest.raises(backend.server.v2.store.exceptions.FileReadError): - await backend.server.v2.store.media.upload_media("test-user", test_file) + with pytest.raises(store_exceptions.FileReadError): + await store_media.upload_media("test-user", test_file) async def test_upload_media_png_success(mock_settings, mock_storage_client): @@ -140,7 +142,7 @@ async def test_upload_media_png_success(mock_settings, mock_storage_client): headers=starlette.datastructures.Headers({"content-type": "image/png"}), ) - result = await backend.server.v2.store.media.upload_media("test-user", test_file) + result = await store_media.upload_media("test-user", test_file) assert result.startswith( "https://storage.googleapis.com/test-bucket/users/test-user/images/" ) @@ -154,7 +156,7 @@ async def test_upload_media_gif_success(mock_settings, mock_storage_client): headers=starlette.datastructures.Headers({"content-type": "image/gif"}), ) - result = await backend.server.v2.store.media.upload_media("test-user", test_file) + result = await store_media.upload_media("test-user", test_file) assert result.startswith( "https://storage.googleapis.com/test-bucket/users/test-user/images/" ) @@ -168,7 +170,7 @@ async def test_upload_media_webp_success(mock_settings, mock_storage_client): headers=starlette.datastructures.Headers({"content-type": "image/webp"}), ) - result = await backend.server.v2.store.media.upload_media("test-user", test_file) + result = await store_media.upload_media("test-user", test_file) assert result.startswith( "https://storage.googleapis.com/test-bucket/users/test-user/images/" ) @@ -182,7 +184,7 @@ async def test_upload_media_webm_success(mock_settings, mock_storage_client): headers=starlette.datastructures.Headers({"content-type": "video/webm"}), ) - result = await backend.server.v2.store.media.upload_media("test-user", test_file) + result = await store_media.upload_media("test-user", test_file) assert result.startswith( "https://storage.googleapis.com/test-bucket/users/test-user/videos/" ) @@ -196,8 +198,8 @@ async def test_upload_media_mismatched_signature(mock_settings, mock_storage_cli headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}), ) - with pytest.raises(backend.server.v2.store.exceptions.InvalidFileTypeError): - await backend.server.v2.store.media.upload_media("test-user", test_file) + with pytest.raises(store_exceptions.InvalidFileTypeError): + await store_media.upload_media("test-user", test_file) async def test_upload_media_invalid_signature(mock_settings, mock_storage_client): @@ -207,5 +209,5 @@ async def test_upload_media_invalid_signature(mock_settings, mock_storage_client headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}), ) - with pytest.raises(backend.server.v2.store.exceptions.InvalidFileTypeError): - await backend.server.v2.store.media.upload_media("test-user", test_file) + with pytest.raises(store_exceptions.InvalidFileTypeError): + await store_media.upload_media("test-user", test_file) diff --git a/autogpt_platform/backend/backend/server/v2/store/model.py b/autogpt_platform/backend/backend/api/features/store/model.py similarity index 100% rename from 
autogpt_platform/backend/backend/server/v2/store/model.py rename to autogpt_platform/backend/backend/api/features/store/model.py diff --git a/autogpt_platform/backend/backend/server/v2/store/model_test.py b/autogpt_platform/backend/backend/api/features/store/model_test.py similarity index 83% rename from autogpt_platform/backend/backend/server/v2/store/model_test.py rename to autogpt_platform/backend/backend/api/features/store/model_test.py index c387dfdecb..3633e6549e 100644 --- a/autogpt_platform/backend/backend/server/v2/store/model_test.py +++ b/autogpt_platform/backend/backend/api/features/store/model_test.py @@ -2,11 +2,11 @@ import datetime import prisma.enums -import backend.server.v2.store.model +from . import model as store_model def test_pagination(): - pagination = backend.server.v2.store.model.Pagination( + pagination = store_model.Pagination( total_items=100, total_pages=5, current_page=2, page_size=20 ) assert pagination.total_items == 100 @@ -16,7 +16,7 @@ def test_pagination(): def test_store_agent(): - agent = backend.server.v2.store.model.StoreAgent( + agent = store_model.StoreAgent( slug="test-agent", agent_name="Test Agent", agent_image="test.jpg", @@ -34,9 +34,9 @@ def test_store_agent(): def test_store_agents_response(): - response = backend.server.v2.store.model.StoreAgentsResponse( + response = store_model.StoreAgentsResponse( agents=[ - backend.server.v2.store.model.StoreAgent( + store_model.StoreAgent( slug="test-agent", agent_name="Test Agent", agent_image="test.jpg", @@ -48,7 +48,7 @@ def test_store_agents_response(): rating=4.5, ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( total_items=1, total_pages=1, current_page=1, page_size=20 ), ) @@ -57,7 +57,7 @@ def test_store_agents_response(): def test_store_agent_details(): - details = backend.server.v2.store.model.StoreAgentDetails( + details = store_model.StoreAgentDetails( store_listing_version_id="version123", slug="test-agent", agent_name="Test Agent", @@ -81,7 +81,7 @@ def test_store_agent_details(): def test_creator(): - creator = backend.server.v2.store.model.Creator( + creator = store_model.Creator( agent_rating=4.8, agent_runs=1000, name="Test Creator", @@ -96,9 +96,9 @@ def test_creator(): def test_creators_response(): - response = backend.server.v2.store.model.CreatorsResponse( + response = store_model.CreatorsResponse( creators=[ - backend.server.v2.store.model.Creator( + store_model.Creator( agent_rating=4.8, agent_runs=1000, name="Test Creator", @@ -109,7 +109,7 @@ def test_creators_response(): is_featured=False, ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( total_items=1, total_pages=1, current_page=1, page_size=20 ), ) @@ -118,7 +118,7 @@ def test_creators_response(): def test_creator_details(): - details = backend.server.v2.store.model.CreatorDetails( + details = store_model.CreatorDetails( name="Test Creator", username="creator1", description="Test description", @@ -135,7 +135,7 @@ def test_creator_details(): def test_store_submission(): - submission = backend.server.v2.store.model.StoreSubmission( + submission = store_model.StoreSubmission( agent_id="agent123", agent_version=1, sub_heading="Test subheading", @@ -154,9 +154,9 @@ def test_store_submission(): def test_store_submissions_response(): - response = backend.server.v2.store.model.StoreSubmissionsResponse( + response = store_model.StoreSubmissionsResponse( submissions=[ - backend.server.v2.store.model.StoreSubmission( + 
store_model.StoreSubmission( agent_id="agent123", agent_version=1, sub_heading="Test subheading", @@ -170,7 +170,7 @@ def test_store_submissions_response(): rating=4.5, ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( total_items=1, total_pages=1, current_page=1, page_size=20 ), ) @@ -179,7 +179,7 @@ def test_store_submissions_response(): def test_store_submission_request(): - request = backend.server.v2.store.model.StoreSubmissionRequest( + request = store_model.StoreSubmissionRequest( agent_id="agent123", agent_version=1, slug="test-agent", diff --git a/autogpt_platform/backend/backend/server/v2/store/routes.py b/autogpt_platform/backend/backend/api/features/store/routes.py similarity index 87% rename from autogpt_platform/backend/backend/server/v2/store/routes.py rename to autogpt_platform/backend/backend/api/features/store/routes.py index b0c1df6e22..6a9bb05291 100644 --- a/autogpt_platform/backend/backend/server/v2/store/routes.py +++ b/autogpt_platform/backend/backend/api/features/store/routes.py @@ -9,14 +9,14 @@ import fastapi import fastapi.responses import backend.data.graph -import backend.server.v2.store.cache as store_cache -import backend.server.v2.store.db -import backend.server.v2.store.exceptions -import backend.server.v2.store.image_gen -import backend.server.v2.store.media -import backend.server.v2.store.model import backend.util.json +from . import cache as store_cache +from . import db as store_db +from . import image_gen as store_image_gen +from . import media as store_media +from . import model as store_model + logger = logging.getLogger(__name__) router = fastapi.APIRouter() @@ -32,7 +32,7 @@ router = fastapi.APIRouter() summary="Get user profile", tags=["store", "private"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], - response_model=backend.server.v2.store.model.ProfileDetails, + response_model=store_model.ProfileDetails, ) async def get_profile( user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id), @@ -41,7 +41,7 @@ async def get_profile( Get the profile details for the authenticated user. Cached for 1 hour per user. 
""" - profile = await backend.server.v2.store.db.get_user_profile(user_id) + profile = await store_db.get_user_profile(user_id) if profile is None: return fastapi.responses.JSONResponse( status_code=404, @@ -55,10 +55,10 @@ async def get_profile( summary="Update user profile", tags=["store", "private"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], - response_model=backend.server.v2.store.model.CreatorDetails, + response_model=store_model.CreatorDetails, ) async def update_or_create_profile( - profile: backend.server.v2.store.model.Profile, + profile: store_model.Profile, user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id), ): """ @@ -74,9 +74,7 @@ async def update_or_create_profile( Raises: HTTPException: If there is an error updating the profile """ - updated_profile = await backend.server.v2.store.db.update_profile( - user_id=user_id, profile=profile - ) + updated_profile = await store_db.update_profile(user_id=user_id, profile=profile) return updated_profile @@ -89,7 +87,7 @@ async def update_or_create_profile( "/agents", summary="List store agents", tags=["store", "public"], - response_model=backend.server.v2.store.model.StoreAgentsResponse, + response_model=store_model.StoreAgentsResponse, ) async def get_agents( featured: bool = False, @@ -152,7 +150,7 @@ async def get_agents( "/agents/{username}/{agent_name}", summary="Get specific agent", tags=["store", "public"], - response_model=backend.server.v2.store.model.StoreAgentDetails, + response_model=store_model.StoreAgentDetails, ) async def get_agent(username: str, agent_name: str): """ @@ -179,9 +177,7 @@ async def get_graph_meta_by_store_listing_version_id(store_listing_version_id: s """ Get Agent Graph from Store Listing Version ID. """ - graph = await backend.server.v2.store.db.get_available_graph( - store_listing_version_id - ) + graph = await store_db.get_available_graph(store_listing_version_id) return graph @@ -190,15 +186,13 @@ async def get_graph_meta_by_store_listing_version_id(store_listing_version_id: s summary="Get agent by version", tags=["store"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], - response_model=backend.server.v2.store.model.StoreAgentDetails, + response_model=store_model.StoreAgentDetails, ) async def get_store_agent(store_listing_version_id: str): """ Get Store Agent Details from Store Listing Version ID. 
""" - agent = await backend.server.v2.store.db.get_store_agent_by_version_id( - store_listing_version_id - ) + agent = await store_db.get_store_agent_by_version_id(store_listing_version_id) return agent @@ -208,12 +202,12 @@ async def get_store_agent(store_listing_version_id: str): summary="Create agent review", tags=["store"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], - response_model=backend.server.v2.store.model.StoreReview, + response_model=store_model.StoreReview, ) async def create_review( username: str, agent_name: str, - review: backend.server.v2.store.model.StoreReviewCreate, + review: store_model.StoreReviewCreate, user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id), ): """ @@ -231,7 +225,7 @@ async def create_review( username = urllib.parse.unquote(username).lower() agent_name = urllib.parse.unquote(agent_name).lower() # Create the review - created_review = await backend.server.v2.store.db.create_store_review( + created_review = await store_db.create_store_review( user_id=user_id, store_listing_version_id=review.store_listing_version_id, score=review.score, @@ -250,7 +244,7 @@ async def create_review( "/creators", summary="List store creators", tags=["store", "public"], - response_model=backend.server.v2.store.model.CreatorsResponse, + response_model=store_model.CreatorsResponse, ) async def get_creators( featured: bool = False, @@ -295,7 +289,7 @@ async def get_creators( "/creator/{username}", summary="Get creator details", tags=["store", "public"], - response_model=backend.server.v2.store.model.CreatorDetails, + response_model=store_model.CreatorDetails, ) async def get_creator( username: str, @@ -319,7 +313,7 @@ async def get_creator( summary="Get my agents", tags=["store", "private"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], - response_model=backend.server.v2.store.model.MyAgentsResponse, + response_model=store_model.MyAgentsResponse, ) async def get_my_agents( user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id), @@ -329,9 +323,7 @@ async def get_my_agents( """ Get user's own agents. 
""" - agents = await backend.server.v2.store.db.get_my_agents( - user_id, page=page, page_size=page_size - ) + agents = await store_db.get_my_agents(user_id, page=page, page_size=page_size) return agents @@ -356,7 +348,7 @@ async def delete_submission( Returns: bool: True if the submission was successfully deleted, False otherwise """ - result = await backend.server.v2.store.db.delete_store_submission( + result = await store_db.delete_store_submission( user_id=user_id, submission_id=submission_id, ) @@ -369,7 +361,7 @@ async def delete_submission( summary="List my submissions", tags=["store", "private"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], - response_model=backend.server.v2.store.model.StoreSubmissionsResponse, + response_model=store_model.StoreSubmissionsResponse, ) async def get_submissions( user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id), @@ -399,7 +391,7 @@ async def get_submissions( raise fastapi.HTTPException( status_code=422, detail="Page size must be greater than 0" ) - listings = await backend.server.v2.store.db.get_store_submissions( + listings = await store_db.get_store_submissions( user_id=user_id, page=page, page_size=page_size, @@ -412,10 +404,10 @@ async def get_submissions( summary="Create store submission", tags=["store", "private"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], - response_model=backend.server.v2.store.model.StoreSubmission, + response_model=store_model.StoreSubmission, ) async def create_submission( - submission_request: backend.server.v2.store.model.StoreSubmissionRequest, + submission_request: store_model.StoreSubmissionRequest, user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id), ): """ @@ -431,7 +423,7 @@ async def create_submission( Raises: HTTPException: If there is an error creating the submission """ - result = await backend.server.v2.store.db.create_store_submission( + result = await store_db.create_store_submission( user_id=user_id, agent_id=submission_request.agent_id, agent_version=submission_request.agent_version, @@ -456,11 +448,11 @@ async def create_submission( summary="Edit store submission", tags=["store", "private"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], - response_model=backend.server.v2.store.model.StoreSubmission, + response_model=store_model.StoreSubmission, ) async def edit_submission( store_listing_version_id: str, - submission_request: backend.server.v2.store.model.StoreSubmissionEditRequest, + submission_request: store_model.StoreSubmissionEditRequest, user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id), ): """ @@ -477,7 +469,7 @@ async def edit_submission( Raises: HTTPException: If there is an error editing the submission """ - result = await backend.server.v2.store.db.edit_store_submission( + result = await store_db.edit_store_submission( user_id=user_id, store_listing_version_id=store_listing_version_id, name=submission_request.name, @@ -518,9 +510,7 @@ async def upload_submission_media( Raises: HTTPException: If there is an error uploading the media """ - media_url = await backend.server.v2.store.media.upload_media( - user_id=user_id, file=file - ) + media_url = await store_media.upload_media(user_id=user_id, file=file) return media_url @@ -555,14 +545,12 @@ async def generate_image( # Use .jpeg here since we are generating JPEG images filename = f"agent_{agent_id}.jpeg" - existing_url = await backend.server.v2.store.media.check_media_exists( - user_id, filename - ) + existing_url = await 
store_media.check_media_exists(user_id, filename) if existing_url: logger.info(f"Using existing image for agent {agent_id}") return fastapi.responses.JSONResponse(content={"image_url": existing_url}) # Generate agent image as JPEG - image = await backend.server.v2.store.image_gen.generate_agent_image(agent=agent) + image = await store_image_gen.generate_agent_image(agent=agent) # Create UploadFile with the correct filename and content_type image_file = fastapi.UploadFile( @@ -570,7 +558,7 @@ async def generate_image( filename=filename, ) - image_url = await backend.server.v2.store.media.upload_media( + image_url = await store_media.upload_media( user_id=user_id, file=image_file, use_file_name=True ) @@ -599,7 +587,7 @@ async def download_agent_file( Raises: HTTPException: If the agent is not found or an unexpected error occurs. """ - graph_data = await backend.server.v2.store.db.get_agent(store_listing_version_id) + graph_data = await store_db.get_agent(store_listing_version_id) file_name = f"agent_{graph_data.id}_v{graph_data.version or 'latest'}.json" # Sending graph as a stream (similar to marketplace v1) diff --git a/autogpt_platform/backend/backend/server/v2/store/routes_test.py b/autogpt_platform/backend/backend/api/features/store/routes_test.py similarity index 76% rename from autogpt_platform/backend/backend/server/v2/store/routes_test.py rename to autogpt_platform/backend/backend/api/features/store/routes_test.py index 03322ee988..b9c040c149 100644 --- a/autogpt_platform/backend/backend/server/v2/store/routes_test.py +++ b/autogpt_platform/backend/backend/api/features/store/routes_test.py @@ -8,15 +8,15 @@ import pytest import pytest_mock from pytest_snapshot.plugin import Snapshot -import backend.server.v2.store.model -import backend.server.v2.store.routes +from . import model as store_model +from . 
import routes as store_routes # Using a fixed timestamp for reproducible tests # 2023 date is intentionally used to ensure tests work regardless of current year FIXED_NOW = datetime.datetime(2023, 1, 1, 0, 0, 0) app = fastapi.FastAPI() -app.include_router(backend.server.v2.store.routes.router) +app.include_router(store_routes.router) client = fastapi.testclient.TestClient(app) @@ -35,23 +35,21 @@ def test_get_agents_defaults( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.StoreAgentsResponse( + mocked_value = store_model.StoreAgentsResponse( agents=[], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=0, total_items=0, total_pages=0, page_size=10, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents") mock_db_call.return_value = mocked_value response = client.get("/agents") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreAgentsResponse.model_validate( - response.json() - ) + data = store_model.StoreAgentsResponse.model_validate(response.json()) assert data.pagination.total_pages == 0 assert data.agents == [] @@ -72,9 +70,9 @@ def test_get_agents_featured( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.StoreAgentsResponse( + mocked_value = store_model.StoreAgentsResponse( agents=[ - backend.server.v2.store.model.StoreAgent( + store_model.StoreAgent( slug="featured-agent", agent_name="Featured Agent", agent_image="featured.jpg", @@ -86,20 +84,18 @@ def test_get_agents_featured( rating=4.5, ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=1, total_items=1, total_pages=1, page_size=20, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents") mock_db_call.return_value = mocked_value response = client.get("/agents?featured=true") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreAgentsResponse.model_validate( - response.json() - ) + data = store_model.StoreAgentsResponse.model_validate(response.json()) assert len(data.agents) == 1 assert data.agents[0].slug == "featured-agent" snapshot.snapshot_dir = "snapshots" @@ -119,9 +115,9 @@ def test_get_agents_by_creator( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.StoreAgentsResponse( + mocked_value = store_model.StoreAgentsResponse( agents=[ - backend.server.v2.store.model.StoreAgent( + store_model.StoreAgent( slug="creator-agent", agent_name="Creator Agent", agent_image="agent.jpg", @@ -133,20 +129,18 @@ def test_get_agents_by_creator( rating=4.0, ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=1, total_items=1, total_pages=1, page_size=20, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents") mock_db_call.return_value = mocked_value response = client.get("/agents?creator=specific-creator") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreAgentsResponse.model_validate( - response.json() - ) + data = store_model.StoreAgentsResponse.model_validate(response.json()) assert 
len(data.agents) == 1 assert data.agents[0].creator == "specific-creator" snapshot.snapshot_dir = "snapshots" @@ -166,9 +160,9 @@ def test_get_agents_sorted( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.StoreAgentsResponse( + mocked_value = store_model.StoreAgentsResponse( agents=[ - backend.server.v2.store.model.StoreAgent( + store_model.StoreAgent( slug="top-agent", agent_name="Top Agent", agent_image="top.jpg", @@ -180,20 +174,18 @@ def test_get_agents_sorted( rating=5.0, ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=1, total_items=1, total_pages=1, page_size=20, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents") mock_db_call.return_value = mocked_value response = client.get("/agents?sorted_by=runs") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreAgentsResponse.model_validate( - response.json() - ) + data = store_model.StoreAgentsResponse.model_validate(response.json()) assert len(data.agents) == 1 assert data.agents[0].runs == 1000 snapshot.snapshot_dir = "snapshots" @@ -213,9 +205,9 @@ def test_get_agents_search( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.StoreAgentsResponse( + mocked_value = store_model.StoreAgentsResponse( agents=[ - backend.server.v2.store.model.StoreAgent( + store_model.StoreAgent( slug="search-agent", agent_name="Search Agent", agent_image="search.jpg", @@ -227,20 +219,18 @@ def test_get_agents_search( rating=4.2, ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=1, total_items=1, total_pages=1, page_size=20, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents") mock_db_call.return_value = mocked_value response = client.get("/agents?search_query=specific") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreAgentsResponse.model_validate( - response.json() - ) + data = store_model.StoreAgentsResponse.model_validate(response.json()) assert len(data.agents) == 1 assert "specific" in data.agents[0].description.lower() snapshot.snapshot_dir = "snapshots" @@ -260,9 +250,9 @@ def test_get_agents_category( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.StoreAgentsResponse( + mocked_value = store_model.StoreAgentsResponse( agents=[ - backend.server.v2.store.model.StoreAgent( + store_model.StoreAgent( slug="category-agent", agent_name="Category Agent", agent_image="category.jpg", @@ -274,20 +264,18 @@ def test_get_agents_category( rating=4.1, ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=1, total_items=1, total_pages=1, page_size=20, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents") mock_db_call.return_value = mocked_value response = client.get("/agents?category=test-category") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreAgentsResponse.model_validate( - response.json() - ) + data = store_model.StoreAgentsResponse.model_validate(response.json()) assert len(data.agents) == 1 
snapshot.snapshot_dir = "snapshots" snapshot.assert_match(json.dumps(response.json(), indent=2), "agts_category") @@ -306,9 +294,9 @@ def test_get_agents_pagination( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.StoreAgentsResponse( + mocked_value = store_model.StoreAgentsResponse( agents=[ - backend.server.v2.store.model.StoreAgent( + store_model.StoreAgent( slug=f"agent-{i}", agent_name=f"Agent {i}", agent_image=f"agent{i}.jpg", @@ -321,20 +309,18 @@ def test_get_agents_pagination( ) for i in range(5) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=2, total_items=15, total_pages=3, page_size=5, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents") mock_db_call.return_value = mocked_value response = client.get("/agents?page=2&page_size=5") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreAgentsResponse.model_validate( - response.json() - ) + data = store_model.StoreAgentsResponse.model_validate(response.json()) assert len(data.agents) == 5 assert data.pagination.current_page == 2 assert data.pagination.page_size == 5 @@ -365,7 +351,7 @@ def test_get_agents_malformed_request(mocker: pytest_mock.MockFixture): assert response.status_code == 422 # Verify no DB calls were made - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents") mock_db_call.assert_not_called() @@ -373,7 +359,7 @@ def test_get_agent_details( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.StoreAgentDetails( + mocked_value = store_model.StoreAgentDetails( store_listing_version_id="test-version-id", slug="test-agent", agent_name="Test Agent", @@ -390,15 +376,13 @@ def test_get_agent_details( versions=["1.0.0", "1.1.0"], last_updated=FIXED_NOW, ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agent_details") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agent_details") mock_db_call.return_value = mocked_value response = client.get("/agents/creator1/test-agent") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreAgentDetails.model_validate( - response.json() - ) + data = store_model.StoreAgentDetails.model_validate(response.json()) assert data.agent_name == "Test Agent" assert data.creator == "creator1" snapshot.snapshot_dir = "snapshots" @@ -410,24 +394,22 @@ def test_get_creators_defaults( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.CreatorsResponse( + mocked_value = store_model.CreatorsResponse( creators=[], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=0, total_items=0, total_pages=0, page_size=10, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_creators") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_creators") mock_db_call.return_value = mocked_value response = client.get("/creators") assert response.status_code == 200 - data = backend.server.v2.store.model.CreatorsResponse.model_validate( - response.json() - ) + data = store_model.CreatorsResponse.model_validate(response.json()) assert data.pagination.total_pages == 0 assert data.creators == [] 
snapshot.snapshot_dir = "snapshots" @@ -441,9 +423,9 @@ def test_get_creators_pagination( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.CreatorsResponse( + mocked_value = store_model.CreatorsResponse( creators=[ - backend.server.v2.store.model.Creator( + store_model.Creator( name=f"Creator {i}", username=f"creator{i}", description=f"Creator {i} description", @@ -455,22 +437,20 @@ def test_get_creators_pagination( ) for i in range(5) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=2, total_items=15, total_pages=3, page_size=5, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_creators") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_creators") mock_db_call.return_value = mocked_value response = client.get("/creators?page=2&page_size=5") assert response.status_code == 200 - data = backend.server.v2.store.model.CreatorsResponse.model_validate( - response.json() - ) + data = store_model.CreatorsResponse.model_validate(response.json()) assert len(data.creators) == 5 assert data.pagination.current_page == 2 assert data.pagination.page_size == 5 @@ -495,7 +475,7 @@ def test_get_creators_malformed_request(mocker: pytest_mock.MockFixture): assert response.status_code == 422 # Verify no DB calls were made - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_creators") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_creators") mock_db_call.assert_not_called() @@ -503,7 +483,7 @@ def test_get_creator_details( mocker: pytest_mock.MockFixture, snapshot: Snapshot, ) -> None: - mocked_value = backend.server.v2.store.model.CreatorDetails( + mocked_value = store_model.CreatorDetails( name="Test User", username="creator1", description="Test creator description", @@ -513,13 +493,15 @@ def test_get_creator_details( agent_runs=1000, top_categories=["category1", "category2"], ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_creator_details") + mock_db_call = mocker.patch( + "backend.api.features.store.db.get_store_creator_details" + ) mock_db_call.return_value = mocked_value response = client.get("/creator/creator1") assert response.status_code == 200 - data = backend.server.v2.store.model.CreatorDetails.model_validate(response.json()) + data = store_model.CreatorDetails.model_validate(response.json()) assert data.username == "creator1" assert data.name == "Test User" snapshot.snapshot_dir = "snapshots" @@ -532,9 +514,9 @@ def test_get_submissions_success( snapshot: Snapshot, test_user_id: str, ) -> None: - mocked_value = backend.server.v2.store.model.StoreSubmissionsResponse( + mocked_value = store_model.StoreSubmissionsResponse( submissions=[ - backend.server.v2.store.model.StoreSubmission( + store_model.StoreSubmission( name="Test Agent", description="Test agent description", image_urls=["test.jpg"], @@ -550,22 +532,20 @@ def test_get_submissions_success( categories=["test-category"], ) ], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=1, total_items=1, total_pages=1, page_size=20, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_submissions") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_submissions") mock_db_call.return_value = mocked_value response = client.get("/submissions") assert response.status_code == 200 - data = 
backend.server.v2.store.model.StoreSubmissionsResponse.model_validate( - response.json() - ) + data = store_model.StoreSubmissionsResponse.model_validate(response.json()) assert len(data.submissions) == 1 assert data.submissions[0].name == "Test Agent" assert data.pagination.current_page == 1 @@ -579,24 +559,22 @@ def test_get_submissions_pagination( snapshot: Snapshot, test_user_id: str, ) -> None: - mocked_value = backend.server.v2.store.model.StoreSubmissionsResponse( + mocked_value = store_model.StoreSubmissionsResponse( submissions=[], - pagination=backend.server.v2.store.model.Pagination( + pagination=store_model.Pagination( current_page=2, total_items=10, total_pages=2, page_size=5, ), ) - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_submissions") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_submissions") mock_db_call.return_value = mocked_value response = client.get("/submissions?page=2&page_size=5") assert response.status_code == 200 - data = backend.server.v2.store.model.StoreSubmissionsResponse.model_validate( - response.json() - ) + data = store_model.StoreSubmissionsResponse.model_validate(response.json()) assert data.pagination.current_page == 2 assert data.pagination.page_size == 5 snapshot.snapshot_dir = "snapshots" @@ -618,5 +596,5 @@ def test_get_submissions_malformed_request(mocker: pytest_mock.MockFixture): assert response.status_code == 422 # Verify no DB calls were made - mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_submissions") + mock_db_call = mocker.patch("backend.api.features.store.db.get_store_submissions") mock_db_call.assert_not_called() diff --git a/autogpt_platform/backend/backend/server/v2/store/test_cache_delete.py b/autogpt_platform/backend/backend/api/features/store/test_cache_delete.py similarity index 96% rename from autogpt_platform/backend/backend/server/v2/store/test_cache_delete.py rename to autogpt_platform/backend/backend/api/features/store/test_cache_delete.py index 4111de0ee8..dd9be1f4ab 100644 --- a/autogpt_platform/backend/backend/server/v2/store/test_cache_delete.py +++ b/autogpt_platform/backend/backend/api/features/store/test_cache_delete.py @@ -8,10 +8,11 @@ from unittest.mock import AsyncMock, patch import pytest -from backend.server.v2.store import cache as store_cache -from backend.server.v2.store.model import StoreAgent, StoreAgentsResponse from backend.util.models import Pagination +from . 
import cache as store_cache +from .model import StoreAgent, StoreAgentsResponse + class TestCacheDeletion: """Test cache deletion functionality for store routes.""" @@ -43,7 +44,7 @@ class TestCacheDeletion: ) with patch( - "backend.server.v2.store.db.get_store_agents", + "backend.api.features.store.db.get_store_agents", new_callable=AsyncMock, return_value=mock_response, ) as mock_db: @@ -152,7 +153,7 @@ class TestCacheDeletion: ) with patch( - "backend.server.v2.store.db.get_store_agents", + "backend.api.features.store.db.get_store_agents", new_callable=AsyncMock, return_value=mock_response, ): @@ -203,7 +204,7 @@ class TestCacheDeletion: ) with patch( - "backend.server.v2.store.db.get_store_agents", + "backend.api.features.store.db.get_store_agents", new_callable=AsyncMock, return_value=mock_response, ) as mock_db: diff --git a/autogpt_platform/backend/backend/server/routers/v1.py b/autogpt_platform/backend/backend/api/features/v1.py similarity index 98% rename from autogpt_platform/backend/backend/server/routers/v1.py rename to autogpt_platform/backend/backend/api/features/v1.py index e5e74690f8..9b05b4755f 100644 --- a/autogpt_platform/backend/backend/server/routers/v1.py +++ b/autogpt_platform/backend/backend/api/features/v1.py @@ -28,9 +28,18 @@ from pydantic import BaseModel from starlette.status import HTTP_204_NO_CONTENT, HTTP_404_NOT_FOUND from typing_extensions import Optional, TypedDict -import backend.server.integrations.router -import backend.server.routers.analytics -import backend.server.v2.library.db as library_db +from backend.api.model import ( + CreateAPIKeyRequest, + CreateAPIKeyResponse, + CreateGraph, + GraphExecutionSource, + RequestTopUp, + SetGraphActiveVersion, + TimezoneResponse, + UpdatePermissionsRequest, + UpdateTimezoneRequest, + UploadFileResponse, +) from backend.data import execution as execution_db from backend.data import graph as graph_db from backend.data.auth import api_key as api_key_db @@ -79,19 +88,6 @@ from backend.monitoring.instrumentation import ( record_graph_execution, record_graph_operation, ) -from backend.server.model import ( - CreateAPIKeyRequest, - CreateAPIKeyResponse, - CreateGraph, - GraphExecutionSource, - RequestTopUp, - SetGraphActiveVersion, - TimezoneResponse, - UpdatePermissionsRequest, - UpdateTimezoneRequest, - UploadFileResponse, -) -from backend.server.v2.store.model import StoreAgentDetails from backend.util.cache import cached from backend.util.clients import get_scheduler_client from backend.util.cloud_storage import get_cloud_storage_handler @@ -105,6 +101,10 @@ from backend.util.timezone_utils import ( ) from backend.util.virus_scanner import scan_content_safe +from .library import db as library_db +from .library import model as library_model +from .store.model import StoreAgentDetails + def _create_file_size_error(size_bytes: int, max_size_mb: int) -> HTTPException: """Create standardized file size error response.""" @@ -118,76 +118,9 @@ settings = Settings() logger = logging.getLogger(__name__) -async def hide_activity_summaries_if_disabled( - executions: list[execution_db.GraphExecutionMeta], user_id: str -) -> list[execution_db.GraphExecutionMeta]: - """Hide activity summaries and scores if AI_ACTIVITY_STATUS feature is disabled.""" - if await is_feature_enabled(Flag.AI_ACTIVITY_STATUS, user_id): - return executions # Return as-is if feature is enabled - - # Filter out activity features if disabled - filtered_executions = [] - for execution in executions: - if execution.stats: - filtered_stats = 
execution.stats.without_activity_features() - execution = execution.model_copy(update={"stats": filtered_stats}) - filtered_executions.append(execution) - return filtered_executions - - -async def hide_activity_summary_if_disabled( - execution: execution_db.GraphExecution | execution_db.GraphExecutionWithNodes, - user_id: str, -) -> execution_db.GraphExecution | execution_db.GraphExecutionWithNodes: - """Hide activity summary and score for a single execution if AI_ACTIVITY_STATUS feature is disabled.""" - if await is_feature_enabled(Flag.AI_ACTIVITY_STATUS, user_id): - return execution # Return as-is if feature is enabled - - # Filter out activity features if disabled - if execution.stats: - filtered_stats = execution.stats.without_activity_features() - return execution.model_copy(update={"stats": filtered_stats}) - return execution - - -async def _update_library_agent_version_and_settings( - user_id: str, agent_graph: graph_db.GraphModel -) -> library_db.library_model.LibraryAgent: - # Keep the library agent up to date with the new active version - library = await library_db.update_agent_version_in_library( - user_id, agent_graph.id, agent_graph.version - ) - # If the graph has HITL node, initialize the setting if it's not already set. - if ( - agent_graph.has_human_in_the_loop - and library.settings.human_in_the_loop_safe_mode is None - ): - await library_db.update_library_agent_settings( - user_id=user_id, - agent_id=library.id, - settings=library.settings.model_copy( - update={"human_in_the_loop_safe_mode": True} - ), - ) - return library - - # Define the API routes v1_router = APIRouter() -v1_router.include_router( - backend.server.integrations.router.router, - prefix="/integrations", - tags=["integrations"], -) - -v1_router.include_router( - backend.server.routers.analytics.router, - prefix="/analytics", - tags=["analytics"], - dependencies=[Security(requires_user)], -) - ######################################################## ##################### Auth ############################# @@ -953,6 +886,28 @@ async def set_graph_active_version( await on_graph_deactivate(current_active_graph, user_id=user_id) +async def _update_library_agent_version_and_settings( + user_id: str, agent_graph: graph_db.GraphModel +) -> library_model.LibraryAgent: + # Keep the library agent up to date with the new active version + library = await library_db.update_agent_version_in_library( + user_id, agent_graph.id, agent_graph.version + ) + # If the graph has HITL node, initialize the setting if it's not already set. 
+ if ( + agent_graph.has_human_in_the_loop + and library.settings.human_in_the_loop_safe_mode is None + ): + await library_db.update_library_agent_settings( + user_id=user_id, + agent_id=library.id, + settings=library.settings.model_copy( + update={"human_in_the_loop_safe_mode": True} + ), + ) + return library + + @v1_router.patch( path="/graphs/{graph_id}/settings", summary="Update graph settings", @@ -1155,6 +1110,23 @@ async def list_graph_executions( ) +async def hide_activity_summaries_if_disabled( + executions: list[execution_db.GraphExecutionMeta], user_id: str +) -> list[execution_db.GraphExecutionMeta]: + """Hide activity summaries and scores if AI_ACTIVITY_STATUS feature is disabled.""" + if await is_feature_enabled(Flag.AI_ACTIVITY_STATUS, user_id): + return executions # Return as-is if feature is enabled + + # Filter out activity features if disabled + filtered_executions = [] + for execution in executions: + if execution.stats: + filtered_stats = execution.stats.without_activity_features() + execution = execution.model_copy(update={"stats": filtered_stats}) + filtered_executions.append(execution) + return filtered_executions + + @v1_router.get( path="/graphs/{graph_id}/executions/{graph_exec_id}", summary="Get execution details", @@ -1197,6 +1169,21 @@ async def get_graph_execution( return result +async def hide_activity_summary_if_disabled( + execution: execution_db.GraphExecution | execution_db.GraphExecutionWithNodes, + user_id: str, +) -> execution_db.GraphExecution | execution_db.GraphExecutionWithNodes: + """Hide activity summary and score for a single execution if AI_ACTIVITY_STATUS feature is disabled.""" + if await is_feature_enabled(Flag.AI_ACTIVITY_STATUS, user_id): + return execution # Return as-is if feature is enabled + + # Filter out activity features if disabled + if execution.stats: + filtered_stats = execution.stats.without_activity_features() + return execution.model_copy(update={"stats": filtered_stats}) + return execution + + @v1_router.delete( path="/executions/{graph_exec_id}", summary="Delete graph execution", @@ -1257,7 +1244,7 @@ async def enable_execution_sharing( ) # Return the share URL - frontend_url = Settings().config.frontend_base_url or "http://localhost:3000" + frontend_url = settings.config.frontend_base_url or "http://localhost:3000" share_url = f"{frontend_url}/share/{share_token}" return ShareResponse(share_url=share_url, share_token=share_token) diff --git a/autogpt_platform/backend/backend/server/routers/v1_test.py b/autogpt_platform/backend/backend/api/features/v1_test.py similarity index 91% rename from autogpt_platform/backend/backend/server/routers/v1_test.py rename to autogpt_platform/backend/backend/api/features/v1_test.py index 69e1b5f2ae..a186d38810 100644 --- a/autogpt_platform/backend/backend/server/routers/v1_test.py +++ b/autogpt_platform/backend/backend/api/features/v1_test.py @@ -11,13 +11,13 @@ import starlette.datastructures from fastapi import HTTPException, UploadFile from pytest_snapshot.plugin import Snapshot -import backend.server.routers.v1 as v1_routes from backend.data.credit import AutoTopUpConfig from backend.data.graph import GraphModel -from backend.server.routers.v1 import upload_file + +from .v1 import upload_file, v1_router app = fastapi.FastAPI() -app.include_router(v1_routes.v1_router) +app.include_router(v1_router) client = fastapi.testclient.TestClient(app) @@ -50,7 +50,7 @@ def test_get_or_create_user_route( } mocker.patch( - "backend.server.routers.v1.get_or_create_user", + 
"backend.api.features.v1.get_or_create_user", return_value=mock_user, ) @@ -71,7 +71,7 @@ def test_update_user_email_route( ) -> None: """Test update user email endpoint""" mocker.patch( - "backend.server.routers.v1.update_user_email", + "backend.api.features.v1.update_user_email", return_value=None, ) @@ -107,7 +107,7 @@ def test_get_graph_blocks( # Mock get_blocks mocker.patch( - "backend.server.routers.v1.get_blocks", + "backend.api.features.v1.get_blocks", return_value={"test-block": lambda: mock_block}, ) @@ -146,7 +146,7 @@ def test_execute_graph_block( mock_block.execute = mock_execute mocker.patch( - "backend.server.routers.v1.get_block", + "backend.api.features.v1.get_block", return_value=mock_block, ) @@ -155,7 +155,7 @@ def test_execute_graph_block( mock_user.timezone = "UTC" mocker.patch( - "backend.server.routers.v1.get_user_by_id", + "backend.api.features.v1.get_user_by_id", return_value=mock_user, ) @@ -181,7 +181,7 @@ def test_execute_graph_block_not_found( ) -> None: """Test execute block with non-existent block""" mocker.patch( - "backend.server.routers.v1.get_block", + "backend.api.features.v1.get_block", return_value=None, ) @@ -200,7 +200,7 @@ def test_get_user_credits( mock_credit_model = Mock() mock_credit_model.get_credits = AsyncMock(return_value=1000) mocker.patch( - "backend.server.routers.v1.get_user_credit_model", + "backend.api.features.v1.get_user_credit_model", return_value=mock_credit_model, ) @@ -227,7 +227,7 @@ def test_request_top_up( return_value="https://checkout.example.com/session123" ) mocker.patch( - "backend.server.routers.v1.get_user_credit_model", + "backend.api.features.v1.get_user_credit_model", return_value=mock_credit_model, ) @@ -254,7 +254,7 @@ def test_get_auto_top_up( mock_config = AutoTopUpConfig(threshold=100, amount=500) mocker.patch( - "backend.server.routers.v1.get_auto_top_up", + "backend.api.features.v1.get_auto_top_up", return_value=mock_config, ) @@ -279,7 +279,7 @@ def test_configure_auto_top_up( """Test configure auto top-up endpoint - this test would have caught the enum casting bug""" # Mock the set_auto_top_up function to avoid database operations mocker.patch( - "backend.server.routers.v1.set_auto_top_up", + "backend.api.features.v1.set_auto_top_up", return_value=None, ) @@ -289,7 +289,7 @@ def test_configure_auto_top_up( mock_credit_model.top_up_credits.return_value = None mocker.patch( - "backend.server.routers.v1.get_user_credit_model", + "backend.api.features.v1.get_user_credit_model", return_value=mock_credit_model, ) @@ -311,7 +311,7 @@ def test_configure_auto_top_up_validation_errors( ) -> None: """Test configure auto top-up endpoint validation""" # Mock set_auto_top_up to avoid database operations for successful case - mocker.patch("backend.server.routers.v1.set_auto_top_up") + mocker.patch("backend.api.features.v1.set_auto_top_up") # Mock credit model to avoid Stripe API calls for the successful case mock_credit_model = mocker.AsyncMock() @@ -319,7 +319,7 @@ def test_configure_auto_top_up_validation_errors( mock_credit_model.top_up_credits.return_value = None mocker.patch( - "backend.server.routers.v1.get_user_credit_model", + "backend.api.features.v1.get_user_credit_model", return_value=mock_credit_model, ) @@ -393,7 +393,7 @@ def test_get_graph( ) mocker.patch( - "backend.server.routers.v1.graph_db.get_graph", + "backend.api.features.v1.graph_db.get_graph", return_value=mock_graph, ) @@ -415,7 +415,7 @@ def test_get_graph_not_found( ) -> None: """Test get graph with non-existent ID""" mocker.patch( - 
"backend.server.routers.v1.graph_db.get_graph", + "backend.api.features.v1.graph_db.get_graph", return_value=None, ) @@ -443,15 +443,15 @@ def test_delete_graph( ) mocker.patch( - "backend.server.routers.v1.graph_db.get_graph", + "backend.api.features.v1.graph_db.get_graph", return_value=mock_graph, ) mocker.patch( - "backend.server.routers.v1.on_graph_deactivate", + "backend.api.features.v1.on_graph_deactivate", return_value=None, ) mocker.patch( - "backend.server.routers.v1.graph_db.delete_graph", + "backend.api.features.v1.graph_db.delete_graph", return_value=3, # Number of versions deleted ) @@ -498,8 +498,8 @@ async def test_upload_file_success(test_user_id: str): ) # Mock dependencies - with patch("backend.server.routers.v1.scan_content_safe") as mock_scan, patch( - "backend.server.routers.v1.get_cloud_storage_handler" + with patch("backend.api.features.v1.scan_content_safe") as mock_scan, patch( + "backend.api.features.v1.get_cloud_storage_handler" ) as mock_handler_getter: mock_scan.return_value = None @@ -550,8 +550,8 @@ async def test_upload_file_no_filename(test_user_id: str): ), ) - with patch("backend.server.routers.v1.scan_content_safe") as mock_scan, patch( - "backend.server.routers.v1.get_cloud_storage_handler" + with patch("backend.api.features.v1.scan_content_safe") as mock_scan, patch( + "backend.api.features.v1.get_cloud_storage_handler" ) as mock_handler_getter: mock_scan.return_value = None @@ -610,7 +610,7 @@ async def test_upload_file_virus_scan_failure(test_user_id: str): headers=starlette.datastructures.Headers({"content-type": "text/plain"}), ) - with patch("backend.server.routers.v1.scan_content_safe") as mock_scan: + with patch("backend.api.features.v1.scan_content_safe") as mock_scan: # Mock virus scan to raise exception mock_scan.side_effect = RuntimeError("Virus detected!") @@ -631,8 +631,8 @@ async def test_upload_file_cloud_storage_failure(test_user_id: str): headers=starlette.datastructures.Headers({"content-type": "text/plain"}), ) - with patch("backend.server.routers.v1.scan_content_safe") as mock_scan, patch( - "backend.server.routers.v1.get_cloud_storage_handler" + with patch("backend.api.features.v1.scan_content_safe") as mock_scan, patch( + "backend.api.features.v1.get_cloud_storage_handler" ) as mock_handler_getter: mock_scan.return_value = None @@ -678,8 +678,8 @@ async def test_upload_file_gcs_not_configured_fallback(test_user_id: str): headers=starlette.datastructures.Headers({"content-type": "text/plain"}), ) - with patch("backend.server.routers.v1.scan_content_safe") as mock_scan, patch( - "backend.server.routers.v1.get_cloud_storage_handler" + with patch("backend.api.features.v1.scan_content_safe") as mock_scan, patch( + "backend.api.features.v1.get_cloud_storage_handler" ) as mock_handler_getter: mock_scan.return_value = None diff --git a/autogpt_platform/backend/backend/server/middleware/security.py b/autogpt_platform/backend/backend/api/middleware/security.py similarity index 100% rename from autogpt_platform/backend/backend/server/middleware/security.py rename to autogpt_platform/backend/backend/api/middleware/security.py diff --git a/autogpt_platform/backend/backend/server/middleware/security_test.py b/autogpt_platform/backend/backend/api/middleware/security_test.py similarity index 98% rename from autogpt_platform/backend/backend/server/middleware/security_test.py rename to autogpt_platform/backend/backend/api/middleware/security_test.py index 462e5b27ed..57137afc9a 100644 --- 
a/autogpt_platform/backend/backend/server/middleware/security_test.py +++ b/autogpt_platform/backend/backend/api/middleware/security_test.py @@ -3,7 +3,7 @@ from fastapi import FastAPI from fastapi.testclient import TestClient from starlette.applications import Starlette -from backend.server.middleware.security import SecurityHeadersMiddleware +from backend.api.middleware.security import SecurityHeadersMiddleware @pytest.fixture diff --git a/autogpt_platform/backend/backend/server/model.py b/autogpt_platform/backend/backend/api/model.py similarity index 100% rename from autogpt_platform/backend/backend/server/model.py rename to autogpt_platform/backend/backend/api/model.py diff --git a/autogpt_platform/backend/backend/server/rest_api.py b/autogpt_platform/backend/backend/api/rest_api.py similarity index 78% rename from autogpt_platform/backend/backend/server/rest_api.py rename to autogpt_platform/backend/backend/api/rest_api.py index 5db2b18c27..147f62e781 100644 --- a/autogpt_platform/backend/backend/server/rest_api.py +++ b/autogpt_platform/backend/backend/api/rest_api.py @@ -16,36 +16,33 @@ from fastapi.middleware.gzip import GZipMiddleware from fastapi.routing import APIRoute from prisma.errors import PrismaError +import backend.api.features.admin.credit_admin_routes +import backend.api.features.admin.execution_analytics_routes +import backend.api.features.admin.store_admin_routes +import backend.api.features.builder +import backend.api.features.builder.routes +import backend.api.features.chat.routes as chat_routes +import backend.api.features.executions.review.routes +import backend.api.features.library.db +import backend.api.features.library.model +import backend.api.features.library.routes +import backend.api.features.oauth +import backend.api.features.otto.routes +import backend.api.features.postmark.postmark +import backend.api.features.store.model +import backend.api.features.store.routes +import backend.api.features.v1 import backend.data.block import backend.data.db import backend.data.graph import backend.data.user import backend.integrations.webhooks.utils -import backend.server.routers.oauth -import backend.server.routers.postmark.postmark -import backend.server.routers.v1 -import backend.server.v2.admin.credit_admin_routes -import backend.server.v2.admin.execution_analytics_routes -import backend.server.v2.admin.store_admin_routes -import backend.server.v2.builder -import backend.server.v2.builder.routes -import backend.server.v2.chat.routes as chat_routes -import backend.server.v2.executions.review.routes -import backend.server.v2.library.db -import backend.server.v2.library.model -import backend.server.v2.library.routes -import backend.server.v2.otto.routes -import backend.server.v2.store.model -import backend.server.v2.store.routes import backend.util.service import backend.util.settings from backend.blocks.llm import LlmModel from backend.data.model import Credentials from backend.integrations.providers import ProviderName from backend.monitoring.instrumentation import instrument_fastapi -from backend.server.external.api import external_app -from backend.server.middleware.security import SecurityHeadersMiddleware -from backend.server.utils.cors import build_cors_params from backend.util import json from backend.util.cloud_storage import shutdown_cloud_storage_handler from backend.util.exceptions import ( @@ -56,6 +53,13 @@ from backend.util.exceptions import ( from backend.util.feature_flag import initialize_launchdarkly, shutdown_launchdarkly from backend.util.service 
import UnhealthyServiceError +from .external.fastapi_app import external_api +from .features.analytics import router as analytics_router +from .features.integrations.router import router as integrations_router +from .middleware.security import SecurityHeadersMiddleware +from .utils.cors import build_cors_params +from .utils.openapi import sort_openapi + settings = backend.util.settings.Settings() logger = logging.getLogger(__name__) @@ -176,6 +180,9 @@ app.add_middleware(GZipMiddleware, minimum_size=50_000) # 50KB threshold # Add 401 responses to authenticated endpoints in OpenAPI spec add_auth_responses_to_openapi(app) +# Sort OpenAPI schema to eliminate diff on refactors +sort_openapi(app) + # Add Prometheus instrumentation instrument_fastapi( app, @@ -254,42 +261,52 @@ app.add_exception_handler(MissingConfigError, handle_internal_http_error(503)) app.add_exception_handler(ValueError, handle_internal_http_error(400)) app.add_exception_handler(Exception, handle_internal_http_error(500)) -app.include_router(backend.server.routers.v1.v1_router, tags=["v1"], prefix="/api") +app.include_router(backend.api.features.v1.v1_router, tags=["v1"], prefix="/api") app.include_router( - backend.server.v2.store.routes.router, tags=["v2"], prefix="/api/store" + integrations_router, + prefix="/api/integrations", + tags=["v1", "integrations"], ) app.include_router( - backend.server.v2.builder.routes.router, tags=["v2"], prefix="/api/builder" + analytics_router, + prefix="/api/analytics", + tags=["analytics"], ) app.include_router( - backend.server.v2.admin.store_admin_routes.router, + backend.api.features.store.routes.router, tags=["v2"], prefix="/api/store" +) +app.include_router( + backend.api.features.builder.routes.router, tags=["v2"], prefix="/api/builder" +) +app.include_router( + backend.api.features.admin.store_admin_routes.router, tags=["v2", "admin"], prefix="/api/store", ) app.include_router( - backend.server.v2.admin.credit_admin_routes.router, + backend.api.features.admin.credit_admin_routes.router, tags=["v2", "admin"], prefix="/api/credits", ) app.include_router( - backend.server.v2.admin.execution_analytics_routes.router, + backend.api.features.admin.execution_analytics_routes.router, tags=["v2", "admin"], prefix="/api/executions", ) app.include_router( - backend.server.v2.executions.review.routes.router, + backend.api.features.executions.review.routes.router, tags=["v2", "executions", "review"], prefix="/api/review", ) app.include_router( - backend.server.v2.library.routes.router, tags=["v2"], prefix="/api/library" + backend.api.features.library.routes.router, tags=["v2"], prefix="/api/library" ) app.include_router( - backend.server.v2.otto.routes.router, tags=["v2", "otto"], prefix="/api/otto" + backend.api.features.otto.routes.router, tags=["v2", "otto"], prefix="/api/otto" ) app.include_router( - backend.server.routers.postmark.postmark.router, + backend.api.features.postmark.postmark.router, tags=["v1", "email"], prefix="/api/email", ) @@ -299,12 +316,12 @@ app.include_router( prefix="/api/chat", ) app.include_router( - backend.server.routers.oauth.router, + backend.api.features.oauth.router, tags=["oauth"], prefix="/api/oauth", ) -app.mount("/external-api", external_app) +app.mount("/external-api", external_api) @app.get(path="/health", tags=["health"], dependencies=[]) @@ -357,7 +374,7 @@ class AgentServer(backend.util.service.AppProcess): graph_version: Optional[int] = None, node_input: Optional[dict[str, Any]] = None, ): - return await backend.server.routers.v1.execute_graph( + 
return await backend.api.features.v1.execute_graph( user_id=user_id, graph_id=graph_id, graph_version=graph_version, @@ -372,16 +389,16 @@ class AgentServer(backend.util.service.AppProcess): user_id: str, for_export: bool = False, ): - return await backend.server.routers.v1.get_graph( + return await backend.api.features.v1.get_graph( graph_id, user_id, graph_version, for_export ) @staticmethod async def test_create_graph( - create_graph: backend.server.routers.v1.CreateGraph, + create_graph: backend.api.features.v1.CreateGraph, user_id: str, ): - return await backend.server.routers.v1.create_new_graph(create_graph, user_id) + return await backend.api.features.v1.create_new_graph(create_graph, user_id) @staticmethod async def test_get_graph_run_status(graph_exec_id: str, user_id: str): @@ -397,45 +414,45 @@ class AgentServer(backend.util.service.AppProcess): @staticmethod async def test_delete_graph(graph_id: str, user_id: str): """Used for clean-up after a test run""" - await backend.server.v2.library.db.delete_library_agent_by_graph_id( + await backend.api.features.library.db.delete_library_agent_by_graph_id( graph_id=graph_id, user_id=user_id ) - return await backend.server.routers.v1.delete_graph(graph_id, user_id) + return await backend.api.features.v1.delete_graph(graph_id, user_id) @staticmethod async def test_get_presets(user_id: str, page: int = 1, page_size: int = 10): - return await backend.server.v2.library.routes.presets.list_presets( + return await backend.api.features.library.routes.presets.list_presets( user_id=user_id, page=page, page_size=page_size ) @staticmethod async def test_get_preset(preset_id: str, user_id: str): - return await backend.server.v2.library.routes.presets.get_preset( + return await backend.api.features.library.routes.presets.get_preset( preset_id=preset_id, user_id=user_id ) @staticmethod async def test_create_preset( - preset: backend.server.v2.library.model.LibraryAgentPresetCreatable, + preset: backend.api.features.library.model.LibraryAgentPresetCreatable, user_id: str, ): - return await backend.server.v2.library.routes.presets.create_preset( + return await backend.api.features.library.routes.presets.create_preset( preset=preset, user_id=user_id ) @staticmethod async def test_update_preset( preset_id: str, - preset: backend.server.v2.library.model.LibraryAgentPresetUpdatable, + preset: backend.api.features.library.model.LibraryAgentPresetUpdatable, user_id: str, ): - return await backend.server.v2.library.routes.presets.update_preset( + return await backend.api.features.library.routes.presets.update_preset( preset_id=preset_id, preset=preset, user_id=user_id ) @staticmethod async def test_delete_preset(preset_id: str, user_id: str): - return await backend.server.v2.library.routes.presets.delete_preset( + return await backend.api.features.library.routes.presets.delete_preset( preset_id=preset_id, user_id=user_id ) @@ -445,7 +462,7 @@ class AgentServer(backend.util.service.AppProcess): user_id: str, inputs: Optional[dict[str, Any]] = None, ): - return await backend.server.v2.library.routes.presets.execute_preset( + return await backend.api.features.library.routes.presets.execute_preset( preset_id=preset_id, user_id=user_id, inputs=inputs or {}, @@ -454,18 +471,20 @@ class AgentServer(backend.util.service.AppProcess): @staticmethod async def test_create_store_listing( - request: backend.server.v2.store.model.StoreSubmissionRequest, user_id: str + request: backend.api.features.store.model.StoreSubmissionRequest, user_id: str ): - return await 
backend.server.v2.store.routes.create_submission(request, user_id) + return await backend.api.features.store.routes.create_submission( + request, user_id + ) ### ADMIN ### @staticmethod async def test_review_store_listing( - request: backend.server.v2.store.model.ReviewSubmissionRequest, + request: backend.api.features.store.model.ReviewSubmissionRequest, user_id: str, ): - return await backend.server.v2.admin.store_admin_routes.review_submission( + return await backend.api.features.admin.store_admin_routes.review_submission( request.store_listing_version_id, request, user_id ) @@ -475,10 +494,7 @@ class AgentServer(backend.util.service.AppProcess): provider: ProviderName, credentials: Credentials, ) -> Credentials: - from backend.server.integrations.router import ( - create_credentials, - get_credential, - ) + from .features.integrations.router import create_credentials, get_credential try: return await create_credentials( diff --git a/autogpt_platform/backend/backend/server/test_helpers.py b/autogpt_platform/backend/backend/api/test_helpers.py similarity index 100% rename from autogpt_platform/backend/backend/server/test_helpers.py rename to autogpt_platform/backend/backend/api/test_helpers.py diff --git a/autogpt_platform/backend/backend/server/utils/api_key_auth.py b/autogpt_platform/backend/backend/api/utils/api_key_auth.py similarity index 100% rename from autogpt_platform/backend/backend/server/utils/api_key_auth.py rename to autogpt_platform/backend/backend/api/utils/api_key_auth.py diff --git a/autogpt_platform/backend/backend/server/utils/api_key_auth_test.py b/autogpt_platform/backend/backend/api/utils/api_key_auth_test.py similarity index 99% rename from autogpt_platform/backend/backend/server/utils/api_key_auth_test.py rename to autogpt_platform/backend/backend/api/utils/api_key_auth_test.py index df6af6633c..39c3150561 100644 --- a/autogpt_platform/backend/backend/server/utils/api_key_auth_test.py +++ b/autogpt_platform/backend/backend/api/utils/api_key_auth_test.py @@ -8,7 +8,7 @@ import pytest from fastapi import HTTPException, Request from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN -from backend.server.utils.api_key_auth import APIKeyAuthenticator +from backend.api.utils.api_key_auth import APIKeyAuthenticator from backend.util.exceptions import MissingConfigError diff --git a/autogpt_platform/backend/backend/server/utils/cors.py b/autogpt_platform/backend/backend/api/utils/cors.py similarity index 100% rename from autogpt_platform/backend/backend/server/utils/cors.py rename to autogpt_platform/backend/backend/api/utils/cors.py diff --git a/autogpt_platform/backend/backend/server/utils/cors_test.py b/autogpt_platform/backend/backend/api/utils/cors_test.py similarity index 97% rename from autogpt_platform/backend/backend/server/utils/cors_test.py rename to autogpt_platform/backend/backend/api/utils/cors_test.py index 94b3f17cfc..011974383b 100644 --- a/autogpt_platform/backend/backend/server/utils/cors_test.py +++ b/autogpt_platform/backend/backend/api/utils/cors_test.py @@ -1,6 +1,6 @@ import pytest -from backend.server.utils.cors import build_cors_params +from backend.api.utils.cors import build_cors_params from backend.util.settings import AppEnvironment diff --git a/autogpt_platform/backend/backend/api/utils/openapi.py b/autogpt_platform/backend/backend/api/utils/openapi.py new file mode 100644 index 0000000000..757b220fd0 --- /dev/null +++ b/autogpt_platform/backend/backend/api/utils/openapi.py @@ -0,0 +1,41 @@ +from fastapi import FastAPI + + 
+def sort_openapi(app: FastAPI) -> None: + """ + Patch a FastAPI instance's `openapi()` method to sort the endpoints, + schemas, and responses. + """ + wrapped_openapi = app.openapi + + def custom_openapi(): + if app.openapi_schema: + return app.openapi_schema + + openapi_schema = wrapped_openapi() + + # Sort endpoints + openapi_schema["paths"] = dict(sorted(openapi_schema["paths"].items())) + + # Sort endpoints -> methods + for p in openapi_schema["paths"].keys(): + openapi_schema["paths"][p] = dict( + sorted(openapi_schema["paths"][p].items()) + ) + + # Sort endpoints -> methods -> responses + for m in openapi_schema["paths"][p].keys(): + openapi_schema["paths"][p][m]["responses"] = dict( + sorted(openapi_schema["paths"][p][m]["responses"].items()) + ) + + # Sort schemas and responses as well + for k in openapi_schema["components"].keys(): + openapi_schema["components"][k] = dict( + sorted(openapi_schema["components"][k].items()) + ) + + app.openapi_schema = openapi_schema + return openapi_schema + + app.openapi = custom_openapi diff --git a/autogpt_platform/backend/backend/server/ws_api.py b/autogpt_platform/backend/backend/api/ws_api.py similarity index 98% rename from autogpt_platform/backend/backend/server/ws_api.py rename to autogpt_platform/backend/backend/api/ws_api.py index 344fd7e1a6..b71fdb3526 100644 --- a/autogpt_platform/backend/backend/server/ws_api.py +++ b/autogpt_platform/backend/backend/api/ws_api.py @@ -9,6 +9,14 @@ from autogpt_libs.auth.jwt_utils import parse_jwt_token from fastapi import Depends, FastAPI, WebSocket, WebSocketDisconnect from starlette.middleware.cors import CORSMiddleware +from backend.api.conn_manager import ConnectionManager +from backend.api.model import ( + WSMessage, + WSMethod, + WSSubscribeGraphExecutionRequest, + WSSubscribeGraphExecutionsRequest, +) +from backend.api.utils.cors import build_cors_params from backend.data.execution import AsyncRedisExecutionEventBus from backend.data.notification_bus import AsyncRedisNotificationEventBus from backend.data.user import DEFAULT_USER_ID @@ -16,14 +24,6 @@ from backend.monitoring.instrumentation import ( instrument_fastapi, update_websocket_connections, ) -from backend.server.conn_manager import ConnectionManager -from backend.server.model import ( - WSMessage, - WSMethod, - WSSubscribeGraphExecutionRequest, - WSSubscribeGraphExecutionsRequest, -) -from backend.server.utils.cors import build_cors_params from backend.util.retry import continuous_retry from backend.util.service import AppProcess from backend.util.settings import AppEnvironment, Config, Settings diff --git a/autogpt_platform/backend/backend/server/ws_api_test.py b/autogpt_platform/backend/backend/api/ws_api_test.py similarity index 92% rename from autogpt_platform/backend/backend/server/ws_api_test.py rename to autogpt_platform/backend/backend/api/ws_api_test.py index 0bc9902145..edab1bbded 100644 --- a/autogpt_platform/backend/backend/server/ws_api_test.py +++ b/autogpt_platform/backend/backend/api/ws_api_test.py @@ -6,17 +6,17 @@ import pytest from fastapi import WebSocket, WebSocketDisconnect from pytest_snapshot.plugin import Snapshot -from backend.data.user import DEFAULT_USER_ID -from backend.server.conn_manager import ConnectionManager -from backend.server.test_helpers import override_config -from backend.server.ws_api import AppEnvironment, WebsocketServer, WSMessage, WSMethod -from backend.server.ws_api import app as websocket_app -from backend.server.ws_api import ( +from backend.api.conn_manager import ConnectionManager 
+from backend.api.test_helpers import override_config +from backend.api.ws_api import AppEnvironment, WebsocketServer, WSMessage, WSMethod +from backend.api.ws_api import app as websocket_app +from backend.api.ws_api import ( handle_subscribe, handle_unsubscribe, settings, websocket_router, ) +from backend.data.user import DEFAULT_USER_ID @pytest.fixture @@ -36,12 +36,12 @@ def test_websocket_server_uses_cors_helper(mocker) -> None: "allow_origins": ["https://app.example.com"], "allow_origin_regex": None, } - mocker.patch("backend.server.ws_api.uvicorn.run") + mocker.patch("backend.api.ws_api.uvicorn.run") cors_middleware = mocker.patch( - "backend.server.ws_api.CORSMiddleware", return_value=object() + "backend.api.ws_api.CORSMiddleware", return_value=object() ) build_cors = mocker.patch( - "backend.server.ws_api.build_cors_params", return_value=cors_params + "backend.api.ws_api.build_cors_params", return_value=cors_params ) with override_config( @@ -63,7 +63,7 @@ def test_websocket_server_uses_cors_helper(mocker) -> None: def test_websocket_server_blocks_localhost_in_production(mocker) -> None: - mocker.patch("backend.server.ws_api.uvicorn.run") + mocker.patch("backend.api.ws_api.uvicorn.run") with override_config( settings, "backend_cors_allow_origins", ["http://localhost:3000"] @@ -78,7 +78,7 @@ async def test_websocket_router_subscribe( ) -> None: # Mock the authenticate_websocket function to ensure it returns a valid user_id mocker.patch( - "backend.server.ws_api.authenticate_websocket", return_value=DEFAULT_USER_ID + "backend.api.ws_api.authenticate_websocket", return_value=DEFAULT_USER_ID ) mock_websocket.receive_text.side_effect = [ @@ -128,7 +128,7 @@ async def test_websocket_router_unsubscribe( ) -> None: # Mock the authenticate_websocket function to ensure it returns a valid user_id mocker.patch( - "backend.server.ws_api.authenticate_websocket", return_value=DEFAULT_USER_ID + "backend.api.ws_api.authenticate_websocket", return_value=DEFAULT_USER_ID ) mock_websocket.receive_text.side_effect = [ @@ -175,7 +175,7 @@ async def test_websocket_router_invalid_method( ) -> None: # Mock the authenticate_websocket function to ensure it returns a valid user_id mocker.patch( - "backend.server.ws_api.authenticate_websocket", return_value=DEFAULT_USER_ID + "backend.api.ws_api.authenticate_websocket", return_value=DEFAULT_USER_ID ) mock_websocket.receive_text.side_effect = [ diff --git a/autogpt_platform/backend/backend/app.py b/autogpt_platform/backend/backend/app.py index 596962ae0b..0afed130ed 100644 --- a/autogpt_platform/backend/backend/app.py +++ b/autogpt_platform/backend/backend/app.py @@ -36,10 +36,10 @@ def main(**kwargs): Run all the processes required for the AutoGPT-server (REST and WebSocket APIs). 
""" + from backend.api.rest_api import AgentServer + from backend.api.ws_api import WebsocketServer from backend.executor import DatabaseManager, ExecutionManager, Scheduler from backend.notifications import NotificationManager - from backend.server.rest_api import AgentServer - from backend.server.ws_api import WebsocketServer run_processes( DatabaseManager().set_log_level("warning"), diff --git a/autogpt_platform/backend/backend/blocks/test/test_smart_decision_maker.py b/autogpt_platform/backend/backend/blocks/test/test_smart_decision_maker.py index 29f572d0d6..deff4278f9 100644 --- a/autogpt_platform/backend/backend/blocks/test/test_smart_decision_maker.py +++ b/autogpt_platform/backend/backend/blocks/test/test_smart_decision_maker.py @@ -5,10 +5,10 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest +from backend.api.model import CreateGraph +from backend.api.rest_api import AgentServer from backend.data.execution import ExecutionContext from backend.data.model import ProviderName, User -from backend.server.model import CreateGraph -from backend.server.rest_api import AgentServer from backend.usecases.sample import create_test_graph, create_test_user from backend.util.test import SpinTestServer, wait_execution diff --git a/autogpt_platform/backend/backend/cli.py b/autogpt_platform/backend/backend/cli.py index 0fc1bc53c1..d6eaca1dd0 100755 --- a/autogpt_platform/backend/backend/cli.py +++ b/autogpt_platform/backend/backend/cli.py @@ -244,11 +244,7 @@ def websocket(server_address: str, graph_exec_id: str): import websockets.asyncio.client - from backend.server.ws_api import ( - WSMessage, - WSMethod, - WSSubscribeGraphExecutionRequest, - ) + from backend.api.ws_api import WSMessage, WSMethod, WSSubscribeGraphExecutionRequest async def send_message(server_address: str): uri = f"ws://{server_address}" diff --git a/autogpt_platform/backend/backend/cli/generate_openapi_json.py b/autogpt_platform/backend/backend/cli/generate_openapi_json.py index 313e603c44..de74c0b5d2 100644 --- a/autogpt_platform/backend/backend/cli/generate_openapi_json.py +++ b/autogpt_platform/backend/backend/cli/generate_openapi_json.py @@ -2,7 +2,7 @@ """ Script to generate OpenAPI JSON specification for the FastAPI app. -This script imports the FastAPI app from backend.server.rest_api and outputs +This script imports the FastAPI app from backend.api.rest_api and outputs the OpenAPI specification as JSON to stdout or a specified file. 
Usage: @@ -46,7 +46,7 @@ def main(output: Path, pretty: bool): def get_openapi_schema(): """Get the OpenAPI schema from the FastAPI app""" - from backend.server.rest_api import app + from backend.api.rest_api import app return app.openapi() diff --git a/autogpt_platform/backend/backend/data/__init__.py b/autogpt_platform/backend/backend/data/__init__.py index 31ab09a5df..c98667e362 100644 --- a/autogpt_platform/backend/backend/data/__init__.py +++ b/autogpt_platform/backend/backend/data/__init__.py @@ -1,4 +1,4 @@ -from backend.server.v2.library.model import LibraryAgentPreset +from backend.api.features.library.model import LibraryAgentPreset from .graph import NodeModel from .integrations import Webhook # noqa: F401 diff --git a/autogpt_platform/backend/backend/data/credit.py b/autogpt_platform/backend/backend/data/credit.py index a8942d3b2e..95f0b158e1 100644 --- a/autogpt_platform/backend/backend/data/credit.py +++ b/autogpt_platform/backend/backend/data/credit.py @@ -16,6 +16,7 @@ from prisma.models import CreditRefundRequest, CreditTransaction, User, UserBala from prisma.types import CreditRefundRequestCreateInput, CreditTransactionWhereInput from pydantic import BaseModel +from backend.api.features.admin.model import UserHistoryResponse from backend.data.block_cost_config import BLOCK_COSTS from backend.data.db import query_raw_with_schema from backend.data.includes import MAX_CREDIT_REFUND_REQUESTS_FETCH @@ -29,7 +30,6 @@ from backend.data.model import ( from backend.data.notifications import NotificationEventModel, RefundRequestData from backend.data.user import get_user_by_id, get_user_email_by_id from backend.notifications.notifications import queue_notification_async -from backend.server.v2.admin.model import UserHistoryResponse from backend.util.exceptions import InsufficientBalanceError from backend.util.feature_flag import Flag, is_feature_enabled from backend.util.json import SafeJson, dumps diff --git a/autogpt_platform/backend/backend/data/db.py b/autogpt_platform/backend/backend/data/db.py index b64ce4ef5c..31a27e9163 100644 --- a/autogpt_platform/backend/backend/data/db.py +++ b/autogpt_platform/backend/backend/data/db.py @@ -111,7 +111,7 @@ def get_database_schema() -> str: async def query_raw_with_schema(query_template: str, *args) -> list[dict]: """Execute raw SQL query with proper schema handling.""" schema = get_database_schema() - schema_prefix = f"{schema}." if schema != "public" else "" + schema_prefix = f'"{schema}".' 
if schema != "public" else "" formatted_query = query_template.format(schema_prefix=schema_prefix) import prisma as prisma_module diff --git a/autogpt_platform/backend/backend/data/graph_test.py b/autogpt_platform/backend/backend/data/graph_test.py index e570392658..044d75e0ca 100644 --- a/autogpt_platform/backend/backend/data/graph_test.py +++ b/autogpt_platform/backend/backend/data/graph_test.py @@ -6,14 +6,14 @@ import fastapi.exceptions import pytest from pytest_snapshot.plugin import Snapshot -import backend.server.v2.store.model as store +import backend.api.features.store.model as store +from backend.api.model import CreateGraph from backend.blocks.basic import StoreValueBlock from backend.blocks.io import AgentInputBlock, AgentOutputBlock from backend.data.block import BlockSchema, BlockSchemaInput from backend.data.graph import Graph, Link, Node from backend.data.model import SchemaField from backend.data.user import DEFAULT_USER_ID -from backend.server.model import CreateGraph from backend.usecases.sample import create_test_user from backend.util.test import SpinTestServer diff --git a/autogpt_platform/backend/backend/data/human_review.py b/autogpt_platform/backend/backend/data/human_review.py index 11b87ec1dd..de7a30759e 100644 --- a/autogpt_platform/backend/backend/data/human_review.py +++ b/autogpt_platform/backend/backend/data/human_review.py @@ -13,7 +13,7 @@ from prisma.models import PendingHumanReview from prisma.types import PendingHumanReviewUpdateInput from pydantic import BaseModel -from backend.server.v2.executions.review.model import ( +from backend.api.features.executions.review.model import ( PendingHumanReviewModel, SafeJsonData, ) diff --git a/autogpt_platform/backend/backend/data/integrations.py b/autogpt_platform/backend/backend/data/integrations.py index 0f328e81b7..5f44f928bd 100644 --- a/autogpt_platform/backend/backend/data/integrations.py +++ b/autogpt_platform/backend/backend/data/integrations.py @@ -23,7 +23,7 @@ from backend.util.exceptions import NotFoundError from backend.util.json import SafeJson if TYPE_CHECKING: - from backend.server.v2.library.model import LibraryAgentPreset + from backend.api.features.library.model import LibraryAgentPreset from .db import BaseDbModel from .graph import NodeModel @@ -79,7 +79,7 @@ class WebhookWithRelations(Webhook): # integrations.py → library/model.py → integrations.py (for Webhook) # Runtime import is used in WebhookWithRelations.from_db() method instead # Import at runtime to avoid circular dependency - from backend.server.v2.library.model import LibraryAgentPreset + from backend.api.features.library.model import LibraryAgentPreset return WebhookWithRelations( **Webhook.from_db(webhook).model_dump(), @@ -285,8 +285,8 @@ async def unlink_webhook_from_graph( user_id: The ID of the user (for authorization) """ # Avoid circular imports + from backend.api.features.library.db import set_preset_webhook from backend.data.graph import set_node_webhook - from backend.server.v2.library.db import set_preset_webhook # Find all nodes in this graph that use this webhook nodes = await AgentNode.prisma().find_many( diff --git a/autogpt_platform/backend/backend/data/notification_bus.py b/autogpt_platform/backend/backend/data/notification_bus.py index 6eb90dca12..fbd484d379 100644 --- a/autogpt_platform/backend/backend/data/notification_bus.py +++ b/autogpt_platform/backend/backend/data/notification_bus.py @@ -4,8 +4,8 @@ from typing import AsyncGenerator from pydantic import BaseModel, field_serializer +from backend.api.model 
import NotificationPayload from backend.data.event_bus import AsyncRedisEventBus -from backend.server.model import NotificationPayload from backend.util.settings import Settings diff --git a/autogpt_platform/backend/backend/data/onboarding.py b/autogpt_platform/backend/backend/data/onboarding.py index 1415c7694e..d9977e9535 100644 --- a/autogpt_platform/backend/backend/data/onboarding.py +++ b/autogpt_platform/backend/backend/data/onboarding.py @@ -9,6 +9,8 @@ from prisma.enums import OnboardingStep from prisma.models import UserOnboarding from prisma.types import UserOnboardingCreateInput, UserOnboardingUpdateInput +from backend.api.features.store.model import StoreAgentDetails +from backend.api.model import OnboardingNotificationPayload from backend.data import execution as execution_db from backend.data.credit import get_user_credit_model from backend.data.notification_bus import ( @@ -16,8 +18,6 @@ from backend.data.notification_bus import ( NotificationEvent, ) from backend.data.user import get_user_by_id -from backend.server.model import OnboardingNotificationPayload -from backend.server.v2.store.model import StoreAgentDetails from backend.util.cache import cached from backend.util.json import SafeJson from backend.util.timezone_utils import get_user_timezone_or_utc diff --git a/autogpt_platform/backend/backend/server/v2/AutoMod/__init__.py b/autogpt_platform/backend/backend/executor/automod/__init__.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/AutoMod/__init__.py rename to autogpt_platform/backend/backend/executor/automod/__init__.py diff --git a/autogpt_platform/backend/backend/server/v2/AutoMod/manager.py b/autogpt_platform/backend/backend/executor/automod/manager.py similarity index 99% rename from autogpt_platform/backend/backend/server/v2/AutoMod/manager.py rename to autogpt_platform/backend/backend/executor/automod/manager.py index 181fcec248..81001196dd 100644 --- a/autogpt_platform/backend/backend/server/v2/AutoMod/manager.py +++ b/autogpt_platform/backend/backend/executor/automod/manager.py @@ -9,16 +9,13 @@ if TYPE_CHECKING: from pydantic import ValidationError from backend.data.execution import ExecutionStatus -from backend.server.v2.AutoMod.models import ( - AutoModRequest, - AutoModResponse, - ModerationConfig, -) from backend.util.exceptions import ModerationError from backend.util.feature_flag import Flag, is_feature_enabled from backend.util.request import Requests from backend.util.settings import Settings +from .models import AutoModRequest, AutoModResponse, ModerationConfig + logger = logging.getLogger(__name__) diff --git a/autogpt_platform/backend/backend/server/v2/AutoMod/models.py b/autogpt_platform/backend/backend/executor/automod/models.py similarity index 100% rename from autogpt_platform/backend/backend/server/v2/AutoMod/models.py rename to autogpt_platform/backend/backend/executor/automod/models.py diff --git a/autogpt_platform/backend/backend/executor/database.py b/autogpt_platform/backend/backend/executor/database.py index 9c2b3970c6..af68bf526d 100644 --- a/autogpt_platform/backend/backend/executor/database.py +++ b/autogpt_platform/backend/backend/executor/database.py @@ -2,6 +2,11 @@ import logging from contextlib import asynccontextmanager from typing import TYPE_CHECKING, Callable, Concatenate, ParamSpec, TypeVar, cast +from backend.api.features.library.db import ( + add_store_agent_to_library, + list_library_agents, +) +from backend.api.features.store.db import get_store_agent_details, get_store_agents from 
backend.data import db from backend.data.analytics import ( get_accuracy_trends_and_alerts, @@ -61,8 +66,6 @@ from backend.data.user import ( get_user_notification_preference, update_user_integrations, ) -from backend.server.v2.library.db import add_store_agent_to_library, list_library_agents -from backend.server.v2.store.db import get_store_agent_details, get_store_agents from backend.util.service import ( AppService, AppServiceClient, diff --git a/autogpt_platform/backend/backend/executor/manager.py b/autogpt_platform/backend/backend/executor/manager.py index 234f8127c8..161e68b0d6 100644 --- a/autogpt_platform/backend/backend/executor/manager.py +++ b/autogpt_platform/backend/backend/executor/manager.py @@ -48,27 +48,8 @@ from backend.data.notifications import ( ZeroBalanceData, ) from backend.data.rabbitmq import SyncRabbitMQ -from backend.executor.activity_status_generator import ( - generate_activity_status_for_execution, -) -from backend.executor.utils import ( - GRACEFUL_SHUTDOWN_TIMEOUT_SECONDS, - GRAPH_EXECUTION_CANCEL_QUEUE_NAME, - GRAPH_EXECUTION_EXCHANGE, - GRAPH_EXECUTION_QUEUE_NAME, - GRAPH_EXECUTION_ROUTING_KEY, - CancelExecutionEvent, - ExecutionOutputEntry, - LogMetadata, - NodeExecutionProgress, - block_usage_cost, - create_execution_queue_config, - execution_usage_cost, - validate_exec, -) from backend.integrations.creds_manager import IntegrationCredentialsManager from backend.notifications.notifications import queue_notification -from backend.server.v2.AutoMod.manager import automod_manager from backend.util import json from backend.util.clients import ( get_async_execution_event_bus, @@ -95,7 +76,24 @@ from backend.util.retry import ( ) from backend.util.settings import Settings +from .activity_status_generator import generate_activity_status_for_execution +from .automod.manager import automod_manager from .cluster_lock import ClusterLock +from .utils import ( + GRACEFUL_SHUTDOWN_TIMEOUT_SECONDS, + GRAPH_EXECUTION_CANCEL_QUEUE_NAME, + GRAPH_EXECUTION_EXCHANGE, + GRAPH_EXECUTION_QUEUE_NAME, + GRAPH_EXECUTION_ROUTING_KEY, + CancelExecutionEvent, + ExecutionOutputEntry, + LogMetadata, + NodeExecutionProgress, + block_usage_cost, + create_execution_queue_config, + execution_usage_cost, + validate_exec, +) if TYPE_CHECKING: from backend.executor import DatabaseManagerAsyncClient, DatabaseManagerClient diff --git a/autogpt_platform/backend/backend/executor/manager_test.py b/autogpt_platform/backend/backend/executor/manager_test.py index cd543fef4e..bdfdb5d724 100644 --- a/autogpt_platform/backend/backend/executor/manager_test.py +++ b/autogpt_platform/backend/backend/executor/manager_test.py @@ -3,16 +3,16 @@ import logging import fastapi.responses import pytest -import backend.server.v2.library.model -import backend.server.v2.store.model +import backend.api.features.library.model +import backend.api.features.store.model +from backend.api.model import CreateGraph +from backend.api.rest_api import AgentServer from backend.blocks.basic import StoreValueBlock from backend.blocks.data_manipulation import FindInDictionaryBlock from backend.blocks.io import AgentInputBlock from backend.blocks.maths import CalculatorBlock, Operation from backend.data import execution, graph from backend.data.model import User -from backend.server.model import CreateGraph -from backend.server.rest_api import AgentServer from backend.usecases.sample import create_test_graph, create_test_user from backend.util.test import SpinTestServer, wait_execution @@ -356,7 +356,7 @@ async def 
test_execute_preset(server: SpinTestServer): test_graph = await create_graph(server, test_graph, test_user) # Create preset with initial values - preset = backend.server.v2.library.model.LibraryAgentPresetCreatable( + preset = backend.api.features.library.model.LibraryAgentPresetCreatable( name="Test Preset With Clash", description="Test preset with clashing input values", graph_id=test_graph.id, @@ -444,7 +444,7 @@ async def test_execute_preset_with_clash(server: SpinTestServer): test_graph = await create_graph(server, test_graph, test_user) # Create preset with initial values - preset = backend.server.v2.library.model.LibraryAgentPresetCreatable( + preset = backend.api.features.library.model.LibraryAgentPresetCreatable( name="Test Preset With Clash", description="Test preset with clashing input values", graph_id=test_graph.id, @@ -485,7 +485,7 @@ async def test_store_listing_graph(server: SpinTestServer): test_user = await create_test_user() test_graph = await create_graph(server, create_test_graph(), test_user) - store_submission_request = backend.server.v2.store.model.StoreSubmissionRequest( + store_submission_request = backend.api.features.store.model.StoreSubmissionRequest( agent_id=test_graph.id, agent_version=test_graph.version, slug=test_graph.id, @@ -514,7 +514,7 @@ async def test_store_listing_graph(server: SpinTestServer): admin_user = await create_test_user(alt_user=True) await server.agent_server.test_review_store_listing( - backend.server.v2.store.model.ReviewSubmissionRequest( + backend.api.features.store.model.ReviewSubmissionRequest( store_listing_version_id=slv_id, is_approved=True, comments="Test comments", @@ -523,7 +523,7 @@ async def test_store_listing_graph(server: SpinTestServer): ) # Add the approved store listing to the admin user's library so they can execute it - from backend.server.v2.library.db import add_store_agent_to_library + from backend.api.features.library.db import add_store_agent_to_library await add_store_agent_to_library( store_listing_version_id=slv_id, user_id=admin_user.id diff --git a/autogpt_platform/backend/backend/executor/scheduler_test.py b/autogpt_platform/backend/backend/executor/scheduler_test.py index c4fa35d46c..21acbaf0e1 100644 --- a/autogpt_platform/backend/backend/executor/scheduler_test.py +++ b/autogpt_platform/backend/backend/executor/scheduler_test.py @@ -1,7 +1,7 @@ import pytest +from backend.api.model import CreateGraph from backend.data import db -from backend.server.model import CreateGraph from backend.usecases.sample import create_test_graph, create_test_user from backend.util.clients import get_scheduler_client from backend.util.test import SpinTestServer diff --git a/autogpt_platform/backend/backend/integrations/webhooks/utils.py b/autogpt_platform/backend/backend/integrations/webhooks/utils.py index fafd641c93..79316c4c0e 100644 --- a/autogpt_platform/backend/backend/integrations/webhooks/utils.py +++ b/autogpt_platform/backend/backend/integrations/webhooks/utils.py @@ -149,10 +149,10 @@ async def setup_webhook_for_block( async def migrate_legacy_triggered_graphs(): from prisma.models import AgentGraph + from backend.api.features.library.db import create_preset + from backend.api.features.library.model import LibraryAgentPresetCreatable from backend.data.graph import AGENT_GRAPH_INCLUDE, GraphModel, set_node_webhook from backend.data.model import is_credentials_field_name - from backend.server.v2.library.db import create_preset - from backend.server.v2.library.model import LibraryAgentPresetCreatable triggered_graphs 
= [ GraphModel.from_db(_graph) diff --git a/autogpt_platform/backend/backend/rest.py b/autogpt_platform/backend/backend/rest.py index b601144c6f..96a807c125 100644 --- a/autogpt_platform/backend/backend/rest.py +++ b/autogpt_platform/backend/backend/rest.py @@ -1,5 +1,5 @@ +from backend.api.rest_api import AgentServer from backend.app import run_processes -from backend.server.rest_api import AgentServer def main(): diff --git a/autogpt_platform/backend/backend/server/external/api.py b/autogpt_platform/backend/backend/server/external/api.py deleted file mode 100644 index 3bafa63108..0000000000 --- a/autogpt_platform/backend/backend/server/external/api.py +++ /dev/null @@ -1,29 +0,0 @@ -from fastapi import FastAPI - -from backend.monitoring.instrumentation import instrument_fastapi -from backend.server.middleware.security import SecurityHeadersMiddleware - -from .routes.integrations import integrations_router -from .routes.tools import tools_router -from .routes.v1 import v1_router - -external_app = FastAPI( - title="AutoGPT External API", - description="External API for AutoGPT integrations", - docs_url="/docs", - version="1.0", -) - -external_app.add_middleware(SecurityHeadersMiddleware) -external_app.include_router(v1_router, prefix="/v1") -external_app.include_router(tools_router, prefix="/v1") -external_app.include_router(integrations_router, prefix="/v1") - -# Add Prometheus instrumentation -instrument_fastapi( - external_app, - service_name="external-api", - expose_endpoint=True, - endpoint="/metrics", - include_in_schema=True, -) diff --git a/autogpt_platform/backend/backend/server/routers/analytics_improved_test.py b/autogpt_platform/backend/backend/server/routers/analytics_improved_test.py deleted file mode 100644 index 7040faa0b5..0000000000 --- a/autogpt_platform/backend/backend/server/routers/analytics_improved_test.py +++ /dev/null @@ -1,150 +0,0 @@ -"""Example of analytics tests with improved error handling and assertions.""" - -import json -from unittest.mock import AsyncMock, Mock - -import fastapi -import fastapi.testclient -import pytest -import pytest_mock -from pytest_snapshot.plugin import Snapshot - -import backend.server.routers.analytics as analytics_routes -from backend.server.test_helpers import ( - assert_error_response_structure, - assert_mock_called_with_partial, - assert_response_status, - safe_parse_json, -) - -app = fastapi.FastAPI() -app.include_router(analytics_routes.router) - -client = fastapi.testclient.TestClient(app) - - -@pytest.fixture(autouse=True) -def setup_app_auth(mock_jwt_user): - """Setup auth overrides for all tests in this module""" - from autogpt_libs.auth.jwt_utils import get_jwt_payload - - app.dependency_overrides[get_jwt_payload] = mock_jwt_user["get_jwt_payload"] - yield - app.dependency_overrides.clear() - - -def test_log_raw_metric_success_improved( - mocker: pytest_mock.MockFixture, - configured_snapshot: Snapshot, - test_user_id: str, -) -> None: - """Test successful raw metric logging with improved assertions.""" - # Mock the analytics function - mock_result = Mock(id="metric-123-uuid") - - mock_log_metric = mocker.patch( - "backend.data.analytics.log_raw_metric", - new_callable=AsyncMock, - return_value=mock_result, - ) - - request_data = { - "metric_name": "page_load_time", - "metric_value": 2.5, - "data_string": "/dashboard", - } - - response = client.post("/log_raw_metric", json=request_data) - - # Improved assertions with better error messages - assert_response_status(response, 200, "Metric logging should succeed") - 
response_data = safe_parse_json(response, "Metric response parsing") - - assert response_data == "metric-123-uuid", f"Unexpected response: {response_data}" - - # Verify the function was called with correct parameters - assert_mock_called_with_partial( - mock_log_metric, - user_id=test_user_id, - metric_name="page_load_time", - metric_value=2.5, - data_string="/dashboard", - ) - - # Snapshot test the response - configured_snapshot.assert_match( - json.dumps({"metric_id": response_data}, indent=2, sort_keys=True), - "analytics_log_metric_success_improved", - ) - - -def test_log_raw_metric_invalid_request_improved() -> None: - """Test invalid metric request with improved error assertions.""" - # Test missing required fields - response = client.post("/log_raw_metric", json={}) - - error_data = assert_error_response_structure( - response, expected_status=422, expected_error_fields=["loc", "msg", "type"] - ) - - # Verify specific error details - detail = error_data["detail"] - assert isinstance(detail, list), "Error detail should be a list" - assert len(detail) > 0, "Should have at least one error" - - # Check that required fields are mentioned in errors - error_fields = [error["loc"][-1] for error in detail if "loc" in error] - assert "metric_name" in error_fields, "Should report missing metric_name" - assert "metric_value" in error_fields, "Should report missing metric_value" - assert "data_string" in error_fields, "Should report missing data_string" - - -def test_log_raw_metric_type_validation_improved( - mocker: pytest_mock.MockFixture, -) -> None: - """Test metric type validation with improved assertions.""" - # Mock the analytics function to avoid event loop issues - mocker.patch( - "backend.data.analytics.log_raw_metric", - new_callable=AsyncMock, - return_value=Mock(id="test-id"), - ) - - invalid_requests = [ - { - "data": { - "metric_name": "test", - "metric_value": "not_a_number", # Invalid type - "data_string": "test", - }, - "expected_error": "Input should be a valid number", - }, - { - "data": { - "metric_name": "", # Empty string - "metric_value": 1.0, - "data_string": "test", - }, - "expected_error": "String should have at least 1 character", - }, - { - "data": { - "metric_name": "test", - "metric_value": 123, # Valid number - "data_string": "", # Empty data_string - }, - "expected_error": "String should have at least 1 character", - }, - ] - - for test_case in invalid_requests: - response = client.post("/log_raw_metric", json=test_case["data"]) - - error_data = assert_error_response_structure(response, expected_status=422) - - # Check that expected error is in the response - error_text = json.dumps(error_data) - assert ( - test_case["expected_error"] in error_text - or test_case["expected_error"].lower() in error_text.lower() - ), f"Expected error '{test_case['expected_error']}' not found in: {error_text}" diff --git a/autogpt_platform/backend/backend/server/routers/analytics_parametrized_test.py b/autogpt_platform/backend/backend/server/routers/analytics_parametrized_test.py deleted file mode 100644 index 9dbf03b727..0000000000 --- a/autogpt_platform/backend/backend/server/routers/analytics_parametrized_test.py +++ /dev/null @@ -1,115 +0,0 @@ -"""Example of parametrized tests for analytics endpoints.""" - -import json -from unittest.mock import AsyncMock, Mock - -import fastapi -import fastapi.testclient -import pytest -import pytest_mock -from pytest_snapshot.plugin import Snapshot - -import backend.server.routers.analytics as analytics_routes - -app = fastapi.FastAPI() 
-app.include_router(analytics_routes.router) - -client = fastapi.testclient.TestClient(app) - - -@pytest.fixture(autouse=True) -def setup_app_auth(mock_jwt_user): - """Setup auth overrides for all tests in this module""" - from autogpt_libs.auth.jwt_utils import get_jwt_payload - - app.dependency_overrides[get_jwt_payload] = mock_jwt_user["get_jwt_payload"] - yield - app.dependency_overrides.clear() - - -@pytest.mark.parametrize( - "metric_value,metric_name,data_string,test_id", - [ - (100, "api_calls_count", "external_api", "integer_value"), - (0, "error_count", "no_errors", "zero_value"), - (-5.2, "temperature_delta", "cooling", "negative_value"), - (1.23456789, "precision_test", "float_precision", "float_precision"), - (999999999, "large_number", "max_value", "large_number"), - (0.0000001, "tiny_number", "min_value", "tiny_number"), - ], -) -def test_log_raw_metric_values_parametrized( - mocker: pytest_mock.MockFixture, - configured_snapshot: Snapshot, - metric_value: float, - metric_name: str, - data_string: str, - test_id: str, -) -> None: - """Test raw metric logging with various metric values using parametrize.""" - # Mock the analytics function - mock_result = Mock(id=f"metric-{test_id}-uuid") - - mocker.patch( - "backend.data.analytics.log_raw_metric", - new_callable=AsyncMock, - return_value=mock_result, - ) - - request_data = { - "metric_name": metric_name, - "metric_value": metric_value, - "data_string": data_string, - } - - response = client.post("/log_raw_metric", json=request_data) - - # Better error handling - assert response.status_code == 200, f"Failed for {test_id}: {response.text}" - response_data = response.json() - - # Snapshot test the response - configured_snapshot.assert_match( - json.dumps( - {"metric_id": response_data, "test_case": test_id}, indent=2, sort_keys=True - ), - f"analytics_metric_{test_id}", - ) - - -@pytest.mark.parametrize( - "invalid_data,expected_error", - [ - ({}, "Field required"), # Missing all fields - ({"metric_name": "test"}, "Field required"), # Missing metric_value - ( - {"metric_name": "test", "metric_value": "not_a_number"}, - "Input should be a valid number", - ), # Invalid type - ( - {"metric_name": "", "metric_value": 1.0, "data_string": "test"}, - "String should have at least 1 character", - ), # Empty name - ], -) -def test_log_raw_metric_invalid_requests_parametrized( - mocker: pytest_mock.MockFixture, - invalid_data: dict, - expected_error: str, -) -> None: - """Test invalid metric requests with parametrize.""" - # Mock the analytics function to avoid event loop issues - mocker.patch( - "backend.data.analytics.log_raw_metric", - new_callable=AsyncMock, - return_value=Mock(id="test-id"), - ) - - response = client.post("/log_raw_metric", json=invalid_data) - - assert response.status_code == 422 - error_detail = response.json() - assert "detail" in error_detail - # Verify error message contains expected error - error_text = json.dumps(error_detail) - assert expected_error in error_text or expected_error.lower() in error_text.lower() diff --git a/autogpt_platform/backend/backend/server/routers/analytics_test.py b/autogpt_platform/backend/backend/server/routers/analytics_test.py deleted file mode 100644 index 16ee6708dc..0000000000 --- a/autogpt_platform/backend/backend/server/routers/analytics_test.py +++ /dev/null @@ -1,284 +0,0 @@ -import json -from unittest.mock import AsyncMock, Mock - -import fastapi -import fastapi.testclient -import pytest -import pytest_mock -from pytest_snapshot.plugin import Snapshot - -import 
backend.server.routers.analytics as analytics_routes - -app = fastapi.FastAPI() -app.include_router(analytics_routes.router) - -client = fastapi.testclient.TestClient(app) - - -@pytest.fixture(autouse=True) -def setup_app_auth(mock_jwt_user): - """Setup auth overrides for all tests in this module""" - from autogpt_libs.auth.jwt_utils import get_jwt_payload - - app.dependency_overrides[get_jwt_payload] = mock_jwt_user["get_jwt_payload"] - yield - app.dependency_overrides.clear() - - -def test_log_raw_metric_success( - mocker: pytest_mock.MockFixture, - configured_snapshot: Snapshot, - test_user_id: str, -) -> None: - """Test successful raw metric logging""" - - # Mock the analytics function - mock_result = Mock(id="metric-123-uuid") - - mock_log_metric = mocker.patch( - "backend.data.analytics.log_raw_metric", - new_callable=AsyncMock, - return_value=mock_result, - ) - - request_data = { - "metric_name": "page_load_time", - "metric_value": 2.5, - "data_string": "/dashboard", - } - - response = client.post("/log_raw_metric", json=request_data) - - assert response.status_code == 200 - response_data = response.json() - assert response_data == "metric-123-uuid" - - # Verify the function was called with correct parameters - mock_log_metric.assert_called_once_with( - user_id=test_user_id, - metric_name="page_load_time", - metric_value=2.5, - data_string="/dashboard", - ) - - # Snapshot test the response - configured_snapshot.assert_match( - json.dumps({"metric_id": response.json()}, indent=2, sort_keys=True), - "analytics_log_metric_success", - ) - - -def test_log_raw_metric_various_values( - mocker: pytest_mock.MockFixture, - configured_snapshot: Snapshot, -) -> None: - """Test raw metric logging with various metric values""" - - # Mock the analytics function - mock_result = Mock(id="metric-456-uuid") - - mocker.patch( - "backend.data.analytics.log_raw_metric", - new_callable=AsyncMock, - return_value=mock_result, - ) - - # Test with integer value - request_data = { - "metric_name": "api_calls_count", - "metric_value": 100, - "data_string": "external_api", - } - - response = client.post("/log_raw_metric", json=request_data) - assert response.status_code == 200 - - # Test with zero value - request_data = { - "metric_name": "error_count", - "metric_value": 0, - "data_string": "no_errors", - } - - response = client.post("/log_raw_metric", json=request_data) - assert response.status_code == 200 - - # Test with negative value - request_data = { - "metric_name": "temperature_delta", - "metric_value": -5.2, - "data_string": "cooling", - } - - response = client.post("/log_raw_metric", json=request_data) - assert response.status_code == 200 - - # Snapshot the last response - configured_snapshot.assert_match( - json.dumps({"metric_id": response.json()}, indent=2, sort_keys=True), - "analytics_log_metric_various_values", - ) - - -def test_log_raw_analytics_success( - mocker: pytest_mock.MockFixture, - configured_snapshot: Snapshot, - test_user_id: str, -) -> None: - """Test successful raw analytics logging""" - - # Mock the analytics function - mock_result = Mock(id="analytics-789-uuid") - - mock_log_analytics = mocker.patch( - "backend.data.analytics.log_raw_analytics", - new_callable=AsyncMock, - return_value=mock_result, - ) - - request_data = { - "type": "user_action", - "data": { - "action": "button_click", - "button_id": "submit_form", - "timestamp": "2023-01-01T00:00:00Z", - "metadata": { - "form_type": "registration", - "fields_filled": 5, - }, - }, - "data_index": "button_click_submit_form", - } - 
- response = client.post("/log_raw_analytics", json=request_data) - - assert response.status_code == 200 - response_data = response.json() - assert response_data == "analytics-789-uuid" - - # Verify the function was called with correct parameters - mock_log_analytics.assert_called_once_with( - test_user_id, - "user_action", - request_data["data"], - "button_click_submit_form", - ) - - # Snapshot test the response - configured_snapshot.assert_match( - json.dumps({"analytics_id": response_data}, indent=2, sort_keys=True), - "analytics_log_analytics_success", - ) - - -def test_log_raw_analytics_complex_data( - mocker: pytest_mock.MockFixture, - configured_snapshot: Snapshot, -) -> None: - """Test raw analytics logging with complex nested data""" - - # Mock the analytics function - mock_result = Mock(id="analytics-complex-uuid") - - mocker.patch( - "backend.data.analytics.log_raw_analytics", - new_callable=AsyncMock, - return_value=mock_result, - ) - - request_data = { - "type": "agent_execution", - "data": { - "agent_id": "agent_123", - "execution_id": "exec_456", - "status": "completed", - "duration_ms": 3500, - "nodes_executed": 15, - "blocks_used": [ - {"block_id": "llm_block", "count": 3}, - {"block_id": "http_block", "count": 5}, - {"block_id": "code_block", "count": 2}, - ], - "errors": [], - "metadata": { - "trigger": "manual", - "user_tier": "premium", - "environment": "production", - }, - }, - "data_index": "agent_123_exec_456", - } - - response = client.post("/log_raw_analytics", json=request_data) - - assert response.status_code == 200 - response_data = response.json() - - # Snapshot test the complex data structure - configured_snapshot.assert_match( - json.dumps( - { - "analytics_id": response_data, - "logged_data": request_data["data"], - }, - indent=2, - sort_keys=True, - ), - "analytics_log_analytics_complex_data", - ) - - -def test_log_raw_metric_invalid_request() -> None: - """Test raw metric logging with invalid request data""" - # Missing required fields - response = client.post("/log_raw_metric", json={}) - assert response.status_code == 422 - - # Invalid metric_value type - response = client.post( - "/log_raw_metric", - json={ - "metric_name": "test", - "metric_value": "not_a_number", - "data_string": "test", - }, - ) - assert response.status_code == 422 - - # Missing data_string - response = client.post( - "/log_raw_metric", - json={ - "metric_name": "test", - "metric_value": 1.0, - }, - ) - assert response.status_code == 422 - - -def test_log_raw_analytics_invalid_request() -> None: - """Test raw analytics logging with invalid request data""" - # Missing required fields - response = client.post("/log_raw_analytics", json={}) - assert response.status_code == 422 - - # Invalid data type (should be dict) - response = client.post( - "/log_raw_analytics", - json={ - "type": "test", - "data": "not_a_dict", - "data_index": "test", - }, - ) - assert response.status_code == 422 - - # Missing data_index - response = client.post( - "/log_raw_analytics", - json={ - "type": "test", - "data": {"key": "value"}, - }, - ) - assert response.status_code == 422 diff --git a/autogpt_platform/backend/backend/util/test.py b/autogpt_platform/backend/backend/util/test.py index 95ea9554ed..1e8244ff8e 100644 --- a/autogpt_platform/backend/backend/util/test.py +++ b/autogpt_platform/backend/backend/util/test.py @@ -6,6 +6,7 @@ from typing import Sequence, cast from autogpt_libs.auth import get_user_id +from backend.api.rest_api import AgentServer from backend.data import db from backend.data.block 
import Block, BlockSchema, initialize_blocks from backend.data.execution import ( @@ -18,7 +19,6 @@ from backend.data.model import _BaseCredentials from backend.data.user import create_default_user from backend.executor import DatabaseManager, ExecutionManager, Scheduler from backend.notifications.notifications import NotificationManager -from backend.server.rest_api import AgentServer log = logging.getLogger(__name__) diff --git a/autogpt_platform/backend/backend/util/virus_scanner.py b/autogpt_platform/backend/backend/util/virus_scanner.py index 1ea31cac95..aa43e5f5d9 100644 --- a/autogpt_platform/backend/backend/util/virus_scanner.py +++ b/autogpt_platform/backend/backend/util/virus_scanner.py @@ -196,7 +196,7 @@ async def scan_content_safe(content: bytes, *, filename: str = "unknown") -> Non VirusDetectedError: If virus is found VirusScanError: If scanning fails """ - from backend.server.v2.store.exceptions import VirusDetectedError, VirusScanError + from backend.api.features.store.exceptions import VirusDetectedError, VirusScanError try: result = await get_virus_scanner().scan_file(content, filename=filename) diff --git a/autogpt_platform/backend/backend/util/virus_scanner_test.py b/autogpt_platform/backend/backend/util/virus_scanner_test.py index 81b5ad3342..77010c7320 100644 --- a/autogpt_platform/backend/backend/util/virus_scanner_test.py +++ b/autogpt_platform/backend/backend/util/virus_scanner_test.py @@ -3,7 +3,7 @@ from unittest.mock import AsyncMock, Mock, patch import pytest -from backend.server.v2.store.exceptions import VirusDetectedError, VirusScanError +from backend.api.features.store.exceptions import VirusDetectedError, VirusScanError from backend.util.virus_scanner import ( VirusScannerService, VirusScannerSettings, diff --git a/autogpt_platform/backend/backend/ws.py b/autogpt_platform/backend/backend/ws.py index 3b15a60eb0..77e2e82a90 100644 --- a/autogpt_platform/backend/backend/ws.py +++ b/autogpt_platform/backend/backend/ws.py @@ -1,5 +1,5 @@ +from backend.api.ws_api import WebsocketServer from backend.app import run_processes -from backend.server.ws_api import WebsocketServer def main(): diff --git a/autogpt_platform/backend/test/e2e_test_data.py b/autogpt_platform/backend/test/e2e_test_data.py index 943c506f5c..d7576cdad3 100644 --- a/autogpt_platform/backend/test/e2e_test_data.py +++ b/autogpt_platform/backend/test/e2e_test_data.py @@ -23,16 +23,18 @@ from typing import Any, Dict, List from faker import Faker +# Import API functions from the backend +from backend.api.features.library.db import create_library_agent, create_preset +from backend.api.features.library.model import LibraryAgentPresetCreatable +from backend.api.features.store.db import ( + create_store_submission, + review_store_submission, +) from backend.data.auth.api_key import create_api_key from backend.data.credit import get_user_credit_model from backend.data.db import prisma from backend.data.graph import Graph, Link, Node, create_graph from backend.data.user import get_or_create_user - -# Import API functions from the backend -from backend.server.v2.library.db import create_library_agent, create_preset -from backend.server.v2.library.model import LibraryAgentPresetCreatable -from backend.server.v2.store.db import create_store_submission, review_store_submission from backend.util.clients import get_supabase faker = Faker() diff --git a/autogpt_platform/frontend/src/app/(platform)/auth/authorize/page.tsx b/autogpt_platform/frontend/src/app/(platform)/auth/authorize/page.tsx index 
8093b75965..7472ef212e 100644 --- a/autogpt_platform/frontend/src/app/(platform)/auth/authorize/page.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/auth/authorize/page.tsx @@ -16,7 +16,7 @@ import type { APIKeyPermission } from "@/app/api/__generated__/models/aPIKeyPerm // Human-readable scope descriptions const SCOPE_DESCRIPTIONS: { [key in APIKeyPermission]: string } = { - IDENTITY: "Read user ID, name, e-mail, and timezone", + IDENTITY: "View your user ID, e-mail, and timezone", EXECUTE_GRAPH: "Run your agents", READ_GRAPH: "View your agents and their configurations", EXECUTE_BLOCK: "Execute individual blocks", diff --git a/autogpt_platform/frontend/src/app/api/openapi.json b/autogpt_platform/frontend/src/app/api/openapi.json index 3556e2f5c7..61a3600892 100644 --- a/autogpt_platform/frontend/src/app/api/openapi.json +++ b/autogpt_platform/frontend/src/app/api/openapi.json @@ -2,46 +2,33 @@ "openapi": "3.1.0", "info": { "title": "AutoGPT Agent Server", + "summary": "AutoGPT Agent Server", "description": "This server is used to execute agents that are created by the AutoGPT system.", "version": "0.1" }, "paths": { - "/api/integrations/{provider}/login": { - "get": { - "tags": ["v1", "integrations"], - "summary": "Initiate OAuth flow", - "operationId": "getV1Initiate oauth flow", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "provider", - "in": "path", - "required": true, - "schema": { - "type": "string", - "title": "The provider to initiate an OAuth flow for", - "description": "Provider name for integrations. Can be any string value, including custom provider names." + "/api/analytics/log_raw_analytics": { + "post": { + "tags": ["analytics"], + "summary": "Log Raw Analytics", + "operationId": "postAnalyticsLogRawAnalytics", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_postAnalyticsLogRawAnalytics" + } } }, - { - "name": "scopes", - "in": "query", - "required": false, - "schema": { - "type": "string", - "title": "Comma-separated list of authorization scopes", - "default": "" - } - } - ], + "required": true + }, "responses": { "200": { "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LoginResponse" } - } - } + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" }, "422": { "description": "Validation Error", @@ -50,9 +37,2773 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/analytics/log_raw_metric": { + "post": { + "tags": ["analytics"], + "summary": "Log Raw Metric", + "operationId": "postAnalyticsLogRawMetric", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LogRawMetricRequest" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/api-keys": { + "get": { + "tags": ["v1", "api-keys"], + "summary": "List user API keys", + "description": "List all API keys for the user", + "operationId": "getV1List 
user api keys", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { "$ref": "#/components/schemas/APIKeyInfo" }, + "type": "array", + "title": "Response Getv1List User Api Keys" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + }, + "post": { + "tags": ["v1", "api-keys"], + "summary": "Create new API key", + "description": "Create a new API key", + "operationId": "postV1Create new api key", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/CreateAPIKeyRequest" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateAPIKeyResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/api-keys/{key_id}": { + "delete": { + "tags": ["v1", "api-keys"], + "summary": "Revoke API key", + "description": "Revoke an API key", + "operationId": "deleteV1Revoke api key", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "key_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Key Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/APIKeyInfo" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "get": { + "tags": ["v1", "api-keys"], + "summary": "Get specific API key", + "description": "Get a specific API key", + "operationId": "getV1Get specific api key", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "key_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Key Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/APIKeyInfo" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/api-keys/{key_id}/permissions": { + "put": { + "tags": ["v1", "api-keys"], + "summary": "Update key permissions", + "description": "Update API key permissions", + "operationId": "putV1Update key permissions", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "key_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Key Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdatePermissionsRequest" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/APIKeyInfo" } + } + } + }, + "401": { 
+ "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/api-keys/{key_id}/suspend": { + "post": { + "tags": ["v1", "api-keys"], + "summary": "Suspend API key", + "description": "Suspend an API key", + "operationId": "postV1Suspend api key", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "key_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Key Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/APIKeyInfo" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/auth/user": { + "post": { + "tags": ["v1", "auth"], + "summary": "Get or create user", + "operationId": "postV1Get or create user", + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/auth/user/email": { + "post": { + "tags": ["v1", "auth"], + "summary": "Update user email", + "operationId": "postV1Update user email", + "requestBody": { + "content": { + "application/json": { + "schema": { "type": "string", "title": "Email" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "additionalProperties": { "type": "string" }, + "type": "object", + "title": "Response Postv1Update User Email" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/auth/user/preferences": { + "get": { + "tags": ["v1", "auth"], + "summary": "Get notification preferences", + "operationId": "getV1Get notification preferences", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/NotificationPreference" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + }, + "post": { + "tags": ["v1", "auth"], + "summary": "Update notification preferences", + "operationId": "postV1Update notification preferences", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/NotificationPreferenceDTO" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/NotificationPreference" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + 
"security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/auth/user/timezone": { + "get": { + "tags": ["v1", "auth"], + "summary": "Get user timezone", + "description": "Get user timezone setting.", + "operationId": "getV1Get user timezone", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/TimezoneResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + }, + "post": { + "tags": ["v1", "auth"], + "summary": "Update user timezone", + "description": "Update user timezone. The timezone should be a valid IANA timezone identifier.", + "operationId": "postV1Update user timezone", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/UpdateTimezoneRequest" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/TimezoneResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/blocks": { + "get": { + "tags": ["v1", "blocks"], + "summary": "List available blocks", + "operationId": "getV1List available blocks", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { "additionalProperties": true, "type": "object" }, + "type": "array", + "title": "Response Getv1List Available Blocks" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/blocks/{block_id}/execute": { + "post": { + "tags": ["v1", "blocks"], + "summary": "Execute graph block", + "operationId": "postV1Execute graph block", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "block_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Block Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "additionalProperties": true, + "title": "Data" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "object", + "additionalProperties": { "type": "array", "items": {} }, + "title": "Response Postv1Execute Graph Block" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/builder/blocks": { + "get": { + "tags": ["v2"], + "summary": "Get Builder blocks", + "description": "Get blocks based on either category, type, or provider.", + "operationId": "getV2Get builder blocks", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "category", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Category" + } + }, + { + "name": "type", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "enum": ["all", 
"input", "action", "output"], + "type": "string" + }, + { "type": "null" } + ], + "title": "Type" + } + }, + { + "name": "provider", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string", + "description": "Provider name for integrations. Can be any string value, including custom provider names." + }, + { "type": "null" } + ], + "title": "Provider" + } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 1, "title": "Page" } + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 50, "title": "Page Size" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/BlockResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/builder/blocks/batch": { + "get": { + "tags": ["v2"], + "summary": "Get specific blocks", + "description": "Get specific blocks by their IDs.", + "operationId": "getV2Get specific blocks", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "block_ids", + "in": "query", + "required": true, + "schema": { + "type": "array", + "items": { "type": "string" }, + "title": "Block Ids" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { "$ref": "#/components/schemas/BlockInfo" }, + "title": "Response Getv2Get Specific Blocks" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/builder/categories": { + "get": { + "tags": ["v2"], + "summary": "Get Builder block categories", + "description": "Get all block categories with a specified number of blocks per category.", + "operationId": "getV2Get builder block categories", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "blocks_per_category", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "default": 3, + "title": "Blocks Per Category" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/BlockCategoryResponse" + }, + "title": "Response Getv2Get Builder Block Categories" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/builder/counts": { + "get": { + "tags": ["v2"], + "summary": "Get Builder item counts", + "description": "Get item counts for the menu categories in the Blocks Menu.", + "operationId": "getV2Get builder item counts", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/CountResponse" } + } + } + }, + "401": { + "$ref": 
"#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/builder/providers": { + "get": { + "tags": ["v2"], + "summary": "Get Builder integration providers", + "description": "Get all integration providers with their block counts.", + "operationId": "getV2Get builder integration providers", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "page", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 1, "title": "Page" } + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 50, "title": "Page Size" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/ProviderResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/builder/search": { + "get": { + "tags": ["v2", "store", "private"], + "summary": "Builder search", + "description": "Search for blocks (including integrations), marketplace agents, and user library agents.", + "operationId": "getV2Builder search", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "search_query", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Search Query" + } + }, + { + "name": "filter", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "array", + "items": { + "enum": [ + "blocks", + "integrations", + "marketplace_agents", + "my_agents" + ], + "type": "string" + } + }, + { "type": "null" } + ], + "title": "Filter" + } + }, + { + "name": "search_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Search Id" + } + }, + { + "name": "by_creator", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { "type": "array", "items": { "type": "string" } }, + { "type": "null" } + ], + "title": "By Creator" + } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 1, "title": "Page" } + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 50, "title": "Page Size" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/SearchResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/builder/suggestions": { + "get": { + "tags": ["v2"], + "summary": "Get Builder suggestions", + "description": "Get all suggestions for the Blocks Menu.", + "operationId": "getV2Get builder suggestions", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/SuggestionsResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/chat/health": { 
+ "get": { + "tags": ["v2", "chat", "chat"], + "summary": "Health Check", + "description": "Health check endpoint for the chat service.\n\nPerforms a full cycle test of session creation, assignment, and retrieval. Should always return healthy\nif the service and data layer are operational.\n\nReturns:\n dict: A status dictionary indicating health, service name, and API version.", + "operationId": "getV2HealthCheck", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "additionalProperties": true, + "type": "object", + "title": "Response Getv2Healthcheck" + } + } + } + } + } + } + }, + "/api/chat/sessions": { + "post": { + "tags": ["v2", "chat", "chat"], + "summary": "Create Session", + "description": "Create a new chat session.\n\nInitiates a new chat session for either an authenticated or anonymous user.\n\nArgs:\n user_id: The optional authenticated user ID parsed from the JWT. If missing, creates an anonymous session.\n\nReturns:\n CreateSessionResponse: Details of the created session.", + "operationId": "postV2CreateSession", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateSessionResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/chat/sessions/{session_id}": { + "get": { + "tags": ["v2", "chat", "chat"], + "summary": "Get Session", + "description": "Retrieve the details of a specific chat session.\n\nLooks up a chat session by ID for the given user (if authenticated) and returns all session data including messages.\n\nArgs:\n session_id: The unique identifier for the desired chat session.\n user_id: The optional authenticated user ID, or None for anonymous access.\n\nReturns:\n SessionDetailResponse: Details for the requested session; raises NotFoundError if not found.", + "operationId": "getV2GetSession", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "session_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Session Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SessionDetailResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/chat/sessions/{session_id}/assign-user": { + "patch": { + "tags": ["v2", "chat", "chat"], + "summary": "Session Assign User", + "description": "Assign an authenticated user to a chat session.\n\nUsed (typically post-login) to claim an existing anonymous session as the current authenticated user.\n\nArgs:\n session_id: The identifier for the (previously anonymous) session.\n user_id: The authenticated user's ID to associate with the session.\n\nReturns:\n dict: Status of the assignment.", + "operationId": "patchV2SessionAssignUser", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "session_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Session Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "object", 
+ "additionalProperties": true, + "title": "Response Patchv2Sessionassignuser" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/chat/sessions/{session_id}/stream": { + "get": { + "tags": ["v2", "chat", "chat"], + "summary": "Stream Chat", + "description": "Stream chat responses for a session.\n\nStreams the AI/completion responses in real time over Server-Sent Events (SSE), including:\n - Text fragments as they are generated\n - Tool call UI elements (if invoked)\n - Tool execution results\n\nArgs:\n session_id: The chat session identifier to associate with the streamed messages.\n message: The user's new message to process.\n user_id: Optional authenticated user ID.\n is_user_message: Whether the message is a user message.\nReturns:\n StreamingResponse: SSE-formatted response chunks.", + "operationId": "getV2StreamChat", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "session_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Session Id" } + }, + { + "name": "message", + "in": "query", + "required": true, + "schema": { + "type": "string", + "minLength": 1, + "maxLength": 10000, + "title": "Message" + } + }, + { + "name": "is_user_message", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "default": true, + "title": "Is User Message" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/credits": { + "get": { + "tags": ["v1", "credits"], + "summary": "Get user credits", + "operationId": "getV1Get user credits", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "additionalProperties": { "type": "integer" }, + "type": "object", + "title": "Response Getv1Get User Credits" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + }, + "patch": { + "tags": ["v1", "credits"], + "summary": "Fulfill checkout session", + "operationId": "patchV1Fulfill checkout session", + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + }, + "post": { + "tags": ["v1", "credits"], + "summary": "Request credit top up", + "operationId": "postV1Request credit top up", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/RequestTopUp" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ 
"HTTPBearerJWT": [] }] + } + }, + "/api/credits/admin/add_credits": { + "post": { + "tags": ["v2", "admin", "credits", "admin"], + "summary": "Add Credits to User", + "operationId": "postV2Add credits to user", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_postV2Add_credits_to_user" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AddUserCreditsResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/credits/admin/users_history": { + "get": { + "tags": ["v2", "admin", "credits", "admin"], + "summary": "Get All Users History", + "operationId": "getV2Get all users history", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "search", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Search" + } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 1, "title": "Page" } + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 20, "title": "Page Size" } + }, + { + "name": "transaction_filter", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { "$ref": "#/components/schemas/CreditTransactionType" }, + { "type": "null" } + ], + "title": "Transaction Filter" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/UserHistoryResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/credits/auto-top-up": { + "get": { + "tags": ["v1", "credits"], + "summary": "Get auto top up", + "operationId": "getV1Get auto top up", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/AutoTopUpConfig" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + }, + "post": { + "tags": ["v1", "credits"], + "summary": "Configure auto top up", + "operationId": "postV1Configure auto top up", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/AutoTopUpConfig" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "string", + "title": "Response Postv1Configure Auto Top Up" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/credits/manage": { + "get": { + "tags": ["v1", "credits"], + "summary": 
"Manage payment methods", + "operationId": "getV1Manage payment methods", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "additionalProperties": { "type": "string" }, + "type": "object", + "title": "Response Getv1Manage Payment Methods" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/credits/refunds": { + "get": { + "tags": ["v1", "credits"], + "summary": "Get refund requests", + "operationId": "getV1Get refund requests", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { "$ref": "#/components/schemas/RefundRequest" }, + "type": "array", + "title": "Response Getv1Get Refund Requests" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/credits/stripe_webhook": { + "post": { + "tags": ["v1", "credits"], + "summary": "Handle Stripe webhooks", + "operationId": "postV1Handle stripe webhooks", + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + } + } + } + }, + "/api/credits/transactions": { + "get": { + "tags": ["v1", "credits"], + "summary": "Get credit history", + "operationId": "getV1Get credit history", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "transaction_time", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { "type": "string", "format": "date-time" }, + { "type": "null" } + ], + "title": "Transaction Time" + } + }, + { + "name": "transaction_type", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Transaction Type" + } + }, + { + "name": "transaction_count_limit", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "default": 100, + "title": "Transaction Count Limit" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/TransactionHistory" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/credits/{transaction_key}/refund": { + "post": { + "tags": ["v1", "credits"], + "summary": "Refund credit transaction", + "operationId": "postV1Refund credit transaction", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "transaction_key", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Transaction Key" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "object", + "additionalProperties": { "type": "string" }, + "title": "Metadata" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "integer", + "title": "Response Postv1Refund Credit Transaction" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": 
"#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/email/": { + "post": { + "tags": ["v1", "email"], + "summary": "Handle Postmark Email Webhooks", + "operationId": "postV1Handle postmark email webhooks", + "requestBody": { + "content": { + "application/json": { + "schema": { + "oneOf": [ + { "$ref": "#/components/schemas/PostmarkDeliveryWebhook" }, + { "$ref": "#/components/schemas/PostmarkBounceWebhook" }, + { + "$ref": "#/components/schemas/PostmarkSpamComplaintWebhook" + }, + { "$ref": "#/components/schemas/PostmarkOpenWebhook" }, + { "$ref": "#/components/schemas/PostmarkClickWebhook" }, + { + "$ref": "#/components/schemas/PostmarkSubscriptionChangeWebhook" + } + ], + "title": "Webhook", + "discriminator": { + "propertyName": "RecordType", + "mapping": { + "Delivery": "#/components/schemas/PostmarkDeliveryWebhook", + "Bounce": "#/components/schemas/PostmarkBounceWebhook", + "SpamComplaint": "#/components/schemas/PostmarkSpamComplaintWebhook", + "Open": "#/components/schemas/PostmarkOpenWebhook", + "Click": "#/components/schemas/PostmarkClickWebhook", + "SubscriptionChange": "#/components/schemas/PostmarkSubscriptionChangeWebhook" + } + } + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "APIKeyAuthenticator-X-Postmark-Webhook-Token": [] }] + } + }, + "/api/email/unsubscribe": { + "post": { + "tags": ["v1", "email"], + "summary": "One Click Email Unsubscribe", + "operationId": "postV1One click email unsubscribe", + "parameters": [ + { + "name": "token", + "in": "query", + "required": true, + "schema": { "type": "string", "title": "Token" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/executions": { + "get": { + "tags": ["v1", "graphs"], + "summary": "List all executions", + "operationId": "getV1List all executions", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { + "$ref": "#/components/schemas/GraphExecutionMeta" + }, + "type": "array", + "title": "Response Getv1List All Executions" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/executions/admin/execution_accuracy_trends": { + "get": { + "tags": ["v2", "admin", "admin", "execution_analytics"], + "summary": "Get Execution Accuracy Trends and Alerts", + "description": "Get execution accuracy trends with moving averages and alert detection.\nSimple single-query approach.", + "operationId": "getV2Get execution accuracy trends and alerts", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "query", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "user_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "User Id" + } + }, + { + "name": "days_back", + "in": "query", + "required": false, + "schema": 
{ "type": "integer", "default": 30, "title": "Days Back" } + }, + { + "name": "drop_threshold", + "in": "query", + "required": false, + "schema": { + "type": "number", + "default": 10.0, + "title": "Drop Threshold" + } + }, + { + "name": "include_historical", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "default": false, + "title": "Include Historical" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AccuracyTrendsResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/executions/admin/execution_analytics": { + "post": { + "tags": ["v2", "admin", "admin", "execution_analytics"], + "summary": "Generate Execution Analytics", + "description": "Generate activity summaries and correctness scores for graph executions.\n\nThis endpoint:\n1. Fetches all completed executions matching the criteria\n2. Identifies executions missing activity_status or correctness_score\n3. Generates missing data using AI in batches\n4. Updates the database with new stats\n5. Returns a detailed report of the analytics operation", + "operationId": "postV2Generate execution analytics", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ExecutionAnalyticsRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ExecutionAnalyticsResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/executions/admin/execution_analytics/config": { + "get": { + "tags": ["v2", "admin", "admin", "execution_analytics"], + "summary": "Get Execution Analytics Configuration", + "description": "Get the configuration for execution analytics including:\n- Available AI models with metadata\n- Default system and user prompts\n- Recommended model selection", + "operationId": "getV2Get execution analytics configuration", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ExecutionAnalyticsConfig" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/executions/{graph_exec_id}": { + "delete": { + "tags": ["v1", "graphs"], + "summary": "Delete graph execution", + "operationId": "deleteV1Delete graph execution", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_exec_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Exec Id" } + } + ], + "responses": { + "204": { "description": "Successful Response" }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + 
} + } + }, + "/api/files/upload": { + "post": { + "tags": ["v1", "files"], + "summary": "Upload file to cloud storage", + "description": "Upload a file to cloud storage and return a storage key that can be used\nwith FileStoreBlock and AgentFileInputBlock.\n\nArgs:\n file: The file to upload\n user_id: The user ID\n provider: Cloud storage provider (\"gcs\", \"s3\", \"azure\")\n expiration_hours: Hours until file expires (1-48)\n\nReturns:\n Dict containing the cloud storage path and signed URL", + "operationId": "postV1Upload file to cloud storage", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "provider", + "in": "query", + "required": false, + "schema": { + "type": "string", + "default": "gcs", + "title": "Provider" + } + }, + { + "name": "expiration_hours", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "default": 24, + "title": "Expiration Hours" + } + } + ], + "requestBody": { + "required": true, + "content": { + "multipart/form-data": { + "schema": { + "$ref": "#/components/schemas/Body_postV1Upload_file_to_cloud_storage" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/UploadFileResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs": { + "get": { + "tags": ["v1", "graphs"], + "summary": "List user graphs", + "operationId": "getV1List user graphs", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { "$ref": "#/components/schemas/GraphMeta" }, + "type": "array", + "title": "Response Getv1List User Graphs" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + }, + "post": { + "tags": ["v1", "graphs"], + "summary": "Create new graph", + "operationId": "postV1Create new graph", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/CreateGraph" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphModel" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/graphs/{graph_id}": { + "delete": { + "tags": ["v1", "graphs"], + "summary": "Delete graph permanently", + "operationId": "deleteV1Delete graph permanently", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/DeleteGraphResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": 
"#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "get": { + "tags": ["v1", "graphs"], + "summary": "Get specific graph", + "operationId": "getV1Get specific graph", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "version", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "integer" }, { "type": "null" }], + "title": "Version" + } + }, + { + "name": "for_export", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "default": false, + "title": "For Export" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphModel" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "put": { + "tags": ["v1", "graphs"], + "summary": "Update graph version", + "operationId": "putV1Update graph version", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/Graph" } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphModel" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/execute/{graph_version}": { + "post": { + "tags": ["v1", "graphs"], + "summary": "Execute graph agent", + "operationId": "postV1Execute graph agent", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "graph_version", + "in": "path", + "required": true, + "schema": { + "anyOf": [{ "type": "integer" }, { "type": "null" }], + "title": "Graph Version" + } + }, + { + "name": "preset_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Preset Id" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_postV1Execute_graph_agent" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphExecutionMeta" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/executions": { + "get": { + "tags": ["v1", "graphs"], + "summary": "List graph executions", + "operationId": "getV1List graph executions", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + 
"in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "description": "Page number (1-indexed)", + "default": 1, + "title": "Page" + }, + "description": "Page number (1-indexed)" + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 100, + "minimum": 1, + "description": "Number of executions per page", + "default": 25, + "title": "Page Size" + }, + "description": "Number of executions per page" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GraphExecutionsPaginated" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/executions/{graph_exec_id}": { + "get": { + "tags": ["v1", "graphs"], + "summary": "Get execution details", + "operationId": "getV1Get execution details", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "graph_exec_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Exec Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "anyOf": [ + { "$ref": "#/components/schemas/GraphExecution" }, + { "$ref": "#/components/schemas/GraphExecutionWithNodes" } + ], + "title": "Response Getv1Get Execution Details" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/executions/{graph_exec_id}/share": { + "delete": { + "tags": ["v1"], + "summary": "Disable Execution Sharing", + "description": "Disable sharing for a graph execution.", + "operationId": "deleteV1DisableExecutionSharing", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "graph_exec_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Exec Id" } + } + ], + "responses": { + "204": { "description": "Successful Response" }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "post": { + "tags": ["v1"], + "summary": "Enable Execution Sharing", + "description": "Enable sharing for a graph execution.", + "operationId": "postV1EnableExecutionSharing", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "graph_exec_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Exec Id" } + } + ], + "requestBody": { + 
"content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ShareRequest", + "default": {} + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/ShareResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/executions/{graph_exec_id}/stop": { + "post": { + "tags": ["v1", "graphs"], + "summary": "Stop graph execution", + "operationId": "postV1Stop graph execution", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "graph_exec_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Exec Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "anyOf": [ + { "$ref": "#/components/schemas/GraphExecutionMeta" }, + { "type": "null" } + ], + "title": "Response Postv1Stop Graph Execution" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/schedules": { + "get": { + "tags": ["v1", "schedules"], + "summary": "List execution schedules for a graph", + "operationId": "getV1List execution schedules for a graph", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/GraphExecutionJobInfo" + }, + "title": "Response Getv1List Execution Schedules For A Graph" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "post": { + "tags": ["v1", "schedules"], + "summary": "Create execution schedule", + "operationId": "postV1Create execution schedule", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "description": "ID of the graph to schedule", + "title": "Graph Id" + }, + "description": "ID of the graph to schedule" + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ScheduleCreationRequest" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GraphExecutionJobInfo" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": 
"#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/settings": { + "patch": { + "tags": ["v1", "graphs"], + "summary": "Update graph settings", + "description": "Update graph settings for the user's library agent.", + "operationId": "patchV1Update graph settings", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphSettings" } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphSettings" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/versions": { + "get": { + "tags": ["v1", "graphs"], + "summary": "Get all graph versions", + "operationId": "getV1Get all graph versions", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { "$ref": "#/components/schemas/GraphModel" }, + "title": "Response Getv1Get All Graph Versions" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/versions/active": { + "put": { + "tags": ["v1", "graphs"], + "summary": "Set active graph version", + "operationId": "putV1Set active graph version", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/SetGraphActiveVersion" } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/graphs/{graph_id}/versions/{version}": { + "get": { + "tags": ["v1", "graphs"], + "summary": "Get graph version", + "operationId": "getV1Get graph version", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "version", + "in": "path", + "required": true, + "schema": { + "anyOf": [{ "type": "integer" }, { "type": "null" }], + "title": "Version" + } + }, + { + "name": "for_export", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "default": false, + "title": "For Export" + } + } + ], + "responses": { + "200": 
{ + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphModel" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/integrations/ayrshare/sso_url": { + "get": { + "tags": ["v1", "integrations"], + "summary": "Get Ayrshare Sso Url", + "description": "Generate an SSO URL for Ayrshare social media integration.\n\nReturns:\n dict: Contains the SSO URL for Ayrshare integration", + "operationId": "getV1GetAyrshareSsoUrl", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/AyrshareSSOResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/integrations/credentials": { + "get": { + "tags": ["v1", "integrations"], + "summary": "List Credentials", + "operationId": "getV1List credentials", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { + "$ref": "#/components/schemas/CredentialsMetaResponse" + }, + "type": "array", + "title": "Response Getv1List Credentials" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/integrations/providers": { + "get": { + "tags": ["v1", "integrations"], + "summary": "List Providers", + "description": "Get a list of all available provider names.\n\nReturns both statically defined providers (from ProviderName enum)\nand dynamically registered providers (from SDK decorators).\n\nNote: The complete list of provider names is also available as a constant\nin the generated TypeScript client via PROVIDER_NAMES.", + "operationId": "getV1ListProviders", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { "type": "string" }, + "type": "array", + "title": "Response Getv1Listproviders" + } + } + } + } + } + } + }, + "/api/integrations/providers/constants": { + "get": { + "tags": ["v1", "integrations"], + "summary": "Get Provider Constants", + "description": "Get provider names as constants.\n\nThis endpoint returns a model with provider names as constants,\nspecifically designed for OpenAPI code generation tools to create\nTypeScript constants.", + "operationId": "getV1GetProviderConstants", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/ProviderConstants" } + } + } + } + } + } + }, + "/api/integrations/providers/enum-example": { + "get": { + "tags": ["v1", "integrations"], + "summary": "Get Provider Enum Example", + "description": "Example endpoint that uses the CompleteProviderNames enum.\n\nThis endpoint exists to ensure that the CompleteProviderNames enum is included\nin the OpenAPI schema, which will cause Orval to generate it as a\nTypeScript enum/constant.", + "operationId": "getV1GetProviderEnumExample", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProviderEnumResponse" + } + } + } + } + } + } + }, 
+ "/api/integrations/providers/names": { + "get": { + "tags": ["v1", "integrations"], + "summary": "Get Provider Names", + "description": "Get all provider names in a structured format.\n\nThis endpoint is specifically designed to expose the provider names\nin the OpenAPI schema so that code generators like Orval can create\nappropriate TypeScript constants.", + "operationId": "getV1GetProviderNames", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ProviderNamesResponse" + } + } + } + } + } + } + }, + "/api/integrations/webhooks/{webhook_id}/ping": { + "post": { + "tags": ["v1", "integrations"], + "summary": "Webhook Ping", + "operationId": "postV1WebhookPing", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "webhook_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Our ID for the webhook" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } } } } @@ -96,6 +2847,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -103,40 +2857,10 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } } }, - "/api/integrations/credentials": { - "get": { - "tags": ["v1", "integrations"], - "summary": "List Credentials", - "operationId": "getV1List credentials", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "items": { - "$ref": "#/components/schemas/CredentialsMetaResponse" - }, - "type": "array", - "title": "Response Getv1List Credentials" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, "/api/integrations/{provider}/credentials": { "get": { "tags": ["v1", "integrations"], @@ -170,6 +2894,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -177,9 +2904,6 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } }, @@ -253,6 +2977,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -260,81 +2987,11 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } } }, "/api/integrations/{provider}/credentials/{cred_id}": { - "get": { - "tags": ["v1", "integrations"], - "summary": "Get Specific Credential By ID", - "operationId": "getV1Get specific credential by id", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "provider", - "in": "path", - "required": true, - "schema": { - "type": "string", - "title": "The provider to retrieve credentials for", - "description": "Provider name for integrations. 
Can be any string value, including custom provider names." - } - }, - { - "name": "cred_id", - "in": "path", - "required": true, - "schema": { - "type": "string", - "title": "The ID of the credentials to retrieve" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "oneOf": [ - { "$ref": "#/components/schemas/OAuth2Credentials" }, - { "$ref": "#/components/schemas/APIKeyCredentials" }, - { "$ref": "#/components/schemas/UserPasswordCredentials" }, - { - "$ref": "#/components/schemas/HostScopedCredentials-Output" - } - ], - "discriminator": { - "propertyName": "type", - "mapping": { - "oauth2": "#/components/schemas/OAuth2Credentials", - "api_key": "#/components/schemas/APIKeyCredentials", - "user_password": "#/components/schemas/UserPasswordCredentials", - "host_scoped": "#/components/schemas/HostScopedCredentials-Output" - } - }, - "title": "Response Getv1Get Specific Credential By Id" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, "delete": { "tags": ["v1", "integrations"], "summary": "Delete Credentials", @@ -390,6 +3047,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -397,9 +3057,124 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } + } + } + }, + "get": { + "tags": ["v1", "integrations"], + "summary": "Get Specific Credential By ID", + "operationId": "getV1Get specific credential by id", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "provider", + "in": "path", + "required": true, + "schema": { + "type": "string", + "title": "The provider to retrieve credentials for", + "description": "Provider name for integrations. Can be any string value, including custom provider names." 
+ } + }, + { + "name": "cred_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "title": "The ID of the credentials to retrieve" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "oneOf": [ + { "$ref": "#/components/schemas/OAuth2Credentials" }, + { "$ref": "#/components/schemas/APIKeyCredentials" }, + { "$ref": "#/components/schemas/UserPasswordCredentials" }, + { + "$ref": "#/components/schemas/HostScopedCredentials-Output" + } + ], + "discriminator": { + "propertyName": "type", + "mapping": { + "oauth2": "#/components/schemas/OAuth2Credentials", + "api_key": "#/components/schemas/APIKeyCredentials", + "user_password": "#/components/schemas/UserPasswordCredentials", + "host_scoped": "#/components/schemas/HostScopedCredentials-Output" + } + }, + "title": "Response Getv1Get Specific Credential By Id" + } + } + } }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/integrations/{provider}/login": { + "get": { + "tags": ["v1", "integrations"], + "summary": "Initiate OAuth flow", + "operationId": "getV1Initiate oauth flow", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "provider", + "in": "path", + "required": true, + "schema": { + "type": "string", + "title": "The provider to initiate an OAuth flow for", + "description": "Provider name for integrations. Can be any string value, including custom provider names." + } + }, + { + "name": "scopes", + "in": "query", + "required": false, + "schema": { + "type": "string", + "title": "Comma-separated list of authorization scopes", + "default": "" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LoginResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } } } } @@ -443,18 +3218,300 @@ } } }, - "/api/integrations/webhooks/{webhook_id}/ping": { - "post": { - "tags": ["v1", "integrations"], - "summary": "Webhook Ping", - "operationId": "postV1WebhookPing", + "/api/library/agents": { + "get": { + "tags": ["v2", "library", "private"], + "summary": "List Library Agents", + "description": "Get all agents in the user's library (both created and saved).\n\nArgs:\n user_id: ID of the authenticated user.\n search_term: Optional search term to filter agents by name/description.\n filter_by: List of filters to apply (favorites, created by user).\n sort_by: List of sorting criteria (created date, updated date).\n page: Page number to retrieve.\n page_size: Number of agents per page.\n\nReturns:\n A LibraryAgentResponse containing agents and pagination metadata.\n\nRaises:\n HTTPException: If a server/database error occurs.", + "operationId": "getV2List library agents", "security": [{ "HTTPBearerJWT": [] }], "parameters": [ { - "name": "webhook_id", + "name": "search_term", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "description": "Search term to filter agents", + "title": "Search Term" + }, + "description": "Search term to filter 
agents" + }, + { + "name": "sort_by", + "in": "query", + "required": false, + "schema": { + "$ref": "#/components/schemas/LibraryAgentSort", + "description": "Criteria to sort results by", + "default": "updatedAt" + }, + "description": "Criteria to sort results by" + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "description": "Page number to retrieve (must be >= 1)", + "default": 1, + "title": "Page" + }, + "description": "Page number to retrieve (must be >= 1)" + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "description": "Number of agents per page (must be >= 1)", + "default": 15, + "title": "Page Size" + }, + "description": "Number of agents per page (must be >= 1)" + } + ], + "responses": { + "200": { + "description": "List of library agents", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LibraryAgentResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "500": { + "description": "Server error", + "content": { "application/json": {} } + } + } + }, + "post": { + "tags": ["v2", "library", "private"], + "summary": "Add Marketplace Agent", + "description": "Add an agent from the marketplace to the user's library.\n\nArgs:\n store_listing_version_id: ID of the store listing version to add.\n user_id: ID of the authenticated user.\n\nReturns:\n library_model.LibraryAgent: Agent added to the library\n\nRaises:\n HTTPException(404): If the listing version is not found.\n HTTPException(500): If a server/database error occurs.", + "operationId": "postV2Add marketplace agent", + "security": [{ "HTTPBearerJWT": [] }], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_postV2Add_marketplace_agent" + } + } + } + }, + "responses": { + "201": { + "description": "Agent added successfully", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LibraryAgent" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "404": { "description": "Store listing version not found" }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "500": { "description": "Server error" } + } + } + }, + "/api/library/agents/by-graph/{graph_id}": { + "get": { + "tags": ["v2", "library", "private"], + "summary": "Get Library Agent By Graph Id", + "operationId": "getV2GetLibraryAgentByGraphId", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "graph_id", "in": "path", "required": true, - "schema": { "type": "string", "title": "Our ID for the webhook" } + "schema": { "type": "string", "title": "Graph Id" } + }, + { + "name": "version", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "integer" }, { "type": "null" }], + "title": "Version" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LibraryAgent" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + 
"description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/library/agents/favorites": { + "get": { + "tags": ["v2", "library", "private"], + "summary": "List Favorite Library Agents", + "description": "Get all favorite agents in the user's library.\n\nArgs:\n user_id: ID of the authenticated user.\n page: Page number to retrieve.\n page_size: Number of agents per page.\n\nReturns:\n A LibraryAgentResponse containing favorite agents and pagination metadata.\n\nRaises:\n HTTPException: If a server/database error occurs.", + "operationId": "getV2List favorite library agents", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "description": "Page number to retrieve (must be >= 1)", + "default": 1, + "title": "Page" + }, + "description": "Page number to retrieve (must be >= 1)" + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "description": "Number of agents per page (must be >= 1)", + "default": 15, + "title": "Page Size" + }, + "description": "Number of agents per page (must be >= 1)" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LibraryAgentResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "500": { + "description": "Server error", + "content": { "application/json": {} } + } + } + } + }, + "/api/library/agents/marketplace/{store_listing_version_id}": { + "get": { + "tags": ["v2", "library", "private", "store", "library"], + "summary": "Get Agent By Store ID", + "description": "Get Library Agent from Store Listing Version ID.", + "operationId": "getV2Get agent by store id", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "store_listing_version_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Store Listing Version Id" } + } + ], + "responses": { + "200": { + "description": "Library agent found", + "content": { + "application/json": { + "schema": { + "anyOf": [ + { "$ref": "#/components/schemas/LibraryAgent" }, + { "type": "null" } + ], + "title": "Response Getv2Get Agent By Store Id" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "404": { "description": "Agent not found" }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/library/agents/{library_agent_id}": { + "delete": { + "tags": ["v2", "library", "private"], + "summary": "Delete Library Agent", + "description": "Soft-delete the specified library agent.\n\nArgs:\n library_agent_id: ID of the library agent to delete.\n user_id: ID of the authenticated user.\n\nReturns:\n 204 No Content if successful.\n\nRaises:\n HTTPException(404): If the agent does not exist.\n HTTPException(500): If a server/database error occurs.", + "operationId": "deleteV2Delete library agent", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "library_agent_id", 
+ "in": "path", + "required": true, + "schema": { "type": "string", "title": "Library Agent Id" } } ], "responses": { @@ -462,6 +3519,11 @@ "description": "Successful Response", "content": { "application/json": { "schema": {} } } }, + "204": { "description": "Agent deleted successfully" }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "404": { "description": "Agent not found" }, "422": { "description": "Validation Error", "content": { @@ -470,139 +3532,503 @@ } } }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } + "500": { "description": "Server error" } } - } - }, - "/api/integrations/ayrshare/sso_url": { + }, "get": { - "tags": ["v1", "integrations"], - "summary": "Get Ayrshare Sso Url", - "description": "Generate an SSO URL for Ayrshare social media integration.\n\nReturns:\n dict: Contains the SSO URL for Ayrshare integration", - "operationId": "getV1GetAyrshareSsoUrl", + "tags": ["v2", "library", "private"], + "summary": "Get Library Agent", + "operationId": "getV2Get library agent", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "library_agent_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Library Agent Id" } + } + ], "responses": { "200": { "description": "Successful Response", "content": { "application/json": { - "schema": { "$ref": "#/components/schemas/AyrshareSSOResponse" } + "schema": { "$ref": "#/components/schemas/LibraryAgent" } } } }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "patch": { + "tags": ["v2", "library", "private"], + "summary": "Update Library Agent", + "description": "Update the library agent with the given fields.\n\nArgs:\n library_agent_id: ID of the library agent to update.\n payload: Fields to update (auto_update_version, is_favorite, etc.).\n user_id: ID of the authenticated user.\n\nRaises:\n HTTPException(500): If a server/database error occurs.", + "operationId": "patchV2Update library agent", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "library_agent_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Library Agent Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LibraryAgentUpdateRequest" + } + } } }, - "security": [{ "HTTPBearerJWT": [] }] + "responses": { + "200": { + "description": "Agent updated successfully", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LibraryAgent" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "500": { "description": "Server error" } + } } }, - "/api/integrations/providers": { + "/api/library/agents/{library_agent_id}/fork": { + "post": { + "tags": ["v2", "library", "private"], + "summary": "Fork Library Agent", + "operationId": "postV2Fork library agent", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "library_agent_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Library Agent Id" } + } + ], + "responses": { + "200": { + 
"description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LibraryAgent" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/library/presets": { "get": { - "tags": ["v1", "integrations"], - "summary": "List Providers", - "description": "Get a list of all available provider names.\n\nReturns both statically defined providers (from ProviderName enum)\nand dynamically registered providers (from SDK decorators).\n\nNote: The complete list of provider names is also available as a constant\nin the generated TypeScript client via PROVIDER_NAMES.", - "operationId": "getV1ListProviders", + "tags": ["v2", "presets"], + "summary": "List presets", + "description": "Retrieve a paginated list of presets for the current user.", + "operationId": "getV2List presets", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "default": 1, + "title": "Page" + } + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "default": 10, + "title": "Page Size" + } + }, + { + "name": "graph_id", + "in": "query", + "required": true, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "description": "Allows to filter presets by a specific agent graph", + "title": "Graph Id" + }, + "description": "Allows to filter presets by a specific agent graph" + } + ], "responses": { "200": { "description": "Successful Response", "content": { "application/json": { "schema": { - "items": { "type": "string" }, + "$ref": "#/components/schemas/LibraryAgentPresetResponse" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "post": { + "tags": ["v2", "presets"], + "summary": "Create a new preset", + "description": "Create a new preset for the current user.", + "operationId": "postV2Create a new preset", + "security": [{ "HTTPBearerJWT": [] }], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "anyOf": [ + { + "$ref": "#/components/schemas/LibraryAgentPresetCreatable" + }, + { + "$ref": "#/components/schemas/LibraryAgentPresetCreatableFromGraphExecution" + } + ], + "title": "Preset" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LibraryAgentPreset" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/library/presets/setup-trigger": { + "post": { + "tags": ["v2", "presets"], + "summary": "Setup Trigger", + "description": "Sets up a webhook-triggered `LibraryAgentPreset` for a `LibraryAgent`.\nReturns the correspondingly created `LibraryAgentPreset` with `webhook_id` set.", + "operationId": "postV2SetupTrigger", + "requestBody": { + "content": { + 
"application/json": { + "schema": { + "$ref": "#/components/schemas/TriggeredPresetSetupRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LibraryAgentPreset" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/library/presets/{preset_id}": { + "delete": { + "tags": ["v2", "presets"], + "summary": "Delete a preset", + "description": "Delete an existing preset by its ID.", + "operationId": "deleteV2Delete a preset", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "preset_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Preset Id" } + } + ], + "responses": { + "204": { "description": "Successful Response" }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "get": { + "tags": ["v2", "presets"], + "summary": "Get a specific preset", + "description": "Retrieve details for a specific preset by its ID.", + "operationId": "getV2Get a specific preset", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "preset_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Preset Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LibraryAgentPreset" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + }, + "patch": { + "tags": ["v2", "presets"], + "summary": "Update an existing preset", + "description": "Update an existing preset by its ID.", + "operationId": "patchV2Update an existing preset", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "preset_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Preset Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LibraryAgentPresetUpdatable" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/LibraryAgentPreset" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/library/presets/{preset_id}/execute": { + "post": { + "tags": ["v2", "presets", "presets"], + "summary": "Execute a preset", + "description": "Execute a preset with the given graph and node input for the current user.", + "operationId": "postV2Execute a preset", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "preset_id", + "in": "path", + "required": true, 
+ "schema": { "type": "string", "title": "Preset Id" } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_postV2Execute_a_preset" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphExecutionMeta" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/app/{client_id}": { + "get": { + "tags": ["oauth"], + "summary": "Get Oauth App Info", + "description": "Get public information about an OAuth application.\n\nThis endpoint is used by the consent screen to display application details\nto the user before they authorize access.\n\nReturns:\n- name: Application name\n- description: Application description (if provided)\n- scopes: List of scopes the application is allowed to request", + "operationId": "getOauthGetOauthAppInfo", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "client_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Client Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OAuthApplicationPublicInfo" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "404": { "description": "Application not found or disabled" }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/apps/mine": { + "get": { + "tags": ["oauth"], + "summary": "List My Oauth Apps", + "description": "List all OAuth applications owned by the current user.\n\nReturns a list of OAuth applications with their details including:\n- id, name, description, logo_url\n- client_id (public identifier)\n- redirect_uris, grant_types, scopes\n- is_active status\n- created_at, updated_at timestamps\n\nNote: client_secret is never returned for security reasons.", + "operationId": "getOauthListMyOauthApps", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "items": { + "$ref": "#/components/schemas/OAuthApplicationInfo" + }, "type": "array", - "title": "Response Getv1Listproviders" + "title": "Response Getoauthlistmyoauthapps" } } } - } - } - } - }, - "/api/integrations/providers/names": { - "get": { - "tags": ["v1", "integrations"], - "summary": "Get Provider Names", - "description": "Get all provider names in a structured format.\n\nThis endpoint is specifically designed to expose the provider names\nin the OpenAPI schema so that code generators like Orval can create\nappropriate TypeScript constants.", - "operationId": "getV1GetProviderNames", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ProviderNamesResponse" - } - } - } - } - } - } - }, - "/api/integrations/providers/constants": { - "get": { - "tags": ["v1", "integrations"], - "summary": "Get Provider Constants", - "description": "Get provider names as constants.\n\nThis endpoint returns a model with provider names as 
constants,\nspecifically designed for OpenAPI code generation tools to create\nTypeScript constants.", - "operationId": "getV1GetProviderConstants", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/ProviderConstants" } - } - } - } - } - } - }, - "/api/integrations/providers/enum-example": { - "get": { - "tags": ["v1", "integrations"], - "summary": "Get Provider Enum Example", - "description": "Example endpoint that uses the CompleteProviderNames enum.\n\nThis endpoint exists to ensure that the CompleteProviderNames enum is included\nin the OpenAPI schema, which will cause Orval to generate it as a\nTypeScript enum/constant.", - "operationId": "getV1GetProviderEnumExample", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ProviderEnumResponse" - } - } - } - } - } - } - }, - "/api/analytics/log_raw_metric": { - "post": { - "tags": ["v1", "analytics"], - "summary": "Log Raw Metric", - "operationId": "postV1LogRawMetric", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LogRawMetricRequest" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" @@ -611,21 +4037,244 @@ "security": [{ "HTTPBearerJWT": [] }] } }, - "/api/analytics/log_raw_analytics": { + "/api/oauth/apps/{app_id}/logo": { + "patch": { + "tags": ["oauth"], + "summary": "Update App Logo", + "description": "Update the logo URL for an OAuth application.\n\nOnly the application owner can update the logo.\nThe logo should be uploaded first using the media upload endpoint,\nthen this endpoint is called with the resulting URL.\n\nLogo requirements:\n- Must be square (1:1 aspect ratio)\n- Minimum 512x512 pixels\n- Maximum 2048x2048 pixels\n\nReturns the updated application info.", + "operationId": "patchOauthUpdateAppLogo", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "app_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "App Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/UpdateAppLogoRequest" } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OAuthApplicationInfo" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/apps/{app_id}/logo/upload": { "post": { - "tags": ["v1", "analytics"], - "summary": "Log Raw Analytics", - "operationId": "postV1LogRawAnalytics", + "tags": ["oauth"], + "summary": "Upload App Logo", + "description": "Upload a logo image for an OAuth application.\n\nRequirements:\n- Image must be square (1:1 aspect ratio)\n- Minimum 512x512 pixels\n- Maximum 2048x2048 pixels\n- Allowed formats: JPEG, PNG, WebP\n- Maximum file size: 
3MB\n\nThe image is uploaded to cloud storage and the app's logoUrl is updated.\nReturns the updated application info.", + "operationId": "postOauthUploadAppLogo", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "app_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "App Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "multipart/form-data": { + "schema": { + "$ref": "#/components/schemas/Body_postOauthUploadAppLogo" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OAuthApplicationInfo" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/apps/{app_id}/status": { + "patch": { + "tags": ["oauth"], + "summary": "Update App Status", + "description": "Enable or disable an OAuth application.\n\nOnly the application owner can update the status.\nWhen disabled, the application cannot be used for new authorizations\nand existing access tokens will fail validation.\n\nReturns the updated application info.", + "operationId": "patchOauthUpdateAppStatus", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "app_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "App Id" } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_patchOauthUpdateAppStatus" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OAuthApplicationInfo" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/authorize": { + "post": { + "tags": ["oauth"], + "summary": "Authorize", + "description": "OAuth 2.0 Authorization Endpoint\n\nUser must be logged in (authenticated with Supabase JWT).\nThis endpoint creates an authorization code and returns a redirect URL.\n\nPKCE (Proof Key for Code Exchange) is REQUIRED for all authorization requests.\n\nThe frontend consent screen should call this endpoint after the user approves,\nthen redirect the user to the returned `redirect_url`.\n\nRequest Body:\n- client_id: The OAuth application's client ID\n- redirect_uri: Where to redirect after authorization (must match registered URI)\n- scopes: List of permissions (e.g., \"EXECUTE_GRAPH READ_GRAPH\")\n- state: Anti-CSRF token provided by client (will be returned in redirect)\n- response_type: Must be \"code\" (for authorization code flow)\n- code_challenge: PKCE code challenge (required)\n- code_challenge_method: \"S256\" (recommended) or \"plain\"\n\nReturns:\n- redirect_url: The URL to redirect the user to (includes authorization code)\n\nError cases return a redirect_url with error parameters, or raise HTTPException\nfor critical errors (like invalid redirect_uri).", + "operationId": "postOauthAuthorize", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/AuthorizeRequest" } + } + 
}, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/AuthorizeResponse" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/oauth/introspect": { + "post": { + "tags": ["oauth"], + "summary": "Introspect", + "description": "OAuth 2.0 Token Introspection Endpoint (RFC 7662)\n\nAllows clients to check if a token is valid and get its metadata.\n\nReturns:\n- active: Whether the token is currently active\n- scopes: List of authorized scopes (if active)\n- client_id: The client the token was issued to (if active)\n- user_id: The user the token represents (if active)\n- exp: Expiration timestamp (if active)\n- token_type: \"access_token\" or \"refresh_token\" (if active)", + "operationId": "postOauthIntrospect", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Body_postV1LogRawAnalytics" + "$ref": "#/components/schemas/Body_postOauthIntrospect" } } }, "required": true }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TokenIntrospectionResult" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/oauth/revoke": { + "post": { + "tags": ["oauth"], + "summary": "Revoke", + "description": "OAuth 2.0 Token Revocation Endpoint (RFC 7009)\n\nAllows clients to revoke an access or refresh token.\n\nNote: Revoking a refresh token does NOT revoke associated access tokens.\nRevoking an access token does NOT revoke the associated refresh token.", + "operationId": "postOauthRevoke", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/Body_postOauthRevoke" } + } + }, + "required": true + }, "responses": { "200": { "description": "Successful Response", @@ -638,161 +4287,25 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } - }, - "security": [{ "HTTPBearerJWT": [] }] + } } }, - "/api/auth/user": { + "/api/oauth/token": { "post": { - "tags": ["v1", "auth"], - "summary": "Get or create user", - "operationId": "postV1Get or create user", - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/auth/user/email": { - "post": { - "tags": ["v1", "auth"], - "summary": "Update user email", - "operationId": "postV1Update user email", - "requestBody": { - "content": { - "application/json": { - "schema": { "type": "string", "title": "Email" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "additionalProperties": { "type": "string" }, - "type": "object", - "title": "Response Postv1Update User Email" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": 
{ - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/auth/user/timezone": { - "get": { - "tags": ["v1", "auth"], - "summary": "Get user timezone", - "description": "Get user timezone setting.", - "operationId": "getV1Get user timezone", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/TimezoneResponse" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - }, - "post": { - "tags": ["v1", "auth"], - "summary": "Update user timezone", - "description": "Update user timezone. The timezone should be a valid IANA timezone identifier.", - "operationId": "postV1Update user timezone", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/UpdateTimezoneRequest" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/TimezoneResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/auth/user/preferences": { - "get": { - "tags": ["v1", "auth"], - "summary": "Get notification preferences", - "operationId": "getV1Get notification preferences", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/NotificationPreference" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - }, - "post": { - "tags": ["v1", "auth"], - "summary": "Update notification preferences", - "operationId": "postV1Update notification preferences", + "tags": ["oauth"], + "summary": "Token", + "description": "OAuth 2.0 Token Endpoint\n\nExchanges authorization code or refresh token for access token.\n\nGrant Types:\n1. authorization_code: Exchange authorization code for tokens\n - Required: grant_type, code, redirect_uri, client_id, client_secret\n - Optional: code_verifier (required if PKCE was used)\n\n2. 
refresh_token: Exchange refresh token for new access token\n - Required: grant_type, refresh_token, client_id, client_secret\n\nReturns:\n- access_token: Bearer token for API access (1 hour TTL)\n- token_type: \"Bearer\"\n- expires_in: Seconds until access token expires\n- refresh_token: Token for refreshing access (30 days TTL)\n- scopes: List of scopes", + "operationId": "postOauthToken", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/NotificationPreferenceDTO" + "anyOf": [ + { "$ref": "#/components/schemas/TokenRequestByCode" }, + { "$ref": "#/components/schemas/TokenRequestByRefreshToken" } + ], + "title": "Request" } } }, @@ -803,9 +4316,7 @@ "description": "Successful Response", "content": { "application/json": { - "schema": { - "$ref": "#/components/schemas/NotificationPreference" - } + "schema": { "$ref": "#/components/schemas/TokenResponse" } } } }, @@ -816,12 +4327,8 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } - }, - "security": [{ "HTTPBearerJWT": [] }] + } } }, "/api/onboarding": { @@ -865,6 +4372,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -872,61 +4382,11 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } }, "security": [{ "HTTPBearerJWT": [] }] } }, - "/api/onboarding/step": { - "post": { - "tags": ["v1", "onboarding"], - "summary": "Complete onboarding step", - "operationId": "postV1Complete onboarding step", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "step", - "in": "query", - "required": true, - "schema": { - "enum": [ - "WELCOME", - "USAGE_REASON", - "INTEGRATIONS", - "AGENT_CHOICE", - "AGENT_NEW_RUN", - "AGENT_INPUT", - "CONGRATS", - "MARKETPLACE_VISIT", - "BUILDER_OPEN" - ], - "type": "string", - "title": "Step" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, "/api/onboarding/agents": { "get": { "tags": ["v1", "onboarding"], @@ -997,413 +4457,41 @@ "security": [{ "HTTPBearerJWT": [] }] } }, - "/api/blocks": { - "get": { - "tags": ["v1", "blocks"], - "summary": "List available blocks", - "operationId": "getV1List available blocks", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "items": { "additionalProperties": true, "type": "object" }, - "type": "array", - "title": "Response Getv1List Available Blocks" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/blocks/{block_id}/execute": { + "/api/onboarding/step": { "post": { - "tags": ["v1", "blocks"], - "summary": "Execute graph block", - "operationId": "postV1Execute graph block", + "tags": ["v1", "onboarding"], + "summary": "Complete onboarding step", + "operationId": "postV1Complete onboarding step", "security": [{ "HTTPBearerJWT": [] }], "parameters": [ { - "name": "block_id", - "in": "path", + "name": 
"step", + "in": "query", "required": true, - "schema": { "type": "string", "title": "Block Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "type": "object", - "additionalProperties": true, - "title": "Data" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "type": "object", - "additionalProperties": { "type": "array", "items": {} }, - "title": "Response Postv1Execute Graph Block" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/files/upload": { - "post": { - "tags": ["v1", "files"], - "summary": "Upload file to cloud storage", - "description": "Upload a file to cloud storage and return a storage key that can be used\nwith FileStoreBlock and AgentFileInputBlock.\n\nArgs:\n file: The file to upload\n user_id: The user ID\n provider: Cloud storage provider (\"gcs\", \"s3\", \"azure\")\n expiration_hours: Hours until file expires (1-48)\n\nReturns:\n Dict containing the cloud storage path and signed URL", - "operationId": "postV1Upload file to cloud storage", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "provider", - "in": "query", - "required": false, "schema": { - "type": "string", - "default": "gcs", - "title": "Provider" - } - }, - { - "name": "expiration_hours", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "default": 24, - "title": "Expiration Hours" - } - } - ], - "requestBody": { - "required": true, - "content": { - "multipart/form-data": { - "schema": { - "$ref": "#/components/schemas/Body_postV1Upload_file_to_cloud_storage" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/UploadFileResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/credits": { - "get": { - "tags": ["v1", "credits"], - "summary": "Get user credits", - "operationId": "getV1Get user credits", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "additionalProperties": { "type": "integer" }, - "type": "object", - "title": "Response Getv1Get User Credits" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - }, - "post": { - "tags": ["v1", "credits"], - "summary": "Request credit top up", - "operationId": "postV1Request credit top up", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/RequestTopUp" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ 
"HTTPBearerJWT": [] }] - }, - "patch": { - "tags": ["v1", "credits"], - "summary": "Fulfill checkout session", - "operationId": "patchV1Fulfill checkout session", - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/credits/{transaction_key}/refund": { - "post": { - "tags": ["v1", "credits"], - "summary": "Refund credit transaction", - "operationId": "postV1Refund credit transaction", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "transaction_key", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Transaction Key" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "type": "object", - "additionalProperties": { "type": "string" }, - "title": "Metadata" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "type": "integer", - "title": "Response Postv1Refund Credit Transaction" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/credits/auto-top-up": { - "get": { - "tags": ["v1", "credits"], - "summary": "Get auto top up", - "operationId": "getV1Get auto top up", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/AutoTopUpConfig" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - }, - "post": { - "tags": ["v1", "credits"], - "summary": "Configure auto top up", - "operationId": "postV1Configure auto top up", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/AutoTopUpConfig" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "type": "string", - "title": "Response Postv1Configure Auto Top Up" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/credits/stripe_webhook": { - "post": { - "tags": ["v1", "credits"], - "summary": "Handle Stripe webhooks", - "operationId": "postV1Handle stripe webhooks", - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - } - } - } - }, - "/api/credits/manage": { - "get": { - "tags": ["v1", "credits"], - "summary": "Manage payment methods", - "operationId": "getV1Manage payment methods", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "additionalProperties": { "type": "string" }, - "type": "object", - "title": "Response Getv1Manage Payment Methods" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] 
- } - }, - "/api/credits/transactions": { - "get": { - "tags": ["v1", "credits"], - "summary": "Get credit history", - "operationId": "getV1Get credit history", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "transaction_time", - "in": "query", - "required": false, - "schema": { - "anyOf": [ - { "type": "string", "format": "date-time" }, - { "type": "null" } + "enum": [ + "WELCOME", + "USAGE_REASON", + "INTEGRATIONS", + "AGENT_CHOICE", + "AGENT_NEW_RUN", + "AGENT_INPUT", + "CONGRATS", + "MARKETPLACE_VISIT", + "BUILDER_OPEN" ], - "title": "Transaction Time" - } - }, - { - "name": "transaction_type", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "title": "Transaction Type" - } - }, - { - "name": "transaction_count_limit", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "default": 100, - "title": "Transaction Count Limit" + "type": "string", + "title": "Step" } } ], "responses": { "200": { "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/TransactionHistory" } - } - } + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" }, "422": { "description": "Validation Error", @@ -1412,70 +4500,20 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } } }, - "/api/credits/refunds": { - "get": { - "tags": ["v1", "credits"], - "summary": "Get refund requests", - "operationId": "getV1Get refund requests", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "items": { "$ref": "#/components/schemas/RefundRequest" }, - "type": "array", - "title": "Response Getv1Get Refund Requests" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/graphs": { - "get": { - "tags": ["v1", "graphs"], - "summary": "List user graphs", - "operationId": "getV1List user graphs", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "items": { "$ref": "#/components/schemas/GraphMeta" }, - "type": "array", - "title": "Response Getv1List User Graphs" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - }, + "/api/otto/ask": { "post": { - "tags": ["v1", "graphs"], - "summary": "Create new graph", - "operationId": "postV1Create new graph", + "tags": ["v2", "otto"], + "summary": "Proxy Otto Chat Request", + "description": "Proxy requests to Otto API while adding necessary security headers and logging.\nRequires an authenticated user.", + "operationId": "postV2Proxy otto chat request", "requestBody": { "content": { "application/json": { - "schema": { "$ref": "#/components/schemas/CreateGraph" } + "schema": { "$ref": "#/components/schemas/ChatRequest" } } }, "required": true @@ -1485,10 +4523,13 @@ "description": "Successful Response", "content": { "application/json": { - "schema": { "$ref": "#/components/schemas/GraphModel" } + "schema": { "$ref": "#/components/schemas/ApiResponse" } } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -1496,704 +4537,9 @@ 
"schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/graphs/{graph_id}/versions/{version}": { - "get": { - "tags": ["v1", "graphs"], - "summary": "Get graph version", - "operationId": "getV1Get graph version", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "version", - "in": "path", - "required": true, - "schema": { - "anyOf": [{ "type": "integer" }, { "type": "null" }], - "title": "Version" - } - }, - { - "name": "for_export", - "in": "query", - "required": false, - "schema": { - "type": "boolean", - "default": false, - "title": "For Export" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/GraphModel" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/graphs/{graph_id}": { - "get": { - "tags": ["v1", "graphs"], - "summary": "Get specific graph", - "operationId": "getV1Get specific graph", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "version", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "integer" }, { "type": "null" }], - "title": "Version" - } - }, - { - "name": "for_export", - "in": "query", - "required": false, - "schema": { - "type": "boolean", - "default": false, - "title": "For Export" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/GraphModel" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "delete": { - "tags": ["v1", "graphs"], - "summary": "Delete graph permanently", - "operationId": "deleteV1Delete graph permanently", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/DeleteGraphResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "put": { - "tags": ["v1", "graphs"], - "summary": "Update graph version", - "operationId": "putV1Update graph version", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { 
"$ref": "#/components/schemas/Graph" } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/GraphModel" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/graphs/{graph_id}/versions": { - "get": { - "tags": ["v1", "graphs"], - "summary": "Get all graph versions", - "operationId": "getV1Get all graph versions", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { "$ref": "#/components/schemas/GraphModel" }, - "title": "Response Getv1Get All Graph Versions" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/graphs/{graph_id}/versions/active": { - "put": { - "tags": ["v1", "graphs"], - "summary": "Set active graph version", - "operationId": "putV1Set active graph version", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/SetGraphActiveVersion" } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/graphs/{graph_id}/settings": { - "patch": { - "tags": ["v1", "graphs"], - "summary": "Update graph settings", - "description": "Update graph settings for the user's library agent.", - "operationId": "patchV1Update graph settings", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/GraphSettings" } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/GraphSettings" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/graphs/{graph_id}/execute/{graph_version}": { - "post": { - "tags": ["v1", "graphs"], - "summary": "Execute graph agent", - "operationId": "postV1Execute graph agent", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": 
"path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "graph_version", - "in": "path", - "required": true, - "schema": { - "anyOf": [{ "type": "integer" }, { "type": "null" }], - "title": "Graph Version" - } - }, - { - "name": "preset_id", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "title": "Preset Id" - } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Body_postV1Execute_graph_agent" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/GraphExecutionMeta" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/graphs/{graph_id}/executions/{graph_exec_id}/stop": { - "post": { - "tags": ["v1", "graphs"], - "summary": "Stop graph execution", - "operationId": "postV1Stop graph execution", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "graph_exec_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Exec Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "anyOf": [ - { "$ref": "#/components/schemas/GraphExecutionMeta" }, - { "type": "null" } - ], - "title": "Response Postv1Stop Graph Execution" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/executions": { - "get": { - "tags": ["v1", "graphs"], - "summary": "List all executions", - "operationId": "getV1List all executions", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "items": { - "$ref": "#/components/schemas/GraphExecutionMeta" - }, - "type": "array", - "title": "Response Getv1List All Executions" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/graphs/{graph_id}/executions": { - "get": { - "tags": ["v1", "graphs"], - "summary": "List graph executions", - "operationId": "getV1List graph executions", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "page", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "minimum": 1, - "description": "Page number (1-indexed)", - "default": 1, - "title": "Page" - }, - "description": "Page number (1-indexed)" - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "maximum": 100, - "minimum": 1, - "description": "Number of executions per page", - "default": 25, - "title": "Page Size" - }, - "description": "Number of executions per page" - } - ], - "responses": { - "200": { - "description": "Successful 
Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/GraphExecutionsPaginated" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/graphs/{graph_id}/executions/{graph_exec_id}": { - "get": { - "tags": ["v1", "graphs"], - "summary": "Get execution details", - "operationId": "getV1Get execution details", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "graph_exec_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Exec Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "anyOf": [ - { "$ref": "#/components/schemas/GraphExecution" }, - { "$ref": "#/components/schemas/GraphExecutionWithNodes" } - ], - "title": "Response Getv1Get Execution Details" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/executions/{graph_exec_id}": { - "delete": { - "tags": ["v1", "graphs"], - "summary": "Delete graph execution", - "operationId": "deleteV1Delete graph execution", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_exec_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Exec Id" } - } - ], - "responses": { - "204": { "description": "Successful Response" }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/graphs/{graph_id}/executions/{graph_exec_id}/share": { - "post": { - "tags": ["v1"], - "summary": "Enable Execution Sharing", - "description": "Enable sharing for a graph execution.", - "operationId": "postV1EnableExecutionSharing", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "graph_exec_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Exec Id" } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ShareRequest", - "default": {} - } - } } }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/ShareResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "delete": { - "tags": ["v1"], - "summary": "Disable Execution Sharing", - "description": "Disable sharing for a graph execution.", - "operationId": "deleteV1DisableExecutionSharing", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - 
"name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "graph_exec_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Exec Id" } - } - ], - "responses": { - "204": { "description": "Successful Response" }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } + "security": [{ "HTTPBearerJWT": [] }] } }, "/api/public/shared/{share_token}": { @@ -2236,46 +4582,32 @@ } } }, - "/api/graphs/{graph_id}/schedules": { + "/api/review/action": { "post": { - "tags": ["v1", "schedules"], - "summary": "Create execution schedule", - "operationId": "postV1Create execution schedule", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { - "type": "string", - "description": "ID of the graph to schedule", - "title": "Graph Id" - }, - "description": "ID of the graph to schedule" - } - ], + "tags": ["v2", "executions", "review", "v2", "executions", "review"], + "summary": "Process Review Action", + "description": "Process reviews with approve or reject actions.", + "operationId": "postV2ProcessReviewAction", "requestBody": { - "required": true, "content": { "application/json": { - "schema": { - "$ref": "#/components/schemas/ScheduleCreationRequest" - } + "schema": { "$ref": "#/components/schemas/ReviewRequest" } } - } + }, + "required": true }, "responses": { "200": { "description": "Successful Response", "content": { "application/json": { - "schema": { - "$ref": "#/components/schemas/GraphExecutionJobInfo" - } + "schema": { "$ref": "#/components/schemas/ReviewResponse" } } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -2283,40 +4615,115 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } - } - }, + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, + "/api/review/execution/{graph_exec_id}": { "get": { - "tags": ["v1", "schedules"], - "summary": "List execution schedules for a graph", - "operationId": "getV1List execution schedules for a graph", + "tags": ["v2", "executions", "review", "v2", "executions", "review"], + "summary": "Get Pending Reviews for Execution", + "description": "Get all pending reviews for a specific graph execution.\n\nRetrieves all reviews with status \"WAITING\" for the specified graph execution\nthat belong to the authenticated user. 
Results are ordered by creation time\n(oldest first) to preserve review order within the execution.\n\nArgs:\n graph_exec_id: ID of the graph execution to get reviews for\n user_id: Authenticated user ID from security dependency\n\nReturns:\n List of pending review objects for the specified execution\n\nRaises:\n HTTPException:\n - 403: If user doesn't own the graph execution\n - 500: If authentication fails or database error occurs\n\nNote:\n Only returns reviews owned by the authenticated user for security.\n Reviews with invalid status are excluded with warning logs.", + "operationId": "getV2Get pending reviews for execution", "security": [{ "HTTPBearerJWT": [] }], "parameters": [ { - "name": "graph_id", + "name": "graph_exec_id", "in": "path", "required": true, - "schema": { "type": "string", "title": "Graph Id" } + "schema": { "type": "string", "title": "Graph Exec Id" } } ], "responses": { "200": { - "description": "Successful Response", + "description": "List of pending reviews for the execution", "content": { "application/json": { "schema": { "type": "array", "items": { - "$ref": "#/components/schemas/GraphExecutionJobInfo" + "$ref": "#/components/schemas/PendingHumanReviewModel" }, - "title": "Response Getv1List Execution Schedules For A Graph" + "title": "Response Getv2Get Pending Reviews For Execution" } } } }, + "400": { "description": "Invalid graph execution ID" }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "403": { "description": "Access denied to graph execution" }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + }, + "500": { + "description": "Server error", + "content": { "application/json": {} } + } + } + } + }, + "/api/review/pending": { + "get": { + "tags": ["v2", "executions", "review", "v2", "executions", "review"], + "summary": "Get Pending Reviews", + "description": "Get all pending reviews for the current user.\n\nRetrieves all reviews with status \"WAITING\" that belong to the authenticated user.\nResults are ordered by creation time (newest first).\n\nArgs:\n user_id: Authenticated user ID from security dependency\n\nReturns:\n List of pending review objects with status converted to typed literals\n\nRaises:\n HTTPException: If authentication fails or database error occurs\n\nNote:\n Reviews with invalid status values are logged as warnings but excluded\n from results rather than failing the entire request.", + "operationId": "getV2Get pending reviews", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "page", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 1, + "description": "Page number (1-indexed)", + "default": 1, + "title": "Page" + }, + "description": "Page number (1-indexed)" + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 100, + "minimum": 1, + "description": "Number of reviews per page", + "default": 25, + "title": "Page Size" + }, + "description": "Number of reviews per page" + } + ], + "responses": { + "200": { + "description": "List of pending reviews", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/PendingHumanReviewModel" + }, + "title": "Response Getv2Get Pending Reviews" + } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": 
"Validation Error", "content": { @@ -2325,8 +4732,9 @@ } } }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + "500": { + "description": "Server error", + "content": { "application/json": {} } } } } @@ -2390,6 +4798,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -2397,61 +4808,65 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } } }, - "/api/api-keys": { + "/api/store/admin/listings": { "get": { - "tags": ["v1", "api-keys"], - "summary": "List user API keys", - "description": "List all API keys for the user", - "operationId": "getV1List user api keys", + "tags": ["v2", "admin", "store", "admin"], + "summary": "Get Admin Listings History", + "description": "Get store listings with their version history for admins.\n\nThis provides a consolidated view of listings with their versions,\nallowing for an expandable UI in the admin dashboard.\n\nArgs:\n status: Filter by submission status (PENDING, APPROVED, REJECTED)\n search: Search by name, description, or user email\n page: Page number for pagination\n page_size: Number of items per page\n\nReturns:\n StoreListingsWithVersionsResponse with listings and their versions", + "operationId": "getV2Get admin listings history", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "status", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { "$ref": "#/components/schemas/SubmissionStatus" }, + { "type": "null" } + ], + "title": "Status" + } + }, + { + "name": "search", + "in": "query", + "required": false, + "schema": { + "anyOf": [{ "type": "string" }, { "type": "null" }], + "title": "Search" + } + }, + { + "name": "page", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 1, "title": "Page" } + }, + { + "name": "page_size", + "in": "query", + "required": false, + "schema": { "type": "integer", "default": 20, "title": "Page Size" } + } + ], "responses": { "200": { "description": "Successful Response", "content": { "application/json": { "schema": { - "items": { "$ref": "#/components/schemas/APIKeyInfo" }, - "type": "array", - "title": "Response Getv1List User Api Keys" + "$ref": "#/components/schemas/StoreListingsWithVersionsResponse" } } } }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - }, - "post": { - "tags": ["v1", "api-keys"], - "summary": "Create new API key", - "description": "Create a new API key", - "operationId": "postV1Create new api key", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/CreateAPIKeyRequest" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateAPIKeyResponse" - } - } - } }, "422": { "description": "Validation Error", @@ -2460,37 +4875,37 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } - }, - "security": [{ "HTTPBearerJWT": [] }] + } } }, - "/api/api-keys/{key_id}": { + "/api/store/admin/submissions/download/{store_listing_version_id}": { "get": { - "tags": ["v1", "api-keys"], - "summary": "Get specific API key", - "description": "Get a specific API 
key", - "operationId": "getV1Get specific api key", + "tags": ["v2", "admin", "store", "admin", "store", "admin"], + "summary": "Admin Download Agent File", + "description": "Download the agent file by streaming its content.\n\nArgs:\n store_listing_version_id (str): The ID of the agent to download\n\nReturns:\n StreamingResponse: A streaming response containing the agent's graph data.\n\nRaises:\n HTTPException: If the agent is not found or an unexpected error occurs.", + "operationId": "getV2Admin download agent file", "security": [{ "HTTPBearerJWT": [] }], "parameters": [ { - "name": "key_id", + "name": "store_listing_version_id", "in": "path", "required": true, - "schema": { "type": "string", "title": "Key Id" } + "schema": { + "type": "string", + "description": "The ID of the agent to download", + "title": "Store Listing Version Id" + }, + "description": "The ID of the agent to download" } ], "responses": { "200": { "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/APIKeyInfo" } - } - } + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" }, "422": { "description": "Validation Error", @@ -2499,100 +4914,23 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "delete": { - "tags": ["v1", "api-keys"], - "summary": "Revoke API key", - "description": "Revoke an API key", - "operationId": "deleteV1Revoke api key", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "key_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Key Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/APIKeyInfo" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } } }, - "/api/api-keys/{key_id}/suspend": { + "/api/store/admin/submissions/{store_listing_version_id}/review": { "post": { - "tags": ["v1", "api-keys"], - "summary": "Suspend API key", - "description": "Suspend an API key", - "operationId": "postV1Suspend api key", + "tags": ["v2", "admin", "store", "admin"], + "summary": "Review Store Submission", + "description": "Review a store listing submission.\n\nArgs:\n store_listing_version_id: ID of the submission to review\n request: Review details including approval status and comments\n user_id: Authenticated admin user performing the review\n\nReturns:\n StoreSubmission with updated review information", + "operationId": "postV2Review store submission", "security": [{ "HTTPBearerJWT": [] }], "parameters": [ { - "name": "key_id", + "name": "store_listing_version_id", "in": "path", "required": true, - "schema": { "type": "string", "title": "Key Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/APIKeyInfo" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } 
- }, - "/api/api-keys/{key_id}/permissions": { - "put": { - "tags": ["v1", "api-keys"], - "summary": "Update key permissions", - "description": "Update API key permissions", - "operationId": "putV1Update key permissions", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "key_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Key Id" } + "schema": { "type": "string", "title": "Store Listing Version Id" } } ], "requestBody": { @@ -2600,7 +4938,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/UpdatePermissionsRequest" + "$ref": "#/components/schemas/ReviewSubmissionRequest" } } } @@ -2610,10 +4948,13 @@ "description": "Successful Response", "content": { "application/json": { - "schema": { "$ref": "#/components/schemas/APIKeyInfo" } + "schema": { "$ref": "#/components/schemas/StoreSubmission" } } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -2621,71 +4962,10 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } } }, - "/api/store/profile": { - "get": { - "tags": ["v2", "store", "private"], - "summary": "Get user profile", - "description": "Get the profile details for the authenticated user.\nCached for 1 hour per user.", - "operationId": "getV2Get user profile", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/ProfileDetails" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - }, - "post": { - "tags": ["v2", "store", "private"], - "summary": "Update user profile", - "description": "Update the store profile for the authenticated user.\n\nArgs:\n profile (Profile): The updated profile details\n user_id (str): ID of the authenticated user\n\nReturns:\n CreatorDetails: The updated profile\n\nRaises:\n HTTPException: If there is an error updating the profile", - "operationId": "postV2Update user profile", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/Profile" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/CreatorDetails" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, "/api/store/agents": { "get": { "tags": ["v2", "store", "public"], @@ -2778,6 +5058,44 @@ } } }, + "/api/store/agents/{store_listing_version_id}": { + "get": { + "tags": ["v2", "store"], + "summary": "Get agent by version", + "description": "Get Store Agent Details from Store Listing Version ID.", + "operationId": "getV2Get agent by version", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "store_listing_version_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Store Listing Version Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": 
"#/components/schemas/StoreAgentDetails" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, "/api/store/agents/{username}/{agent_name}": { "get": { "tags": ["v2", "store", "public"], @@ -2818,78 +5136,6 @@ } } }, - "/api/store/graph/{store_listing_version_id}": { - "get": { - "tags": ["v2", "store"], - "summary": "Get agent graph", - "description": "Get Agent Graph from Store Listing Version ID.", - "operationId": "getV2Get agent graph", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "store_listing_version_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Store Listing Version Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/store/agents/{store_listing_version_id}": { - "get": { - "tags": ["v2", "store"], - "summary": "Get agent by version", - "description": "Get Store Agent Details from Store Listing Version ID.", - "operationId": "getV2Get agent by version", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "store_listing_version_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Store Listing Version Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/StoreAgentDetails" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, "/api/store/agents/{username}/{agent_name}/review": { "post": { "tags": ["v2", "store"], @@ -2928,6 +5174,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -2935,9 +5184,40 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } + } + } + } + }, + "/api/store/creator/{username}": { + "get": { + "tags": ["v2", "store", "public"], + "summary": "Get creator details", + "description": "Get the details of a creator.\n- Creator Details Page", + "operationId": "getV2Get creator details", + "parameters": [ + { + "name": "username", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Username" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/CreatorDetails" } + } + } }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } } } } @@ -3016,28 +5296,29 @@ } } }, - "/api/store/creator/{username}": { + "/api/store/download/agents/{store_listing_version_id}": { "get": { "tags": ["v2", "store", "public"], - "summary": "Get creator details", 
- "description": "Get the details of a creator.\n- Creator Details Page", - "operationId": "getV2Get creator details", + "summary": "Download agent file", + "description": "Download the agent file by streaming its content.\n\nArgs:\n store_listing_version_id (str): The ID of the agent to download\n\nReturns:\n StreamingResponse: A streaming response containing the agent's graph data.\n\nRaises:\n HTTPException: If the agent is not found or an unexpected error occurs.", + "operationId": "getV2Download agent file", "parameters": [ { - "name": "username", + "name": "store_listing_version_id", "in": "path", "required": true, - "schema": { "type": "string", "title": "Username" } + "schema": { + "type": "string", + "description": "The ID of the agent to download", + "title": "Store Listing Version Id" + }, + "description": "The ID of the agent to download" } ], "responses": { "200": { "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/CreatorDetails" } - } - } + "content": { "application/json": { "schema": {} } } }, "422": { "description": "Validation Error", @@ -3050,6 +5331,54 @@ } } }, + "/api/store/graph/{store_listing_version_id}": { + "get": { + "tags": ["v2", "store"], + "summary": "Get agent graph", + "description": "Get Agent Graph from Store Listing Version ID.", + "operationId": "getV2Get agent graph", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "store_listing_version_id", + "in": "path", + "required": true, + "schema": { "type": "string", "title": "Store Listing Version Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + } + } + }, + "/api/store/metrics/cache": { + "get": { + "tags": ["v2", "store", "metrics"], + "summary": "Get cache metrics in Prometheus format", + "description": "Get cache metrics in Prometheus text format.\n\nReturns Prometheus-compatible metrics for monitoring cache performance.\nMetrics include size, maxsize, TTL, and hit rate for each cache.\n\nReturns:\n str: Prometheus-formatted metrics text", + "operationId": "getV2Get cache metrics in prometheus format", + "responses": { + "200": { + "description": "Successful Response", + "content": { "text/plain": { "schema": { "type": "string" } } } + } + } + } + }, "/api/store/myagents": { "get": { "tags": ["v2", "store", "private"], @@ -3090,6 +5419,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -3097,40 +5429,56 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } } }, - "/api/store/submissions/{submission_id}": { - "delete": { + "/api/store/profile": { + "get": { "tags": ["v2", "store", "private"], - "summary": "Delete store submission", - "description": "Delete a store listing submission.\n\nArgs:\n user_id (str): ID of the authenticated user\n submission_id (str): ID of the submission to be deleted\n\nReturns:\n bool: True if the submission was successfully deleted, False otherwise", - "operationId": "deleteV2Delete store submission", - "security": [{ "HTTPBearerJWT": [] }], - 
"parameters": [ - { - "name": "submission_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Submission Id" } - } - ], + "summary": "Get user profile", + "description": "Get the profile details for the authenticated user.\nCached for 1 hour per user.", + "operationId": "getV2Get user profile", "responses": { "200": { "description": "Successful Response", "content": { "application/json": { - "schema": { - "type": "boolean", - "title": "Response Deletev2Delete Store Submission" - } + "schema": { "$ref": "#/components/schemas/ProfileDetails" } } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + } + }, + "security": [{ "HTTPBearerJWT": [] }] + }, + "post": { + "tags": ["v2", "store", "private"], + "summary": "Update user profile", + "description": "Update the store profile for the authenticated user.\n\nArgs:\n profile (Profile): The updated profile details\n user_id (str): ID of the authenticated user\n\nReturns:\n CreatorDetails: The updated profile\n\nRaises:\n HTTPException: If there is an error updating the profile", + "operationId": "postV2Update user profile", + "requestBody": { + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/Profile" } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/CreatorDetails" } + } + } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -3138,11 +5486,9 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } - } + }, + "security": [{ "HTTPBearerJWT": [] }] } }, "/api/store/submissions": { @@ -3177,6 +5523,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -3184,9 +5533,6 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } }, @@ -3215,6 +5561,9 @@ } } }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, "422": { "description": "Validation Error", "content": { @@ -3222,13 +5571,80 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } + } + } + } + }, + "/api/store/submissions/generate_image": { + "post": { + "tags": ["v2", "store", "private"], + "summary": "Generate submission image", + "description": "Generate an image for a store listing submission.\n\nArgs:\n agent_id (str): ID of the agent to generate an image for\n user_id (str): ID of the authenticated user\n\nReturns:\n JSONResponse: JSON containing the URL of the generated image", + "operationId": "postV2Generate submission image", + "security": [{ "HTTPBearerJWT": [] }], + "parameters": [ + { + "name": "agent_id", + "in": "query", + "required": true, + "schema": { "type": "string", "title": "Agent Id" } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } } } } }, + "/api/store/submissions/media": { + "post": { + "tags": ["v2", 
"store", "private"], + "summary": "Upload submission media", + "description": "Upload media (images/videos) for a store listing submission.\n\nArgs:\n file (UploadFile): The media file to upload\n user_id (str): ID of the authenticated user uploading the media\n\nReturns:\n str: URL of the uploaded media file\n\nRaises:\n HTTPException: If there is an error uploading the media", + "operationId": "postV2Upload submission media", + "requestBody": { + "content": { + "multipart/form-data": { + "schema": { + "$ref": "#/components/schemas/Body_postV2Upload_submission_media" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { "application/json": { "schema": {} } } + }, + "401": { + "$ref": "#/components/responses/HTTP401NotAuthenticatedError" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/HTTPValidationError" } + } + } + } + }, + "security": [{ "HTTPBearerJWT": [] }] + } + }, "/api/store/submissions/{store_listing_version_id}": { "put": { "tags": ["v2", "store", "private"], @@ -3263,113 +5679,8 @@ } } }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/store/submissions/media": { - "post": { - "tags": ["v2", "store", "private"], - "summary": "Upload submission media", - "description": "Upload media (images/videos) for a store listing submission.\n\nArgs:\n file (UploadFile): The media file to upload\n user_id (str): ID of the authenticated user uploading the media\n\nReturns:\n str: URL of the uploaded media file\n\nRaises:\n HTTPException: If there is an error uploading the media", - "operationId": "postV2Upload submission media", - "requestBody": { - "content": { - "multipart/form-data": { - "schema": { - "$ref": "#/components/schemas/Body_postV2Upload_submission_media" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/store/submissions/generate_image": { - "post": { - "tags": ["v2", "store", "private"], - "summary": "Generate submission image", - "description": "Generate an image for a store listing submission.\n\nArgs:\n agent_id (str): ID of the agent to generate an image for\n user_id (str): ID of the authenticated user\n\nReturns:\n JSONResponse: JSON containing the URL of the generated image", - "operationId": "postV2Generate submission image", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "agent_id", - "in": "query", - "required": true, - "schema": { "type": "string", "title": "Agent Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - 
"/api/store/download/agents/{store_listing_version_id}": { - "get": { - "tags": ["v2", "store", "public"], - "summary": "Download agent file", - "description": "Download the agent file by streaming its content.\n\nArgs:\n store_listing_version_id (str): The ID of the agent to download\n\nReturns:\n StreamingResponse: A streaming response containing the agent's graph data.\n\nRaises:\n HTTPException: If the agent is not found or an unexpected error occurs.", - "operationId": "getV2Download agent file", - "parameters": [ - { - "name": "store_listing_version_id", - "in": "path", - "required": true, - "schema": { - "type": "string", - "description": "The ID of the agent to download", - "title": "Store Listing Version Id" - }, - "description": "The ID of the agent to download" - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } }, "422": { "description": "Validation Error", @@ -3382,1411 +5693,19 @@ } } }, - "/api/store/metrics/cache": { - "get": { - "tags": ["v2", "store", "metrics"], - "summary": "Get cache metrics in Prometheus format", - "description": "Get cache metrics in Prometheus text format.\n\nReturns Prometheus-compatible metrics for monitoring cache performance.\nMetrics include size, maxsize, TTL, and hit rate for each cache.\n\nReturns:\n str: Prometheus-formatted metrics text", - "operationId": "getV2Get cache metrics in prometheus format", - "responses": { - "200": { - "description": "Successful Response", - "content": { "text/plain": { "schema": { "type": "string" } } } - } - } - } - }, - "/api/builder/suggestions": { - "get": { - "tags": ["v2"], - "summary": "Get Builder suggestions", - "description": "Get all suggestions for the Blocks Menu.", - "operationId": "getV2Get builder suggestions", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/SuggestionsResponse" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/builder/categories": { - "get": { - "tags": ["v2"], - "summary": "Get Builder block categories", - "description": "Get all block categories with a specified number of blocks per category.", - "operationId": "getV2Get builder block categories", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "blocks_per_category", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "default": 3, - "title": "Blocks Per Category" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/BlockCategoryResponse" - }, - "title": "Response Getv2Get Builder Block Categories" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/builder/blocks": { - "get": { - "tags": ["v2"], - "summary": "Get Builder blocks", - "description": "Get blocks based on either category, type, or provider.", - "operationId": "getV2Get builder blocks", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "category", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": 
"null" }], - "title": "Category" - } - }, - { - "name": "type", - "in": "query", - "required": false, - "schema": { - "anyOf": [ - { - "enum": ["all", "input", "action", "output"], - "type": "string" - }, - { "type": "null" } - ], - "title": "Type" - } - }, - { - "name": "provider", - "in": "query", - "required": false, - "schema": { - "anyOf": [ - { - "type": "string", - "description": "Provider name for integrations. Can be any string value, including custom provider names." - }, - { "type": "null" } - ], - "title": "Provider" - } - }, - { - "name": "page", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 1, "title": "Page" } - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 50, "title": "Page Size" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/BlockResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/builder/blocks/batch": { - "get": { - "tags": ["v2"], - "summary": "Get specific blocks", - "description": "Get specific blocks by their IDs.", - "operationId": "getV2Get specific blocks", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "block_ids", - "in": "query", - "required": true, - "schema": { - "type": "array", - "items": { "type": "string" }, - "title": "Block Ids" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { "$ref": "#/components/schemas/BlockInfo" }, - "title": "Response Getv2Get Specific Blocks" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/builder/providers": { - "get": { - "tags": ["v2"], - "summary": "Get Builder integration providers", - "description": "Get all integration providers with their block counts.", - "operationId": "getV2Get builder integration providers", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "page", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 1, "title": "Page" } - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 50, "title": "Page Size" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/ProviderResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/builder/search": { - "get": { - "tags": ["v2", "store", "private"], - "summary": "Builder search", - "description": "Search for blocks (including integrations), marketplace agents, and user library agents.", - "operationId": "getV2Builder search", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": 
"search_query", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "title": "Search Query" - } - }, - { - "name": "filter", - "in": "query", - "required": false, - "schema": { - "anyOf": [ - { - "type": "array", - "items": { - "enum": [ - "blocks", - "integrations", - "marketplace_agents", - "my_agents" - ], - "type": "string" - } - }, - { "type": "null" } - ], - "title": "Filter" - } - }, - { - "name": "search_id", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "title": "Search Id" - } - }, - { - "name": "by_creator", - "in": "query", - "required": false, - "schema": { - "anyOf": [ - { "type": "array", "items": { "type": "string" } }, - { "type": "null" } - ], - "title": "By Creator" - } - }, - { - "name": "page", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 1, "title": "Page" } - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 50, "title": "Page Size" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/SearchResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/builder/counts": { - "get": { - "tags": ["v2"], - "summary": "Get Builder item counts", - "description": "Get item counts for the menu categories in the Blocks Menu.", - "operationId": "getV2Get builder item counts", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/CountResponse" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/store/admin/listings": { - "get": { - "tags": ["v2", "admin", "store", "admin"], - "summary": "Get Admin Listings History", - "description": "Get store listings with their version history for admins.\n\nThis provides a consolidated view of listings with their versions,\nallowing for an expandable UI in the admin dashboard.\n\nArgs:\n status: Filter by submission status (PENDING, APPROVED, REJECTED)\n search: Search by name, description, or user email\n page: Page number for pagination\n page_size: Number of items per page\n\nReturns:\n StoreListingsWithVersionsResponse with listings and their versions", - "operationId": "getV2Get admin listings history", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "status", - "in": "query", - "required": false, - "schema": { - "anyOf": [ - { "$ref": "#/components/schemas/SubmissionStatus" }, - { "type": "null" } - ], - "title": "Status" - } - }, - { - "name": "search", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "title": "Search" - } - }, - { - "name": "page", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 1, "title": "Page" } - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 20, "title": "Page Size" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - 
"schema": { - "$ref": "#/components/schemas/StoreListingsWithVersionsResponse" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/store/admin/submissions/{store_listing_version_id}/review": { - "post": { - "tags": ["v2", "admin", "store", "admin"], - "summary": "Review Store Submission", - "description": "Review a store listing submission.\n\nArgs:\n store_listing_version_id: ID of the submission to review\n request: Review details including approval status and comments\n user_id: Authenticated admin user performing the review\n\nReturns:\n StoreSubmission with updated review information", - "operationId": "postV2Review store submission", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "store_listing_version_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Store Listing Version Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ReviewSubmissionRequest" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/StoreSubmission" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/store/admin/submissions/download/{store_listing_version_id}": { - "get": { - "tags": ["v2", "admin", "store", "admin", "store", "admin"], - "summary": "Admin Download Agent File", - "description": "Download the agent file by streaming its content.\n\nArgs:\n store_listing_version_id (str): The ID of the agent to download\n\nReturns:\n StreamingResponse: A streaming response containing the agent's graph data.\n\nRaises:\n HTTPException: If the agent is not found or an unexpected error occurs.", - "operationId": "getV2Admin download agent file", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "store_listing_version_id", - "in": "path", - "required": true, - "schema": { - "type": "string", - "description": "The ID of the agent to download", - "title": "Store Listing Version Id" - }, - "description": "The ID of the agent to download" - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/credits/admin/add_credits": { - "post": { - "tags": ["v2", "admin", "credits", "admin"], - "summary": "Add Credits to User", - "operationId": "postV2Add credits to user", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Body_postV2Add_credits_to_user" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/AddUserCreditsResponse" - } - } - } - }, - "422": { - "description": 
"Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/credits/admin/users_history": { - "get": { - "tags": ["v2", "admin", "credits", "admin"], - "summary": "Get All Users History", - "operationId": "getV2Get all users history", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "search", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "title": "Search" - } - }, - { - "name": "page", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 1, "title": "Page" } - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 20, "title": "Page Size" } - }, - { - "name": "transaction_filter", - "in": "query", - "required": false, - "schema": { - "anyOf": [ - { "$ref": "#/components/schemas/CreditTransactionType" }, - { "type": "null" } - ], - "title": "Transaction Filter" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/UserHistoryResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/executions/admin/execution_analytics/config": { - "get": { - "tags": ["v2", "admin", "admin", "execution_analytics"], - "summary": "Get Execution Analytics Configuration", - "description": "Get the configuration for execution analytics including:\n- Available AI models with metadata\n- Default system and user prompts\n- Recommended model selection", - "operationId": "getV2Get execution analytics configuration", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ExecutionAnalyticsConfig" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/executions/admin/execution_analytics": { - "post": { - "tags": ["v2", "admin", "admin", "execution_analytics"], - "summary": "Generate Execution Analytics", - "description": "Generate activity summaries and correctness scores for graph executions.\n\nThis endpoint:\n1. Fetches all completed executions matching the criteria\n2. Identifies executions missing activity_status or correctness_score\n3. Generates missing data using AI in batches\n4. Updates the database with new stats\n5. 
Returns a detailed report of the analytics operation", - "operationId": "postV2Generate execution analytics", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ExecutionAnalyticsRequest" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ExecutionAnalyticsResponse" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/executions/admin/execution_accuracy_trends": { - "get": { - "tags": ["v2", "admin", "admin", "execution_analytics"], - "summary": "Get Execution Accuracy Trends and Alerts", - "description": "Get execution accuracy trends with moving averages and alert detection.\nSimple single-query approach.", - "operationId": "getV2Get execution accuracy trends and alerts", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "query", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "user_id", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "title": "User Id" - } - }, - { - "name": "days_back", - "in": "query", - "required": false, - "schema": { "type": "integer", "default": 30, "title": "Days Back" } - }, - { - "name": "drop_threshold", - "in": "query", - "required": false, - "schema": { - "type": "number", - "default": 10.0, - "title": "Drop Threshold" - } - }, - { - "name": "include_historical", - "in": "query", - "required": false, - "schema": { - "type": "boolean", - "default": false, - "title": "Include Historical" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/AccuracyTrendsResponse" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/review/pending": { - "get": { - "tags": ["v2", "executions", "review", "v2", "executions", "review"], - "summary": "Get Pending Reviews", - "description": "Get all pending reviews for the current user.\n\nRetrieves all reviews with status \"WAITING\" that belong to the authenticated user.\nResults are ordered by creation time (newest first).\n\nArgs:\n user_id: Authenticated user ID from security dependency\n\nReturns:\n List of pending review objects with status converted to typed literals\n\nRaises:\n HTTPException: If authentication fails or database error occurs\n\nNote:\n Reviews with invalid status values are logged as warnings but excluded\n from results rather than failing the entire request.", - "operationId": "getV2Get pending reviews", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "page", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "minimum": 1, - "description": "Page number (1-indexed)", - "default": 1, - "title": "Page" - }, - "description": "Page number (1-indexed)" - }, - { - "name": "page_size", - "in": "query", - "required": false, - 
"schema": { - "type": "integer", - "maximum": 100, - "minimum": 1, - "description": "Number of reviews per page", - "default": 25, - "title": "Page Size" - }, - "description": "Number of reviews per page" - } - ], - "responses": { - "200": { - "description": "List of pending reviews", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/PendingHumanReviewModel" - }, - "title": "Response Getv2Get Pending Reviews" - } - } - } - }, - "500": { - "description": "Server error", - "content": { "application/json": {} } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/review/execution/{graph_exec_id}": { - "get": { - "tags": ["v2", "executions", "review", "v2", "executions", "review"], - "summary": "Get Pending Reviews for Execution", - "description": "Get all pending reviews for a specific graph execution.\n\nRetrieves all reviews with status \"WAITING\" for the specified graph execution\nthat belong to the authenticated user. Results are ordered by creation time\n(oldest first) to preserve review order within the execution.\n\nArgs:\n graph_exec_id: ID of the graph execution to get reviews for\n user_id: Authenticated user ID from security dependency\n\nReturns:\n List of pending review objects for the specified execution\n\nRaises:\n HTTPException:\n - 403: If user doesn't own the graph execution\n - 500: If authentication fails or database error occurs\n\nNote:\n Only returns reviews owned by the authenticated user for security.\n Reviews with invalid status are excluded with warning logs.", - "operationId": "getV2Get pending reviews for execution", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_exec_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Exec Id" } - } - ], - "responses": { - "200": { - "description": "List of pending reviews for the execution", - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "$ref": "#/components/schemas/PendingHumanReviewModel" - }, - "title": "Response Getv2Get Pending Reviews For Execution" - } - } - } - }, - "400": { "description": "Invalid graph execution ID" }, - "403": { "description": "Access denied to graph execution" }, - "500": { - "description": "Server error", - "content": { "application/json": {} } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/review/action": { - "post": { - "tags": ["v2", "executions", "review", "v2", "executions", "review"], - "summary": "Process Review Action", - "description": "Process reviews with approve or reject actions.", - "operationId": "postV2ProcessReviewAction", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/ReviewRequest" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/ReviewResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": 
"#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/library/presets": { - "get": { - "tags": ["v2", "presets"], - "summary": "List presets", - "description": "Retrieve a paginated list of presets for the current user.", - "operationId": "getV2List presets", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "page", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "minimum": 1, - "default": 1, - "title": "Page" - } - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "minimum": 1, - "default": 10, - "title": "Page Size" - } - }, - { - "name": "graph_id", - "in": "query", - "required": true, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "description": "Allows to filter presets by a specific agent graph", - "title": "Graph Id" - }, - "description": "Allows to filter presets by a specific agent graph" - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/LibraryAgentPresetResponse" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "post": { - "tags": ["v2", "presets"], - "summary": "Create a new preset", - "description": "Create a new preset for the current user.", - "operationId": "postV2Create a new preset", - "security": [{ "HTTPBearerJWT": [] }], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "anyOf": [ - { - "$ref": "#/components/schemas/LibraryAgentPresetCreatable" - }, - { - "$ref": "#/components/schemas/LibraryAgentPresetCreatableFromGraphExecution" - } - ], - "title": "Preset" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgentPreset" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/library/presets/{preset_id}": { - "get": { - "tags": ["v2", "presets"], - "summary": "Get a specific preset", - "description": "Retrieve details for a specific preset by its ID.", - "operationId": "getV2Get a specific preset", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "preset_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Preset Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgentPreset" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "patch": { - "tags": ["v2", "presets"], - "summary": "Update an existing preset", - "description": "Update an existing preset by its ID.", - "operationId": "patchV2Update 
an existing preset", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "preset_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Preset Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/LibraryAgentPresetUpdatable" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgentPreset" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, + "/api/store/submissions/{submission_id}": { "delete": { - "tags": ["v2", "presets"], - "summary": "Delete a preset", - "description": "Delete an existing preset by its ID.", - "operationId": "deleteV2Delete a preset", + "tags": ["v2", "store", "private"], + "summary": "Delete store submission", + "description": "Delete a store listing submission.\n\nArgs:\n user_id (str): ID of the authenticated user\n submission_id (str): ID of the submission to be deleted\n\nReturns:\n bool: True if the submission was successfully deleted, False otherwise", + "operationId": "deleteV2Delete store submission", "security": [{ "HTTPBearerJWT": [] }], "parameters": [ { - "name": "preset_id", + "name": "submission_id", "in": "path", "required": true, - "schema": { "type": "string", "title": "Preset Id" } - } - ], - "responses": { - "204": { "description": "Successful Response" }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/library/presets/setup-trigger": { - "post": { - "tags": ["v2", "presets"], - "summary": "Setup Trigger", - "description": "Sets up a webhook-triggered `LibraryAgentPreset` for a `LibraryAgent`.\nReturns the correspondingly created `LibraryAgentPreset` with `webhook_id` set.", - "operationId": "postV2SetupTrigger", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/TriggeredPresetSetupRequest" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgentPreset" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/library/presets/{preset_id}/execute": { - "post": { - "tags": ["v2", "presets", "presets"], - "summary": "Execute a preset", - "description": "Execute a preset with the given graph and node input for the current user.", - "operationId": "postV2Execute a preset", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "preset_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Preset Id" } - } - ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Body_postV2Execute_a_preset" - } - } - } - }, - "responses": { 
- "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/GraphExecutionMeta" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/library/agents": { - "get": { - "tags": ["v2", "library", "private"], - "summary": "List Library Agents", - "description": "Get all agents in the user's library (both created and saved).\n\nArgs:\n user_id: ID of the authenticated user.\n search_term: Optional search term to filter agents by name/description.\n filter_by: List of filters to apply (favorites, created by user).\n sort_by: List of sorting criteria (created date, updated date).\n page: Page number to retrieve.\n page_size: Number of agents per page.\n\nReturns:\n A LibraryAgentResponse containing agents and pagination metadata.\n\nRaises:\n HTTPException: If a server/database error occurs.", - "operationId": "getV2List library agents", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "search_term", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "string" }, { "type": "null" }], - "description": "Search term to filter agents", - "title": "Search Term" - }, - "description": "Search term to filter agents" - }, - { - "name": "sort_by", - "in": "query", - "required": false, - "schema": { - "$ref": "#/components/schemas/LibraryAgentSort", - "description": "Criteria to sort results by", - "default": "updatedAt" - }, - "description": "Criteria to sort results by" - }, - { - "name": "page", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "minimum": 1, - "description": "Page number to retrieve (must be >= 1)", - "default": 1, - "title": "Page" - }, - "description": "Page number to retrieve (must be >= 1)" - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "minimum": 1, - "description": "Number of agents per page (must be >= 1)", - "default": 15, - "title": "Page Size" - }, - "description": "Number of agents per page (must be >= 1)" - } - ], - "responses": { - "200": { - "description": "List of library agents", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/LibraryAgentResponse" - } - } - } - }, - "500": { - "description": "Server error", - "content": { "application/json": {} } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "post": { - "tags": ["v2", "library", "private"], - "summary": "Add Marketplace Agent", - "description": "Add an agent from the marketplace to the user's library.\n\nArgs:\n store_listing_version_id: ID of the store listing version to add.\n user_id: ID of the authenticated user.\n\nReturns:\n library_model.LibraryAgent: Agent added to the library\n\nRaises:\n HTTPException(404): If the listing version is not found.\n HTTPException(500): If a server/database error occurs.", - "operationId": "postV2Add marketplace agent", - "security": [{ "HTTPBearerJWT": [] }], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": 
"#/components/schemas/Body_postV2Add_marketplace_agent" - } - } - } - }, - "responses": { - "201": { - "description": "Agent added successfully", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgent" } - } - } - }, - "404": { "description": "Store listing version not found" }, - "500": { "description": "Server error" }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/library/agents/favorites": { - "get": { - "tags": ["v2", "library", "private"], - "summary": "List Favorite Library Agents", - "description": "Get all favorite agents in the user's library.\n\nArgs:\n user_id: ID of the authenticated user.\n page: Page number to retrieve.\n page_size: Number of agents per page.\n\nReturns:\n A LibraryAgentResponse containing favorite agents and pagination metadata.\n\nRaises:\n HTTPException: If a server/database error occurs.", - "operationId": "getV2List favorite library agents", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "page", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "minimum": 1, - "description": "Page number to retrieve (must be >= 1)", - "default": 1, - "title": "Page" - }, - "description": "Page number to retrieve (must be >= 1)" - }, - { - "name": "page_size", - "in": "query", - "required": false, - "schema": { - "type": "integer", - "minimum": 1, - "description": "Number of agents per page (must be >= 1)", - "default": 15, - "title": "Page Size" - }, - "description": "Number of agents per page (must be >= 1)" + "schema": { "type": "string", "title": "Submission Id" } } ], "responses": { @@ -4795,447 +5714,14 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/LibraryAgentResponse" - } - } - } - }, - "500": { - "description": "Server error", - "content": { "application/json": {} } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/library/agents/{library_agent_id}": { - "get": { - "tags": ["v2", "library", "private"], - "summary": "Get Library Agent", - "operationId": "getV2Get library agent", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "library_agent_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Library Agent Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgent" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "patch": { - "tags": ["v2", "library", "private"], - "summary": "Update Library Agent", - "description": "Update the library agent with the given fields.\n\nArgs:\n library_agent_id: ID of the library agent to update.\n payload: Fields to update (auto_update_version, is_favorite, etc.).\n user_id: ID of the authenticated user.\n\nRaises:\n HTTPException(500): If a server/database error occurs.", - 
"operationId": "patchV2Update library agent", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "library_agent_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Library Agent Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/LibraryAgentUpdateRequest" - } - } - } - }, - "responses": { - "200": { - "description": "Agent updated successfully", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgent" } - } - } - }, - "500": { "description": "Server error" }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - }, - "delete": { - "tags": ["v2", "library", "private"], - "summary": "Delete Library Agent", - "description": "Soft-delete the specified library agent.\n\nArgs:\n library_agent_id: ID of the library agent to delete.\n user_id: ID of the authenticated user.\n\nReturns:\n 204 No Content if successful.\n\nRaises:\n HTTPException(404): If the agent does not exist.\n HTTPException(500): If a server/database error occurs.", - "operationId": "deleteV2Delete library agent", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "library_agent_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Library Agent Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "204": { "description": "Agent deleted successfully" }, - "404": { "description": "Agent not found" }, - "500": { "description": "Server error" }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/library/agents/by-graph/{graph_id}": { - "get": { - "tags": ["v2", "library", "private"], - "summary": "Get Library Agent By Graph Id", - "operationId": "getV2GetLibraryAgentByGraphId", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "graph_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Graph Id" } - }, - { - "name": "version", - "in": "query", - "required": false, - "schema": { - "anyOf": [{ "type": "integer" }, { "type": "null" }], - "title": "Version" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgent" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/library/agents/marketplace/{store_listing_version_id}": { - "get": { - "tags": ["v2", "library", "private", "store", "library"], - "summary": "Get Agent By Store ID", - "description": "Get Library Agent from Store Listing Version ID.", - "operationId": "getV2Get agent by store id", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "store_listing_version_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": 
"Store Listing Version Id" } - } - ], - "responses": { - "200": { - "description": "Library agent found", - "content": { - "application/json": { - "schema": { - "anyOf": [ - { "$ref": "#/components/schemas/LibraryAgent" }, - { "type": "null" } - ], - "title": "Response Getv2Get Agent By Store Id" - } - } - } - }, - "404": { "description": "Agent not found" }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/library/agents/{library_agent_id}/fork": { - "post": { - "tags": ["v2", "library", "private"], - "summary": "Fork Library Agent", - "operationId": "postV2Fork library agent", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "library_agent_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Library Agent Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/LibraryAgent" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/otto/ask": { - "post": { - "tags": ["v2", "otto"], - "summary": "Proxy Otto Chat Request", - "description": "Proxy requests to Otto API while adding necessary security headers and logging.\nRequires an authenticated user.", - "operationId": "postV2Proxy otto chat request", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/ChatRequest" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/ApiResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/email/unsubscribe": { - "post": { - "tags": ["v1", "email"], - "summary": "One Click Email Unsubscribe", - "operationId": "postV1One click email unsubscribe", - "parameters": [ - { - "name": "token", - "in": "query", - "required": true, - "schema": { "type": "string", "title": "Token" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - } - } - } - }, - "/api/email/": { - "post": { - "tags": ["v1", "email"], - "summary": "Handle Postmark Email Webhooks", - "operationId": "postV1Handle postmark email webhooks", - "requestBody": { - "content": { - "application/json": { - "schema": { - "oneOf": [ - { "$ref": "#/components/schemas/PostmarkDeliveryWebhook" }, - { "$ref": "#/components/schemas/PostmarkBounceWebhook" }, - { - "$ref": "#/components/schemas/PostmarkSpamComplaintWebhook" - }, - { "$ref": "#/components/schemas/PostmarkOpenWebhook" }, - { "$ref": "#/components/schemas/PostmarkClickWebhook" }, - { - "$ref": 
"#/components/schemas/PostmarkSubscriptionChangeWebhook" - } - ], - "title": "Webhook", - "discriminator": { - "propertyName": "RecordType", - "mapping": { - "Delivery": "#/components/schemas/PostmarkDeliveryWebhook", - "Bounce": "#/components/schemas/PostmarkBounceWebhook", - "SpamComplaint": "#/components/schemas/PostmarkSpamComplaintWebhook", - "Open": "#/components/schemas/PostmarkOpenWebhook", - "Click": "#/components/schemas/PostmarkClickWebhook", - "SubscriptionChange": "#/components/schemas/PostmarkSubscriptionChangeWebhook" - } - } - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - } - }, - "security": [{ "APIKeyAuthenticator-X-Postmark-Webhook-Token": [] }] - } - }, - "/api/chat/sessions": { - "post": { - "tags": ["v2", "chat", "chat"], - "summary": "Create Session", - "description": "Create a new chat session.\n\nInitiates a new chat session for either an authenticated or anonymous user.\n\nArgs:\n user_id: The optional authenticated user ID parsed from the JWT. If missing, creates an anonymous session.\n\nReturns:\n CreateSessionResponse: Details of the created session.", - "operationId": "postV2CreateSession", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CreateSessionResponse" + "type": "boolean", + "title": "Response Deletev2Delete Store Submission" } } } }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/chat/sessions/{session_id}": { - "get": { - "tags": ["v2", "chat", "chat"], - "summary": "Get Session", - "description": "Retrieve the details of a specific chat session.\n\nLooks up a chat session by ID for the given user (if authenticated) and returns all session data including messages.\n\nArgs:\n session_id: The unique identifier for the desired chat session.\n user_id: The optional authenticated user ID, or None for anonymous access.\n\nReturns:\n SessionDetailResponse: Details for the requested session; raises NotFoundError if not found.", - "operationId": "getV2GetSession", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "session_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Session Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SessionDetailResponse" - } - } - } }, "422": { "description": "Validation Error", @@ -5244,491 +5730,6 @@ "schema": { "$ref": "#/components/schemas/HTTPValidationError" } } } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/chat/sessions/{session_id}/stream": { - "get": { - "tags": ["v2", "chat", "chat"], - "summary": "Stream Chat", - "description": "Stream chat responses for a session.\n\nStreams the AI/completion responses in real time over Server-Sent Events (SSE), including:\n - Text fragments as they are generated\n - Tool call UI elements (if invoked)\n - Tool execution results\n\nArgs:\n session_id: The chat session identifier to associate with the streamed messages.\n message: The user's new message to process.\n user_id: Optional 
authenticated user ID.\n is_user_message: Whether the message is a user message.\nReturns:\n StreamingResponse: SSE-formatted response chunks.", - "operationId": "getV2StreamChat", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "session_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Session Id" } - }, - { - "name": "message", - "in": "query", - "required": true, - "schema": { - "type": "string", - "minLength": 1, - "maxLength": 10000, - "title": "Message" - } - }, - { - "name": "is_user_message", - "in": "query", - "required": false, - "schema": { - "type": "boolean", - "default": true, - "title": "Is User Message" - } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/chat/sessions/{session_id}/assign-user": { - "patch": { - "tags": ["v2", "chat", "chat"], - "summary": "Session Assign User", - "description": "Assign an authenticated user to a chat session.\n\nUsed (typically post-login) to claim an existing anonymous session as the current authenticated user.\n\nArgs:\n session_id: The identifier for the (previously anonymous) session.\n user_id: The authenticated user's ID to associate with the session.\n\nReturns:\n dict: Status of the assignment.", - "operationId": "patchV2SessionAssignUser", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "session_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Session Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "type": "object", - "additionalProperties": true, - "title": "Response Patchv2Sessionassignuser" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/chat/health": { - "get": { - "tags": ["v2", "chat", "chat"], - "summary": "Health Check", - "description": "Health check endpoint for the chat service.\n\nPerforms a full cycle test of session creation, assignment, and retrieval. 
Should always return healthy\nif the service and data layer are operational.\n\nReturns:\n dict: A status dictionary indicating health, service name, and API version.", - "operationId": "getV2HealthCheck", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "additionalProperties": true, - "type": "object", - "title": "Response Getv2Healthcheck" - } - } - } - } - } - } - }, - "/api/oauth/app/{client_id}": { - "get": { - "tags": ["oauth"], - "summary": "Get Oauth App Info", - "description": "Get public information about an OAuth application.\n\nThis endpoint is used by the consent screen to display application details\nto the user before they authorize access.\n\nReturns:\n- name: Application name\n- description: Application description (if provided)\n- scopes: List of scopes the application is allowed to request", - "operationId": "getOauthGetOauthAppInfo", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "client_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "Client Id" } - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/OAuthApplicationPublicInfo" - } - } - } - }, - "404": { "description": "Application not found or disabled" }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/oauth/authorize": { - "post": { - "tags": ["oauth"], - "summary": "Authorize", - "description": "OAuth 2.0 Authorization Endpoint\n\nUser must be logged in (authenticated with Supabase JWT).\nThis endpoint creates an authorization code and returns a redirect URL.\n\nPKCE (Proof Key for Code Exchange) is REQUIRED for all authorization requests.\n\nThe frontend consent screen should call this endpoint after the user approves,\nthen redirect the user to the returned `redirect_url`.\n\nRequest Body:\n- client_id: The OAuth application's client ID\n- redirect_uri: Where to redirect after authorization (must match registered URI)\n- scopes: List of permissions (e.g., \"EXECUTE_GRAPH READ_GRAPH\")\n- state: Anti-CSRF token provided by client (will be returned in redirect)\n- response_type: Must be \"code\" (for authorization code flow)\n- code_challenge: PKCE code challenge (required)\n- code_challenge_method: \"S256\" (recommended) or \"plain\"\n\nReturns:\n- redirect_url: The URL to redirect the user to (includes authorization code)\n\nError cases return a redirect_url with error parameters, or raise HTTPException\nfor critical errors (like invalid redirect_uri).", - "operationId": "postOauthAuthorize", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/AuthorizeRequest" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/AuthorizeResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/oauth/token": { - "post": { - 
"tags": ["oauth"], - "summary": "Token", - "description": "OAuth 2.0 Token Endpoint\n\nExchanges authorization code or refresh token for access token.\n\nGrant Types:\n1. authorization_code: Exchange authorization code for tokens\n - Required: grant_type, code, redirect_uri, client_id, client_secret\n - Optional: code_verifier (required if PKCE was used)\n\n2. refresh_token: Exchange refresh token for new access token\n - Required: grant_type, refresh_token, client_id, client_secret\n\nReturns:\n- access_token: Bearer token for API access (1 hour TTL)\n- token_type: \"Bearer\"\n- expires_in: Seconds until access token expires\n- refresh_token: Token for refreshing access (30 days TTL)\n- scopes: List of scopes", - "operationId": "postOauthToken", - "requestBody": { - "content": { - "application/json": { - "schema": { - "anyOf": [ - { "$ref": "#/components/schemas/TokenRequestByCode" }, - { "$ref": "#/components/schemas/TokenRequestByRefreshToken" } - ], - "title": "Request" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/TokenResponse" } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - } - } - } - }, - "/api/oauth/introspect": { - "post": { - "tags": ["oauth"], - "summary": "Introspect", - "description": "OAuth 2.0 Token Introspection Endpoint (RFC 7662)\n\nAllows clients to check if a token is valid and get its metadata.\n\nReturns:\n- active: Whether the token is currently active\n- scopes: List of authorized scopes (if active)\n- client_id: The client the token was issued to (if active)\n- user_id: The user the token represents (if active)\n- exp: Expiration timestamp (if active)\n- token_type: \"access_token\" or \"refresh_token\" (if active)", - "operationId": "postOauthIntrospect", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Body_postOauthIntrospect" - } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/TokenIntrospectionResult" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - } - } - } - }, - "/api/oauth/revoke": { - "post": { - "tags": ["oauth"], - "summary": "Revoke", - "description": "OAuth 2.0 Token Revocation Endpoint (RFC 7009)\n\nAllows clients to revoke an access or refresh token.\n\nNote: Revoking a refresh token does NOT revoke associated access tokens.\nRevoking an access token does NOT revoke the associated refresh token.", - "operationId": "postOauthRevoke", - "requestBody": { - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/Body_postOauthRevoke" } - } - }, - "required": true - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { "application/json": { "schema": {} } } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - } - } - } - }, - "/api/oauth/apps/mine": { - "get": { - "tags": ["oauth"], - "summary": "List My Oauth Apps", - "description": "List all OAuth applications owned by the current 
user.\n\nReturns a list of OAuth applications with their details including:\n- id, name, description, logo_url\n- client_id (public identifier)\n- redirect_uris, grant_types, scopes\n- is_active status\n- created_at, updated_at timestamps\n\nNote: client_secret is never returned for security reasons.", - "operationId": "getOauthListMyOauthApps", - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "items": { - "$ref": "#/components/schemas/OAuthApplicationInfo" - }, - "type": "array", - "title": "Response Getoauthlistmyoauthapps" - } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - }, - "security": [{ "HTTPBearerJWT": [] }] - } - }, - "/api/oauth/apps/{app_id}/status": { - "patch": { - "tags": ["oauth"], - "summary": "Update App Status", - "description": "Enable or disable an OAuth application.\n\nOnly the application owner can update the status.\nWhen disabled, the application cannot be used for new authorizations\nand existing access tokens will fail validation.\n\nReturns the updated application info.", - "operationId": "patchOauthUpdateAppStatus", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "app_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "App Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Body_patchOauthUpdateAppStatus" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/OAuthApplicationInfo" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/oauth/apps/{app_id}/logo": { - "patch": { - "tags": ["oauth"], - "summary": "Update App Logo", - "description": "Update the logo URL for an OAuth application.\n\nOnly the application owner can update the logo.\nThe logo should be uploaded first using the media upload endpoint,\nthen this endpoint is called with the resulting URL.\n\nLogo requirements:\n- Must be square (1:1 aspect ratio)\n- Minimum 512x512 pixels\n- Maximum 2048x2048 pixels\n\nReturns the updated application info.", - "operationId": "patchOauthUpdateAppLogo", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "app_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "App Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/UpdateAppLogoRequest" } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/OAuthApplicationInfo" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" - } - } - } - }, - "/api/oauth/apps/{app_id}/logo/upload": { - "post": { - "tags": ["oauth"], - "summary": "Upload App Logo", - "description": "Upload a logo image for an OAuth application.\n\nRequirements:\n- Image must be square (1:1 aspect ratio)\n- 
Minimum 512x512 pixels\n- Maximum 2048x2048 pixels\n- Allowed formats: JPEG, PNG, WebP\n- Maximum file size: 3MB\n\nThe image is uploaded to cloud storage and the app's logoUrl is updated.\nReturns the updated application info.", - "operationId": "postOauthUploadAppLogo", - "security": [{ "HTTPBearerJWT": [] }], - "parameters": [ - { - "name": "app_id", - "in": "path", - "required": true, - "schema": { "type": "string", "title": "App Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "multipart/form-data": { - "schema": { - "$ref": "#/components/schemas/Body_postOauthUploadAppLogo" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/OAuthApplicationInfo" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { "$ref": "#/components/schemas/HTTPValidationError" } - } - } - }, - "401": { - "$ref": "#/components/responses/HTTP401NotAuthenticatedError" } } } @@ -6318,6 +6319,25 @@ "required": ["is_active"], "title": "Body_patchOauthUpdateAppStatus" }, + "Body_postAnalyticsLogRawAnalytics": { + "properties": { + "type": { "type": "string", "title": "Type" }, + "data": { + "additionalProperties": true, + "type": "object", + "title": "Data", + "description": "The data to log" + }, + "data_index": { + "type": "string", + "title": "Data Index", + "description": "Indexable field for any count based analytical measures like page order clicking, tutorial step completion, etc." + } + }, + "type": "object", + "required": ["type", "data", "data_index"], + "title": "Body_postAnalyticsLogRawAnalytics" + }, "Body_postOauthIntrospect": { "properties": { "token": { @@ -6426,25 +6446,6 @@ "type": "object", "title": "Body_postV1Execute graph agent" }, - "Body_postV1LogRawAnalytics": { - "properties": { - "type": { "type": "string", "title": "Type" }, - "data": { - "additionalProperties": true, - "type": "object", - "title": "Data", - "description": "The data to log" - }, - "data_index": { - "type": "string", - "title": "Data Index", - "description": "Indexable field for any count based analytical measures like page order clicking, tutorial step completion, etc." 
- } - }, - "type": "object", - "required": ["type", "data", "data_index"], - "title": "Body_postV1LogRawAnalytics" - }, "Body_postV1Upload_file_to_cloud_storage": { "properties": { "file": { "type": "string", "format": "binary", "title": "File" } @@ -11627,15 +11628,15 @@ } }, "securitySchemes": { - "HTTPBearerJWT": { - "type": "http", - "scheme": "bearer", - "bearerFormat": "jwt" - }, "APIKeyAuthenticator-X-Postmark-Webhook-Token": { "type": "apiKey", "in": "header", "name": "X-Postmark-Webhook-Token" + }, + "HTTPBearerJWT": { + "type": "http", + "scheme": "bearer", + "bearerFormat": "jwt" } }, "responses": { From 08a60dcb9b33dfe486d1be0cf6bfe1c792dbd63b Mon Sep 17 00:00:00 2001 From: Reinier van der Leer Date: Sat, 20 Dec 2025 22:46:24 +0100 Subject: [PATCH 258/260] refactor(frontend): Clean up React Query-related code (#11604) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - #11603 ### Changes 🏗️ Frontend: - Make `okData` infer the response data type instead of casting - Generalize infinite query utilities from `SidebarRunsList/helpers.ts` - Move to `@/app/api/helpers` and use wherever possible - Simplify/replace boilerplate checks and conditions with `okData` in many places - Add `useUserTimezone` hook to replace all the boilerplate timezone queries Backend: - Fix response type annotation of `GET /api/store/graph/{store_listing_version_id}` endpoint - Fix documentation and error behavior of `GET /api/review/execution/{graph_exec_id}` endpoint ### Checklist 📋 #### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - CI passes - [x] Clicking around the app manually -> no obvious issues - [x] Test Onboarding step 5 (run) - [x] Library runs list loads normally --- .../executions/review/review_routes_test.py | 29 +++--- .../api/features/executions/review/routes.py | 9 +- .../backend/api/features/store/routes.py | 4 +- autogpt_platform/frontend/orval.config.ts | 6 ++ .../app/(no-navbar)/onboarding/5-run/page.tsx | 28 +++--- .../onboarding/5-run/useOnboardingRunStep.tsx | 66 ++++++------- .../app/(no-navbar)/share/[token]/page.tsx | 6 +- .../components/ExecutionAnalyticsForm.tsx | 21 ++--- .../auth/integrations/setup-wizard/page.tsx | 3 +- .../useCronSchedulerDialog.ts | 8 +- .../build/components/FlowEditor/Flow/Flow.tsx | 3 +- .../BlockMenuSearch/BlockMenuSearch.tsx | 6 +- .../BlockMenuSearch/useBlockMenuSearch.ts | 58 +++++------- .../IntegrationBlocks/useIntegrationBlocks.ts | 36 +++---- .../useMarketplaceAgentsContent.ts | 28 ++---- .../MyAgentsContent/useMyAgentsContent.ts | 26 ++--- .../usePaginatedBlocks.ts | 26 ++--- .../usePaginatedIntegrationList.ts | 26 ++--- .../src/app/(platform)/chat/useChatSession.ts | 7 +- .../TimezoneNotice/TimezoneNotice.tsx | 10 +- .../selected-views/AgentActionsDropdown.tsx | 6 +- .../useSelectedRunActions.ts | 13 +-- .../SelectedRunView/useSelectedRunView.ts | 25 ++--- .../SelectedScheduleView.tsx | 16 ++-- .../EditScheduleModal/useEditScheduleModal.ts | 5 +- .../useSelectedScheduleView.ts | 17 ++-- .../components/SelectedTemplateActions.tsx | 16 ++-- .../useSelectedTemplateView.ts | 11 +-- .../components/SelectedTriggerActions.tsx | 16 ++-- .../useSelectedTriggerView.ts | 3 +- .../components/TaskActionsDropdown.tsx | 6 +- .../sidebar/SidebarRunsList/helpers.ts | 44 --------- .../SidebarRunsList/useSidebarRunsList.ts | 24 +++-- .../useNewAgentLibraryView.ts | 9 +- 
.../agent-schedule-details-view.tsx | 8 +- .../components/cron-scheduler-dialog.tsx | 8 +- .../OldAgentLibraryView/use-agent-runs.ts | 38 +++----- .../LibraryAgentList/useLibraryAgentList.ts | 30 +++--- .../library/hooks/useFavoriteAgents.ts | 39 +++----- .../monitoring/components/SchedulesTable.tsx | 8 +- .../src/app/(platform)/monitoring/page.tsx | 4 +- .../components/APIKeySection/useAPISection.ts | 7 +- .../oauth-apps/components/useOAuthApps.ts | 3 +- .../profile/(user)/settings/page.tsx | 7 +- .../frontend/src/app/api/helpers.ts | 94 ++++++++++++++++++- .../frontend/src/app/api/openapi.json | 11 ++- .../GoogleDrivePicker/useGoogleDrivePicker.ts | 5 +- .../AgentSelectStep/AgentSelectStep.tsx | 6 +- .../AgentSelectStep/useAgentSelectStep.ts | 53 ++++++----- .../useAgentActivityDropdown.ts | 3 +- .../layout/Navbar/components/NavbarView.tsx | 3 +- .../FloatingReviewsPanel.tsx | 9 +- .../PendingReviewsList/PendingReviewsList.tsx | 12 +-- .../frontend/src/hooks/useAgentSafeMode.ts | 13 ++- .../frontend/src/hooks/usePendingReviews.ts | 5 +- .../frontend/src/lib/hooks/useUserTimezone.ts | 8 ++ .../src/lib/react-query/queryClient.ts | 4 + 57 files changed, 453 insertions(+), 542 deletions(-) delete mode 100644 autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/helpers.ts create mode 100644 autogpt_platform/frontend/src/lib/hooks/useUserTimezone.ts diff --git a/autogpt_platform/backend/backend/api/features/executions/review/review_routes_test.py b/autogpt_platform/backend/backend/api/features/executions/review/review_routes_test.py index 9d1df5f999..c4eba0befc 100644 --- a/autogpt_platform/backend/backend/api/features/executions/review/review_routes_test.py +++ b/autogpt_platform/backend/backend/api/features/executions/review/review_routes_test.py @@ -55,7 +55,7 @@ def sample_pending_review(test_user_id: str) -> PendingHumanReviewModel: def test_get_pending_reviews_empty( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, snapshot: Snapshot, test_user_id: str, ) -> None: @@ -73,7 +73,7 @@ def test_get_pending_reviews_empty( def test_get_pending_reviews_with_data( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, sample_pending_review: PendingHumanReviewModel, snapshot: Snapshot, test_user_id: str, @@ -95,7 +95,7 @@ def test_get_pending_reviews_with_data( def test_get_pending_reviews_for_execution_success( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, sample_pending_review: PendingHumanReviewModel, snapshot: Snapshot, test_user_id: str, @@ -122,9 +122,8 @@ def test_get_pending_reviews_for_execution_success( assert data[0]["graph_exec_id"] == "test_graph_exec_456" -def test_get_pending_reviews_for_execution_access_denied( - mocker: pytest_mock.MockFixture, - test_user_id: str, +def test_get_pending_reviews_for_execution_not_available( + mocker: pytest_mock.MockerFixture, ) -> None: """Test access denied when user doesn't own the execution""" mock_get_graph_execution = mocker.patch( @@ -134,12 +133,12 @@ def test_get_pending_reviews_for_execution_access_denied( response = client.get("/api/review/execution/test_graph_exec_456") - assert response.status_code == 403 - assert "Access denied" in response.json()["detail"] + assert response.status_code == 404 + assert "not found" in response.json()["detail"] def test_process_review_action_approve_success( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, sample_pending_review: 
PendingHumanReviewModel, test_user_id: str, ) -> None: @@ -203,7 +202,7 @@ def test_process_review_action_approve_success( def test_process_review_action_reject_success( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, sample_pending_review: PendingHumanReviewModel, test_user_id: str, ) -> None: @@ -263,7 +262,7 @@ def test_process_review_action_reject_success( def test_process_review_action_mixed_success( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, sample_pending_review: PendingHumanReviewModel, test_user_id: str, ) -> None: @@ -370,7 +369,7 @@ def test_process_review_action_mixed_success( def test_process_review_action_empty_request( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, test_user_id: str, ) -> None: """Test error when no reviews provided""" @@ -387,7 +386,7 @@ def test_process_review_action_empty_request( def test_process_review_action_review_not_found( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, test_user_id: str, ) -> None: """Test error when review is not found""" @@ -423,7 +422,7 @@ def test_process_review_action_review_not_found( def test_process_review_action_partial_failure( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, sample_pending_review: PendingHumanReviewModel, test_user_id: str, ) -> None: @@ -457,7 +456,7 @@ def test_process_review_action_partial_failure( def test_process_review_action_invalid_node_exec_id( - mocker: pytest_mock.MockFixture, + mocker: pytest_mock.MockerFixture, sample_pending_review: PendingHumanReviewModel, test_user_id: str, ) -> None: diff --git a/autogpt_platform/backend/backend/api/features/executions/review/routes.py b/autogpt_platform/backend/backend/api/features/executions/review/routes.py index 4aa4fac49b..88646046da 100644 --- a/autogpt_platform/backend/backend/api/features/executions/review/routes.py +++ b/autogpt_platform/backend/backend/api/features/executions/review/routes.py @@ -67,8 +67,7 @@ async def list_pending_reviews( response_model=List[PendingHumanReviewModel], responses={ 200: {"description": "List of pending reviews for the execution"}, - 400: {"description": "Invalid graph execution ID"}, - 403: {"description": "Access denied to graph execution"}, + 404: {"description": "Graph execution not found"}, 500: {"description": "Server error", "content": {"application/json": {}}}, }, ) @@ -91,7 +90,7 @@ async def list_pending_reviews_for_execution( Raises: HTTPException: - - 403: If user doesn't own the graph execution + - 404: If the graph execution doesn't exist or isn't owned by this user - 500: If authentication fails or database error occurs Note: @@ -105,8 +104,8 @@ async def list_pending_reviews_for_execution( ) if not graph_exec: raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="Access denied to graph execution", + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Graph execution #{graph_exec_id} not found", ) return await get_pending_reviews_for_execution(graph_exec_id, user_id) diff --git a/autogpt_platform/backend/backend/api/features/store/routes.py b/autogpt_platform/backend/backend/api/features/store/routes.py index 6a9bb05291..7d4db50d3f 100644 --- a/autogpt_platform/backend/backend/api/features/store/routes.py +++ b/autogpt_platform/backend/backend/api/features/store/routes.py @@ -173,7 +173,9 @@ async def get_agent(username: str, agent_name: str): tags=["store"], dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)], ) -async def 
get_graph_meta_by_store_listing_version_id(store_listing_version_id: str): +async def get_graph_meta_by_store_listing_version_id( + store_listing_version_id: str, +) -> backend.data.graph.GraphMeta: """ Get Agent Graph from Store Listing Version ID. """ diff --git a/autogpt_platform/frontend/orval.config.ts b/autogpt_platform/frontend/orval.config.ts index de305c1acc..dff857e1b6 100644 --- a/autogpt_platform/frontend/orval.config.ts +++ b/autogpt_platform/frontend/orval.config.ts @@ -41,6 +41,12 @@ export default defineConfig({ useInfiniteQueryParam: "page", }, }, + "getV2List presets": { + query: { + useInfinite: true, + useInfiniteQueryParam: "page", + }, + }, "getV1List graph executions": { query: { useInfinite: true, diff --git a/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/page.tsx b/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/page.tsx index 58960a0cf6..30e1b67090 100644 --- a/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/page.tsx +++ b/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/page.tsx @@ -25,7 +25,7 @@ export default function Page() { ready, error, showInput, - agent, + agentGraph, onboarding, storeAgent, runningAgent, @@ -76,19 +76,19 @@ export default function Page() { Input - {Object.entries(agent?.input_schema.properties || {}).map( - ([key, inputSubSchema]) => ( - handleSetAgentInput(key, value)} - /> - ), - )} + {Object.entries( + agentGraph?.input_schema.properties || {}, + ).map(([key, inputSubSchema]) => ( + handleSetAgentInput(key, value)} + /> + ))} ) || undefined @@ -104,7 +104,7 @@ export default function Page() { className="mt-8 w-[136px]" loading={runningAgent} disabled={isRunDisabled({ - agent, + agent: agentGraph, isRunning: runningAgent, agentInputs: (onboarding.state?.agentInput as unknown as InputValues) || diff --git a/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/useOnboardingRunStep.tsx b/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/useOnboardingRunStep.tsx index 37538a2191..f143c89d44 100644 --- a/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/useOnboardingRunStep.tsx +++ b/autogpt_platform/frontend/src/app/(no-navbar)/onboarding/5-run/useOnboardingRunStep.tsx @@ -1,6 +1,3 @@ -import { CredentialsMetaInput } from "@/app/api/__generated__/models/credentialsMetaInput"; -import { GraphMeta } from "@/app/api/__generated__/models/graphMeta"; -import { StoreAgentDetails } from "@/app/api/__generated__/models/storeAgentDetails"; import { useToast } from "@/components/molecules/Toast/use-toast"; import { useBackendAPI } from "@/lib/autogpt-server-api/context"; import { useOnboarding } from "@/providers/onboarding/onboarding-provider"; @@ -8,20 +5,19 @@ import { useRouter } from "next/navigation"; import { useEffect, useState } from "react"; import { computeInitialAgentInputs } from "./helpers"; import { InputValues } from "./types"; +import { okData, resolveResponse } from "@/app/api/helpers"; +import { postV2AddMarketplaceAgent } from "@/app/api/__generated__/endpoints/library/library"; import { useGetV2GetAgentByVersion, useGetV2GetAgentGraph, } from "@/app/api/__generated__/endpoints/store/store"; -import { resolveResponse } from "@/app/api/helpers"; -import { postV2AddMarketplaceAgent } from "@/app/api/__generated__/endpoints/library/library"; +import { CredentialsMetaInput } from "@/app/api/__generated__/models/credentialsMetaInput"; import { GraphID } from "@/lib/autogpt-server-api"; export function useOnboardingRunStep() { const 
onboarding = useOnboarding(undefined, "AGENT_CHOICE"); const [showInput, setShowInput] = useState(false); - const [agent, setAgent] = useState(null); - const [storeAgent, setStoreAgent] = useState(null); const [runningAgent, setRunningAgent] = useState(false); const [inputCredentials, setInputCredentials] = useState< @@ -38,12 +34,26 @@ export function useOnboardingRunStep() { const currentAgentVersion = onboarding.state?.selectedStoreListingVersionId ?? ""; - const storeAgentQuery = useGetV2GetAgentByVersion(currentAgentVersion, { - query: { enabled: !!currentAgentVersion }, + const { + data: storeAgent, + error: storeAgentQueryError, + isSuccess: storeAgentQueryIsSuccess, + } = useGetV2GetAgentByVersion(currentAgentVersion, { + query: { + enabled: !!currentAgentVersion, + select: okData, + }, }); - const graphMetaQuery = useGetV2GetAgentGraph(currentAgentVersion, { - query: { enabled: !!currentAgentVersion }, + const { + data: agentGraphMeta, + error: agentGraphQueryError, + isSuccess: agentGraphQueryIsSuccess, + } = useGetV2GetAgentGraph(currentAgentVersion, { + query: { + enabled: !!currentAgentVersion, + select: okData, + }, }); useEffect(() => { @@ -51,29 +61,15 @@ export function useOnboardingRunStep() { }, []); useEffect(() => { - if (storeAgentQuery.data && storeAgentQuery.data.status === 200) { - setStoreAgent(storeAgentQuery.data.data); - } - }, [storeAgentQuery.data]); - - useEffect(() => { - if ( - graphMetaQuery.data && - graphMetaQuery.data.status === 200 && - onboarding.state - ) { - const graphMeta = graphMetaQuery.data.data as GraphMeta; - - setAgent(graphMeta); - - const update = computeInitialAgentInputs( - graphMeta, + if (agentGraphMeta && onboarding.state) { + const initialAgentInputs = computeInitialAgentInputs( + agentGraphMeta, (onboarding.state.agentInput as unknown as InputValues) || null, ); - onboarding.updateState({ agentInput: update }); + onboarding.updateState({ agentInput: initialAgentInputs }); } - }, [graphMetaQuery.data]); + }, [agentGraphMeta]); function handleNewRun() { if (!onboarding.state) return; @@ -95,7 +91,7 @@ export function useOnboardingRunStep() { } async function handleRunAgent() { - if (!agent || !storeAgent || !onboarding.state) { + if (!agentGraphMeta || !storeAgent || !onboarding.state) { toast({ title: "Error getting agent", description: @@ -142,12 +138,12 @@ export function useOnboardingRunStep() { } return { - ready: graphMetaQuery.isSuccess && storeAgentQuery.isSuccess, - error: graphMetaQuery.error || storeAgentQuery.error, - agent, + ready: agentGraphQueryIsSuccess && storeAgentQueryIsSuccess, + error: agentGraphQueryError || storeAgentQueryError, + agentGraph: agentGraphMeta || null, onboarding, showInput, - storeAgent, + storeAgent: storeAgent || null, runningAgent, credentialsValid, credentialsLoaded, diff --git a/autogpt_platform/frontend/src/app/(no-navbar)/share/[token]/page.tsx b/autogpt_platform/frontend/src/app/(no-navbar)/share/[token]/page.tsx index c24f9e11a3..1c37c6c72f 100644 --- a/autogpt_platform/frontend/src/app/(no-navbar)/share/[token]/page.tsx +++ b/autogpt_platform/frontend/src/app/(no-navbar)/share/[token]/page.tsx @@ -1,6 +1,7 @@ "use client"; import { RunOutputs } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/RunOutputs"; +import { okData } from "@/app/api/helpers"; import { useGetV1GetSharedExecution } from "@/app/api/__generated__/endpoints/default/default"; import { Card, @@ -17,12 +18,11 @@ export default function 
SharePage() { const token = params.token as string; const { - data: response, + data: executionData, isLoading: loading, error, - } = useGetV1GetSharedExecution(token); + } = useGetV1GetSharedExecution(token, { query: { select: okData } }); - const executionData = response?.status === 200 ? response.data : undefined; const is404 = !loading && !executionData; if (loading) { diff --git a/autogpt_platform/frontend/src/app/(platform)/admin/execution-analytics/components/ExecutionAnalyticsForm.tsx b/autogpt_platform/frontend/src/app/(platform)/admin/execution-analytics/components/ExecutionAnalyticsForm.tsx index fd77628140..5aced56090 100644 --- a/autogpt_platform/frontend/src/app/(platform)/admin/execution-analytics/components/ExecutionAnalyticsForm.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/admin/execution-analytics/components/ExecutionAnalyticsForm.tsx @@ -41,6 +41,7 @@ interface FormData extends Omit { // All other fields use the generated types as-is } import { AnalyticsResultsTable } from "./AnalyticsResultsTable"; +import { okData } from "@/app/api/helpers"; export function ExecutionAnalyticsForm() { const [results, setResults] = useState( @@ -178,7 +179,7 @@ export function ExecutionAnalyticsForm() { data: config, isLoading: configLoading, error: configError, - } = useGetV2GetExecutionAnalyticsConfiguration(); + } = useGetV2GetExecutionAnalyticsConfiguration({ query: { select: okData } }); const generateAnalytics = usePostV2GenerateExecutionAnalytics({ mutation: { @@ -231,10 +232,10 @@ export function ExecutionAnalyticsForm() { // Update form defaults when config loads useEffect(() => { - if (config?.data && config.status === 200 && !formData.model_name) { + if (config && !formData.model_name) { setFormData((prev) => ({ ...prev, - model_name: config.data.recommended_model, + model_name: config.recommended_model, })); } }, [config, formData.model_name]); @@ -307,7 +308,7 @@ export function ExecutionAnalyticsForm() { } // Show error state if config fails to load - if (configError || !config?.data || config.status !== 200) { + if (configError || !config) { return (
Failed to load configuration
@@ -315,8 +316,6 @@ export function ExecutionAnalyticsForm() { ); } - const configData = config.data; - return (
@@ -382,7 +381,7 @@ export function ExecutionAnalyticsForm() { - {configData.available_models.map((model) => ( + {config.available_models.map((model) => ( {model.label} @@ -442,7 +441,7 @@ export function ExecutionAnalyticsForm() { onChange={(e) => handleInputChange("system_prompt", e.target.value) } - placeholder={configData.default_system_prompt} + placeholder={config.default_system_prompt} rows={6} className="resize-y" /> @@ -463,7 +462,7 @@ export function ExecutionAnalyticsForm() { onChange={(e) => handleInputChange("user_prompt", e.target.value) } - placeholder={configData.default_user_prompt} + placeholder={config.default_user_prompt} rows={8} className="resize-y" /> @@ -490,7 +489,7 @@ export function ExecutionAnalyticsForm() { onClick={() => { handleInputChange( "system_prompt", - configData.default_system_prompt, + config.default_system_prompt, ); }} > @@ -503,7 +502,7 @@ export function ExecutionAnalyticsForm() { onClick={() => { handleInputChange( "user_prompt", - configData.default_user_prompt, + config.default_user_prompt, ); }} > diff --git a/autogpt_platform/frontend/src/app/(platform)/auth/integrations/setup-wizard/page.tsx b/autogpt_platform/frontend/src/app/(platform)/auth/integrations/setup-wizard/page.tsx index 5163c46d5b..3372772c89 100644 --- a/autogpt_platform/frontend/src/app/(platform)/auth/integrations/setup-wizard/page.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/auth/integrations/setup-wizard/page.tsx @@ -17,7 +17,6 @@ import type { import { CheckIcon, CircleIcon } from "@phosphor-icons/react"; import { useGetOauthGetOauthAppInfo } from "@/app/api/__generated__/endpoints/oauth/oauth"; import { okData } from "@/app/api/helpers"; -import { OAuthApplicationPublicInfo } from "@/app/api/__generated__/models/oAuthApplicationPublicInfo"; // All credential types - we accept any type of credential const ALL_CREDENTIAL_TYPES: CredentialsType[] = [ @@ -107,7 +106,7 @@ export default function IntegrationSetupWizardPage() { const state = searchParams.get("state"); const { data: appInfo } = useGetOauthGetOauthAppInfo(clientID || "", { - query: { enabled: !!clientID, select: okData }, + query: { enabled: !!clientID, select: okData }, }); // Parse providers from base64-encoded JSON diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/CronSchedulerDialog/useCronSchedulerDialog.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/CronSchedulerDialog/useCronSchedulerDialog.ts index 4d5f8bf254..1abfabbdba 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/CronSchedulerDialog/useCronSchedulerDialog.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/CronSchedulerDialog/useCronSchedulerDialog.ts @@ -1,6 +1,6 @@ -import { useGetV1GetUserTimezone } from "@/app/api/__generated__/endpoints/auth/auth"; import { usePostV1CreateExecutionSchedule } from "@/app/api/__generated__/endpoints/schedules/schedules"; import { useToast } from "@/components/molecules/Toast/use-toast"; +import { useUserTimezone } from "@/lib/hooks/useUserTimezone"; import { getTimezoneDisplayName } from "@/lib/timezone-utils"; import { parseAsInteger, parseAsString, useQueryStates } from "nuqs"; import { useEffect, useState } from "react"; @@ -28,11 +28,7 @@ export const useCronSchedulerDialog = ({ flowExecutionID: parseAsString, }); - const { data: userTimezone } = useGetV1GetUserTimezone({ - query: { - select: (res) => 
(res.status === 200 ? res.data.timezone : undefined), - }, - }); + const userTimezone = useUserTimezone(); const timezoneDisplay = getTimezoneDisplayName(userTimezone || "UTC"); const { mutateAsync: createSchedule, isPending: isCreatingSchedule } = diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx index d312fd487d..c9cf5296c6 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/Flow/Flow.tsx @@ -17,7 +17,6 @@ import { FloatingReviewsPanel } from "@/components/organisms/FloatingReviewsPane import { parseAsString, useQueryStates } from "nuqs"; import { CustomControls } from "./components/CustomControl"; import { useGetV1GetSpecificGraph } from "@/app/api/__generated__/endpoints/graphs/graphs"; -import { GraphModel } from "@/app/api/__generated__/models/graphModel"; import { okData } from "@/app/api/helpers"; import { TriggerAgentBanner } from "./components/TriggerAgentBanner"; import { resolveCollisions } from "./helpers/resolve-collision"; @@ -34,7 +33,7 @@ export const Flow = () => { {}, { query: { - select: okData, + select: okData, enabled: !!flowID, }, }, diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/BlockMenuSearch.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/BlockMenuSearch.tsx index 71888b62ee..de339431e8 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/BlockMenuSearch.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/BlockMenuSearch.tsx @@ -14,7 +14,7 @@ import { NoSearchResult } from "../NoSearchResult"; export const BlockMenuSearch = () => { const { - allSearchData, + searchResults, isFetchingNextPage, fetchNextPage, hasNextPage, @@ -39,7 +39,7 @@ export const BlockMenuSearch = () => { ); } - if (allSearchData.length === 0) { + if (searchResults.length === 0) { return ; } @@ -53,7 +53,7 @@ export const BlockMenuSearch = () => { loader={} className="space-y-2.5" > - {allSearchData.map((item: SearchResponseItemsItem, index: number) => { + {searchResults.map((item: SearchResponseItemsItem, index: number) => { const { type, data } = getSearchItemType(item); // backend give support to these 3 types only [right now] - we need to give support to integration and ai agent types in follow up PRs switch (type) { diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/useBlockMenuSearch.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/useBlockMenuSearch.ts index 3eb14d3ca9..beff80a984 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/useBlockMenuSearch.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/BlockMenuSearch/useBlockMenuSearch.ts @@ -1,19 +1,25 @@ -import { useBlockMenuStore } from "../../../../stores/blockMenuStore"; -import { useGetV2BuilderSearchInfinite } from "@/app/api/__generated__/endpoints/store/store"; -import { SearchResponse } from "@/app/api/__generated__/models/searchResponse"; import { 
useCallback, useEffect, useState } from "react"; +import { useBlockMenuStore } from "@/app/(platform)/build/stores/blockMenuStore"; import { useAddAgentToBuilder } from "../hooks/useAddAgentToBuilder"; -import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; -import { getV2GetSpecificAgent } from "@/app/api/__generated__/endpoints/store/store"; +import { + getPaginationNextPageNumber, + okData, + unpaginate, +} from "@/app/api/helpers"; +import { + getGetV2GetBuilderItemCountsQueryKey, + getGetV2GetBuilderSuggestionsQueryKey, +} from "@/app/api/__generated__/endpoints/default/default"; import { getGetV2ListLibraryAgentsQueryKey, getV2GetLibraryAgent, usePostV2AddMarketplaceAgent, } from "@/app/api/__generated__/endpoints/library/library"; import { - getGetV2GetBuilderItemCountsQueryKey, - getGetV2GetBuilderSuggestionsQueryKey, -} from "@/app/api/__generated__/endpoints/default/default"; + getV2GetSpecificAgent, + useGetV2BuilderSearchInfinite, +} from "@/app/api/__generated__/endpoints/store/store"; +import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; import { getQueryClient } from "@/lib/react-query/queryClient"; import { useToast } from "@/components/molecules/Toast/use-toast"; import * as Sentry from "@sentry/nextjs"; @@ -40,7 +46,7 @@ export const useBlockMenuSearch = () => { >(null); const { - data: searchData, + data: searchQueryData, fetchNextPage, hasNextPage, isFetchingNextPage, @@ -53,18 +59,7 @@ export const useBlockMenuSearch = () => { search_id: searchId, }, { - query: { - getNextPageParam: (lastPage) => { - const response = lastPage.data as SearchResponse; - const { pagination } = response; - if (!pagination) { - return undefined; - } - - const { current_page, total_pages } = pagination; - return current_page < total_pages ? current_page + 1 : undefined; - }, - }, + query: { getNextPageParam: getPaginationNextPageNumber }, }, ); @@ -93,16 +88,15 @@ export const useBlockMenuSearch = () => { }); useEffect(() => { - if (!searchData?.pages?.length) { + if (!searchQueryData?.pages?.length) { return; } - const latestPage = searchData.pages[searchData.pages.length - 1]; - const response = latestPage?.data as SearchResponse; - if (response?.search_id && response.search_id !== searchId) { - setSearchId(response.search_id); + const lastPage = okData(searchQueryData.pages.at(-1)); + if (lastPage?.search_id && lastPage.search_id !== searchId) { + setSearchId(lastPage.search_id); } - }, [searchData, searchId, setSearchId]); + }, [searchQueryData, searchId, setSearchId]); useEffect(() => { if (searchId && !searchQuery) { @@ -110,11 +104,9 @@ export const useBlockMenuSearch = () => { } }, [resetSearchSession, searchId, searchQuery]); - const allSearchData = - searchData?.pages?.flatMap((page) => { - const response = page.data as SearchResponse; - return response.items; - }) ?? []; + const searchResults = searchQueryData + ? 
unpaginate(searchQueryData, "items") + : []; const handleAddLibraryAgent = async (agent: LibraryAgent) => { setAddingLibraryAgentId(agent.id); @@ -177,7 +169,7 @@ export const useBlockMenuSearch = () => { }; return { - allSearchData, + searchResults, isFetchingNextPage, fetchNextPage, hasNextPage, diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/IntegrationBlocks/useIntegrationBlocks.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/IntegrationBlocks/useIntegrationBlocks.ts index 678f903936..c6dcd61e36 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/IntegrationBlocks/useIntegrationBlocks.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/IntegrationBlocks/useIntegrationBlocks.ts @@ -1,6 +1,10 @@ +import { + getPaginatedTotalCount, + getPaginationNextPageNumber, + unpaginate, +} from "@/app/api/helpers"; import { useGetV2GetBuilderBlocksInfinite } from "@/app/api/__generated__/endpoints/default/default"; -import { BlockResponse } from "@/app/api/__generated__/models/blockResponse"; -import { useBlockMenuStore } from "../../../../stores/blockMenuStore"; +import { useBlockMenuStore } from "@/app/(platform)/build/stores/blockMenuStore"; const PAGE_SIZE = 10; @@ -8,7 +12,7 @@ export const useIntegrationBlocks = () => { const { integration } = useBlockMenuStore(); const { - data: blocks, + data: blocksQueryData, fetchNextPage, hasNextPage, isFetchingNextPage, @@ -22,30 +26,16 @@ export const useIntegrationBlocks = () => { provider: integration, }, { - query: { - getNextPageParam: (lastPage) => { - const pagination = (lastPage.data as BlockResponse).pagination; - const isMore = - pagination.current_page * pagination.page_size < - pagination.total_items; - - return isMore ? pagination.current_page + 1 : undefined; - }, - }, + query: { getNextPageParam: getPaginationNextPageNumber }, }, ); - const allBlocks = - blocks?.pages?.flatMap((page) => { - const response = page.data as BlockResponse; - return response.blocks; - }) ?? []; + const allBlocks = blocksQueryData + ? unpaginate(blocksQueryData, "blocks") + : []; + const totalBlocks = getPaginatedTotalCount(blocksQueryData); - const totalBlocks = blocks?.pages[0] - ? 
(blocks.pages[0].data as BlockResponse).pagination.total_items - : 0; - - const status = blocks?.pages[0]?.status; + const status = blocksQueryData?.pages[0]?.status; return { allBlocks, diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MarketplaceAgentsContent/useMarketplaceAgentsContent.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MarketplaceAgentsContent/useMarketplaceAgentsContent.ts index ff9b70b79a..c45f36ae87 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MarketplaceAgentsContent/useMarketplaceAgentsContent.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MarketplaceAgentsContent/useMarketplaceAgentsContent.ts @@ -1,3 +1,4 @@ +import { getPaginationNextPageNumber, unpaginate } from "@/app/api/helpers"; import { getGetV2GetBuilderItemCountsQueryKey } from "@/app/api/__generated__/endpoints/default/default"; import { getGetV2ListLibraryAgentsQueryKey, @@ -8,13 +9,12 @@ import { getV2GetSpecificAgent, useGetV2ListStoreAgentsInfinite, } from "@/app/api/__generated__/endpoints/store/store"; +import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; import { useToast } from "@/components/molecules/Toast/use-toast"; -import { StoreAgentsResponse } from "@/lib/autogpt-server-api"; import { getQueryClient } from "@/lib/react-query/queryClient"; import * as Sentry from "@sentry/nextjs"; import { useState } from "react"; import { useAddAgentToBuilder } from "../hooks/useAddAgentToBuilder"; -import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; export const useMarketplaceAgentsContent = () => { const { toast } = useToast(); @@ -22,7 +22,7 @@ export const useMarketplaceAgentsContent = () => { const { addAgentToBuilder } = useAddAgentToBuilder(); const { - data: listStoreAgents, + data: storeAgentsQueryData, fetchNextPage, hasNextPage, isFetchingNextPage, @@ -36,26 +36,14 @@ export const useMarketplaceAgentsContent = () => { page_size: 10, }, { - query: { - getNextPageParam: (lastPage) => { - const pagination = (lastPage.data as StoreAgentsResponse).pagination; - const isMore = - pagination.current_page * pagination.page_size < - pagination.total_items; - - return isMore ? pagination.current_page + 1 : undefined; - }, - }, + query: { getNextPageParam: getPaginationNextPageNumber }, }, ); - const allAgents = - listStoreAgents?.pages?.flatMap((page) => { - const response = page.data as StoreAgentsResponse; - return response.agents; - }) ?? []; - - const status = listStoreAgents?.pages[0]?.status; + const allAgents = storeAgentsQueryData + ? 
unpaginate(storeAgentsQueryData, "agents") + : []; + const status = storeAgentsQueryData?.pages[0]?.status; const { mutateAsync: addMarketplaceAgent } = usePostV2AddMarketplaceAgent({ mutation: { diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MyAgentsContent/useMyAgentsContent.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MyAgentsContent/useMyAgentsContent.ts index 88645393d7..5ce19afe96 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MyAgentsContent/useMyAgentsContent.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/MyAgentsContent/useMyAgentsContent.ts @@ -1,5 +1,5 @@ +import { getPaginationNextPageNumber, unpaginate } from "@/app/api/helpers"; import { useGetV2ListLibraryAgentsInfinite } from "@/app/api/__generated__/endpoints/library/library"; -import { LibraryAgentResponse } from "@/app/api/__generated__/models/libraryAgentResponse"; import { useState } from "react"; import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; import { useAddAgentToBuilder } from "../hooks/useAddAgentToBuilder"; @@ -12,7 +12,7 @@ export const useMyAgentsContent = () => { const { toast } = useToast(); const { - data: agents, + data: agentsQueryData, fetchNextPage, hasNextPage, isFetchingNextPage, @@ -26,26 +26,14 @@ export const useMyAgentsContent = () => { page_size: 10, }, { - query: { - getNextPageParam: (lastPage) => { - const pagination = (lastPage.data as LibraryAgentResponse).pagination; - const isMore = - pagination.current_page * pagination.page_size < - pagination.total_items; - - return isMore ? pagination.current_page + 1 : undefined; - }, - }, + query: { getNextPageParam: getPaginationNextPageNumber }, }, ); - const allAgents = - agents?.pages?.flatMap((page) => { - const response = page.data as LibraryAgentResponse; - return response.agents; - }) ?? []; - - const status = agents?.pages[0]?.status; + const allAgents = agentsQueryData + ? 
unpaginate(agentsQueryData, "agents") + : []; + const status = agentsQueryData?.pages[0]?.status; const handleAddBlock = async (agent: LibraryAgent) => { setSelectedAgentId(agent.id); diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/PaginatedBlocksContent/usePaginatedBlocks.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/PaginatedBlocksContent/usePaginatedBlocks.ts index 5348998021..b44fb871f3 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/PaginatedBlocksContent/usePaginatedBlocks.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/PaginatedBlocksContent/usePaginatedBlocks.ts @@ -1,5 +1,5 @@ +import { getPaginationNextPageNumber, unpaginate } from "@/app/api/helpers"; import { useGetV2GetBuilderBlocksInfinite } from "@/app/api/__generated__/endpoints/default/default"; -import { BlockResponse } from "@/app/api/__generated__/models/blockResponse"; interface UsePaginatedBlocksProps { type?: "all" | "input" | "action" | "output" | null; @@ -8,7 +8,7 @@ interface UsePaginatedBlocksProps { const PAGE_SIZE = 10; export const usePaginatedBlocks = ({ type }: UsePaginatedBlocksProps) => { const { - data: blocks, + data: blocksQueryData, fetchNextPage, hasNextPage, isFetchingNextPage, @@ -22,26 +22,14 @@ export const usePaginatedBlocks = ({ type }: UsePaginatedBlocksProps) => { type, }, { - query: { - getNextPageParam: (lastPage) => { - const pagination = (lastPage.data as BlockResponse).pagination; - const isMore = - pagination.current_page * pagination.page_size < - pagination.total_items; - - return isMore ? pagination.current_page + 1 : undefined; - }, - }, + query: { getNextPageParam: getPaginationNextPageNumber }, }, ); - const allBlocks = - blocks?.pages?.flatMap((page) => { - const response = page.data as BlockResponse; - return response.blocks; - }) ?? []; - - const status = blocks?.pages[0]?.status; + const allBlocks = blocksQueryData + ? 
unpaginate(blocksQueryData, "blocks") + : []; + const status = blocksQueryData?.pages[0]?.status; return { allBlocks, diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/PaginatedIntegrationList/usePaginatedIntegrationList.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/PaginatedIntegrationList/usePaginatedIntegrationList.ts index cf84ed94eb..3462b8f619 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/PaginatedIntegrationList/usePaginatedIntegrationList.ts +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/NewControlPanel/NewBlockMenu/PaginatedIntegrationList/usePaginatedIntegrationList.ts @@ -1,11 +1,11 @@ +import { getPaginationNextPageNumber, unpaginate } from "@/app/api/helpers"; import { useGetV2GetBuilderIntegrationProvidersInfinite } from "@/app/api/__generated__/endpoints/default/default"; -import { ProviderResponse } from "@/app/api/__generated__/models/providerResponse"; const PAGE_SIZE = 10; export const usePaginatedIntegrationList = () => { const { - data: providers, + data: providersQueryData, fetchNextPage, hasNextPage, isFetchingNextPage, @@ -18,26 +18,14 @@ export const usePaginatedIntegrationList = () => { page_size: PAGE_SIZE, }, { - query: { - getNextPageParam: (lastPage: any) => { - const pagination = (lastPage.data as ProviderResponse).pagination; - const isMore = - pagination.current_page * pagination.page_size < - pagination.total_items; - - return isMore ? pagination.current_page + 1 : undefined; - }, - }, + query: { getNextPageParam: getPaginationNextPageNumber }, }, ); - const allProviders = - providers?.pages?.flatMap((page: any) => { - const response = page.data as ProviderResponse; - return response.providers; - }) ?? []; - - const status = providers?.pages[0]?.status; + const allProviders = providersQueryData + ? 
unpaginate(providersQueryData, "providers") + : []; + const status = providersQueryData?.pages[0]?.status; return { allProviders, diff --git a/autogpt_platform/frontend/src/app/(platform)/chat/useChatSession.ts b/autogpt_platform/frontend/src/app/(platform)/chat/useChatSession.ts index 0a350f98bb..99f4efc093 100644 --- a/autogpt_platform/frontend/src/app/(platform)/chat/useChatSession.ts +++ b/autogpt_platform/frontend/src/app/(platform)/chat/useChatSession.ts @@ -11,6 +11,7 @@ import { import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessionDetailResponse"; import { storage, Key } from "@/services/storage/local-storage"; import { isValidUUID } from "@/app/(platform)/chat/helpers"; +import { okData } from "@/app/api/helpers"; interface UseChatSessionArgs { urlSessionId?: string | null; @@ -70,6 +71,7 @@ export function useChatSession({ } = useGetV2GetSession(sessionId || "", { query: { enabled: !!sessionId, + select: okData, staleTime: Infinity, // Never mark as stale refetchOnMount: false, // Don't refetch on component mount refetchOnWindowFocus: false, // Don't refetch when window regains focus @@ -81,9 +83,8 @@ export function useChatSession({ const { mutateAsync: claimSessionMutation } = usePatchV2SessionAssignUser(); const session = useMemo(() => { - if (sessionData?.status === 200) { - return sessionData.data; - } + if (sessionData) return sessionData; + if (sessionId && justCreatedSessionIdRef.current === sessionId) { return { id: sessionId, diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/ScheduleAgentModal/components/TimezoneNotice/TimezoneNotice.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/ScheduleAgentModal/components/TimezoneNotice/TimezoneNotice.tsx index d5d7c011a6..97ee4605f2 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/ScheduleAgentModal/components/TimezoneNotice/TimezoneNotice.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/ScheduleAgentModal/components/TimezoneNotice/TimezoneNotice.tsx @@ -1,15 +1,11 @@ -import { useGetV1GetUserTimezone } from "@/app/api/__generated__/endpoints/auth/auth"; +import { useUserTimezone } from "@/lib/hooks/useUserTimezone"; import { getTimezoneDisplayName } from "@/lib/timezone-utils"; import { InfoIcon } from "@phosphor-icons/react"; export function TimezoneNotice() { - const { data: userTimezone, isSuccess } = useGetV1GetUserTimezone({ - query: { - select: (res) => (res.status === 200 ? 
res.data.timezone : undefined), - }, - }); + const userTimezone = useUserTimezone(); - if (!isSuccess) { + if (!userTimezone) { return null; } diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/AgentActionsDropdown.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/AgentActionsDropdown.tsx index e94878f070..834173cba4 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/AgentActionsDropdown.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/AgentActionsDropdown.tsx @@ -1,7 +1,7 @@ "use client"; import { - getGetV1ListGraphExecutionsInfiniteQueryOptions, + getGetV1ListGraphExecutionsQueryKey, getV1GetGraphVersion, useDeleteV1DeleteGraphExecution, } from "@/app/api/__generated__/endpoints/graphs/graphs"; @@ -127,9 +127,7 @@ export function AgentActionsDropdown({ toast({ title: "Task deleted" }); await queryClient.refetchQueries({ - queryKey: - getGetV1ListGraphExecutionsInfiniteQueryOptions(agentGraphId) - .queryKey, + queryKey: getGetV1ListGraphExecutionsQueryKey(agentGraphId), }); if (onClearSelectedRun) onClearSelectedRun(); diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/useSelectedRunActions.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/useSelectedRunActions.ts index 03fc0b4ae8..9bcfd9d964 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/useSelectedRunActions.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/useSelectedRunActions.ts @@ -1,7 +1,7 @@ "use client"; import { - getGetV1ListGraphExecutionsInfiniteQueryOptions, + getGetV1ListGraphExecutionsQueryKey, usePostV1ExecuteGraphAgent, usePostV1StopGraphExecution, } from "@/app/api/__generated__/endpoints/graphs/graphs"; @@ -11,6 +11,7 @@ import { } from "@/app/api/__generated__/endpoints/presets/presets"; import type { GraphExecution } from "@/app/api/__generated__/models/graphExecution"; import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; +import { okData } from "@/app/api/helpers"; import { useToast } from "@/components/molecules/Toast/use-toast"; import { useQueryClient } from "@tanstack/react-query"; import { useState } from "react"; @@ -58,9 +59,7 @@ export function useSelectedRunActions({ toast({ title: "Run stopped" }); await queryClient.invalidateQueries({ - queryKey: - getGetV1ListGraphExecutionsInfiniteQueryOptions(agentGraphId) - .queryKey, + queryKey: getGetV1ListGraphExecutionsQueryKey(agentGraphId), }); } catch (error: unknown) { toast({ @@ -97,12 +96,10 @@ export function useSelectedRunActions({ }, }); - const newRunId = res?.status === 200 ? (res?.data?.id ?? 
"") : ""; + const newRunId = okData(res)?.id; await queryClient.invalidateQueries({ - queryKey: - getGetV1ListGraphExecutionsInfiniteQueryOptions(agentGraphId) - .queryKey, + queryKey: getGetV1ListGraphExecutionsQueryKey(agentGraphId), }); if (newRunId && onSelectRun) onSelectRun(newRunId); diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/useSelectedRunView.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/useSelectedRunView.ts index 342241ef89..e3e035cea0 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/useSelectedRunView.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/useSelectedRunView.ts @@ -3,14 +3,12 @@ import { useGetV1GetExecutionDetails } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { useGetV2GetASpecificPreset } from "@/app/api/__generated__/endpoints/presets/presets"; import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecutionStatus"; -import type { GetV1GetExecutionDetails200 } from "@/app/api/__generated__/models/getV1GetExecutionDetails200"; -import type { LibraryAgentPreset } from "@/app/api/__generated__/models/libraryAgentPreset"; import { okData } from "@/app/api/helpers"; export function useSelectedRunView(graphId: string, runId: string) { - const query = useGetV1GetExecutionDetails(graphId, runId, { + const executionQuery = useGetV1GetExecutionDetails(graphId, runId, { query: { - refetchInterval: (q: any) => { + refetchInterval: (q) => { const isSuccess = q.state.data?.status === 200; if (!isSuccess) return false; @@ -33,22 +31,15 @@ export function useSelectedRunView(graphId: string, runId: string) { }, }); - const status = query.data?.status; + const run = okData(executionQuery.data); + const status = executionQuery.data?.status; - const run: GetV1GetExecutionDetails200 | undefined = - status === 200 - ? (query.data?.data as GetV1GetExecutionDetails200) - : undefined; - - const presetId = - run && "preset_id" in run && run.preset_id - ? 
(run.preset_id as string) - : undefined; + const presetId = run?.preset_id || undefined; const presetQuery = useGetV2GetASpecificPreset(presetId || "", { query: { enabled: !!presetId, - select: (res) => okData(res), + select: okData, }, }); @@ -60,8 +51,8 @@ export function useSelectedRunView(graphId: string, runId: string) { return { run, preset: presetQuery.data, - isLoading: query.isLoading || presetQuery.isLoading, - responseError: query.error || presetQuery.error, + isLoading: executionQuery.isLoading || presetQuery.isLoading, + responseError: executionQuery.error || presetQuery.error, httpError, } as const; } diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/SelectedScheduleView.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/SelectedScheduleView.tsx index 0672ddc033..678f711097 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/SelectedScheduleView.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/SelectedScheduleView.tsx @@ -1,12 +1,12 @@ "use client"; -import { useGetV1GetUserTimezone } from "@/app/api/__generated__/endpoints/auth/auth"; import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner"; import { Text } from "@/components/atoms/Text/Text"; import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard"; import { humanizeCronExpression } from "@/lib/cron-expression-utils"; import { isLargeScreen, useBreakpoint } from "@/lib/hooks/useBreakpoint"; +import { useUserTimezone } from "@/lib/hooks/useUserTimezone"; import { formatInTimezone, getTimezoneDisplayName } from "@/lib/timezone-utils"; import { AgentInputsReadOnly } from "../../modals/AgentInputsReadOnly/AgentInputsReadOnly"; import { LoadingSelectedContent } from "../LoadingSelectedContent"; @@ -36,11 +36,7 @@ export function SelectedScheduleView({ scheduleId, ); - const { data: userTzRes } = useGetV1GetUserTimezone({ - query: { - select: (res) => (res.status === 200 ? res.data.timezone : undefined), - }, - }); + const userTimezone = useUserTimezone(); const breakpoint = useBreakpoint(); const isLgScreenUp = isLargeScreen(breakpoint); @@ -90,7 +86,7 @@ export function SelectedScheduleView({ run={undefined} scheduleRecurrence={ schedule - ? `${humanizeCronExpression(schedule.cron || "")} · ${getTimezoneDisplayName(schedule.timezone || userTzRes || "UTC")}` + ? 
`${humanizeCronExpression(schedule.cron || "")} · ${getTimezoneDisplayName(schedule.timezone || userTimezone || "UTC")}` : undefined } /> @@ -125,7 +121,7 @@ export function SelectedScheduleView({ {" "} {getTimezoneDisplayName( - schedule.timezone || userTzRes || "UTC", + schedule.timezone || userTimezone || "UTC", )} @@ -135,7 +131,7 @@ export function SelectedScheduleView({ {formatInTimezone( schedule.next_run_time, - userTzRes || "UTC", + userTimezone || "UTC", { year: "numeric", month: "long", @@ -148,7 +144,7 @@ export function SelectedScheduleView({ {" "} {getTimezoneDisplayName( - schedule.timezone || userTzRes || "UTC", + schedule.timezone || userTimezone || "UTC", )} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/components/EditScheduleModal/useEditScheduleModal.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/components/EditScheduleModal/useEditScheduleModal.ts index b006e775f9..427340a427 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/components/EditScheduleModal/useEditScheduleModal.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/components/EditScheduleModal/useEditScheduleModal.ts @@ -1,7 +1,7 @@ "use client"; -import { getGetV1ListGraphExecutionsInfiniteQueryOptions } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { getGetV1ListExecutionSchedulesForAGraphQueryKey } from "@/app/api/__generated__/endpoints/schedules/schedules"; +import { getGetV1ListGraphExecutionsQueryKey } from "@/app/api/__generated__/endpoints/graphs/graphs"; import type { GraphExecutionJobInfo } from "@/app/api/__generated__/models/graphExecutionJobInfo"; import { useToast } from "@/components/molecules/Toast/use-toast"; import { useMutation, useQueryClient } from "@tanstack/react-query"; @@ -94,8 +94,7 @@ export function useEditScheduleModal( await queryClient.invalidateQueries({ queryKey: getGetV1ListExecutionSchedulesForAGraphQueryKey(graphId), }); - const runsKey = getGetV1ListGraphExecutionsInfiniteQueryOptions(graphId) - .queryKey as any; + const runsKey = getGetV1ListGraphExecutionsQueryKey(graphId); await queryClient.invalidateQueries({ queryKey: runsKey }); setIsOpen(false); }, diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/useSelectedScheduleView.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/useSelectedScheduleView.ts index 01905eb296..66263e2dcc 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/useSelectedScheduleView.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedScheduleView/useSelectedScheduleView.ts @@ -2,30 +2,29 @@ import { useMemo } from "react"; import { useGetV1ListExecutionSchedulesForAGraph } from "@/app/api/__generated__/endpoints/schedules/schedules"; -import type { GraphExecutionJobInfo } from "@/app/api/__generated__/models/graphExecutionJobInfo"; 
+import { okData } from "@/app/api/helpers"; export function useSelectedScheduleView(graphId: string, scheduleId: string) { - const query = useGetV1ListExecutionSchedulesForAGraph(graphId, { + const schedulesQuery = useGetV1ListExecutionSchedulesForAGraph(graphId, { query: { enabled: !!graphId, - select: (res) => - res.status === 200 ? (res.data as GraphExecutionJobInfo[]) : [], + select: okData, }, }); const schedule = useMemo( - () => query.data?.find((s) => s.id === scheduleId), - [query.data, scheduleId], + () => schedulesQuery.data?.find((s) => s.id === scheduleId), + [schedulesQuery.data, scheduleId], ); const httpError = - query.isSuccess && !schedule + schedulesQuery.isSuccess && !schedule ? { status: 404, statusText: "Not found" } : undefined; return { schedule, - isLoading: query.isLoading, - error: query.error || httpError, + isLoading: schedulesQuery.isLoading, + error: schedulesQuery.error || httpError, } as const; } diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/components/SelectedTemplateActions.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/components/SelectedTemplateActions.tsx index 1d50ec7c85..008d2cc379 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/components/SelectedTemplateActions.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/components/SelectedTemplateActions.tsx @@ -2,10 +2,10 @@ import { getGetV2ListPresetsQueryKey, + getV2ListPresets, useDeleteV2DeleteAPreset, } from "@/app/api/__generated__/endpoints/presets/presets"; import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; -import type { LibraryAgentPresetResponse } from "@/app/api/__generated__/models/libraryAgentPresetResponse"; import { okData } from "@/app/api/helpers"; import { Button } from "@/components/atoms/Button/Button"; import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner"; @@ -56,15 +56,13 @@ export function SelectedTemplateActions({ queryKey, }); - const queryData = queryClient.getQueryData<{ - data: LibraryAgentPresetResponse; - }>(queryKey); + const queryData = + queryClient.getQueryData< + Awaited> + >(queryKey); - const presets = - okData(queryData)?.presets ?? []; - const templates = presets.filter( - (preset) => !preset.webhook_id || !preset.webhook, - ); + const presets = okData(queryData)?.presets ?? 
[]; + const templates = presets.filter((preset) => !preset.webhook_id); setShowDeleteDialog(false); onDeleted?.(); diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/useSelectedTemplateView.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/useSelectedTemplateView.ts index a0f34f54a2..66dd26f488 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/useSelectedTemplateView.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTemplateView/useSelectedTemplateView.ts @@ -1,6 +1,6 @@ "use client"; -import { getGetV1ListGraphExecutionsInfiniteQueryOptions } from "@/app/api/__generated__/endpoints/graphs/graphs"; +import { getGetV1ListGraphExecutionsQueryKey } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { getGetV2GetASpecificPresetQueryKey, getGetV2ListPresetsQueryKey, @@ -9,7 +9,6 @@ import { usePostV2ExecuteAPreset, } from "@/app/api/__generated__/endpoints/presets/presets"; import type { GraphExecutionMeta } from "@/app/api/__generated__/models/graphExecutionMeta"; -import type { LibraryAgentPreset } from "@/app/api/__generated__/models/libraryAgentPreset"; import type { LibraryAgentPresetUpdatable } from "@/app/api/__generated__/models/libraryAgentPresetUpdatable"; import { okData } from "@/app/api/helpers"; import { useToast } from "@/components/molecules/Toast/use-toast"; @@ -34,7 +33,7 @@ export function useSelectedTemplateView({ const query = useGetV2GetASpecificPreset(templateId, { query: { enabled: !!templateId, - select: (res) => okData(res), + select: okData, }, }); @@ -83,15 +82,13 @@ export function useSelectedTemplateView({ mutation: { onSuccess: (response) => { if (response.status === 200) { - const execution = okData(response); + const execution = okData(response); if (execution) { toast({ title: "Task started", }); queryClient.invalidateQueries({ - queryKey: - getGetV1ListGraphExecutionsInfiniteQueryOptions(graphId) - .queryKey, + queryKey: getGetV1ListGraphExecutionsQueryKey(graphId), }); onRunCreated?.(execution); } diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/components/SelectedTriggerActions.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/components/SelectedTriggerActions.tsx index 0746027f37..a5b895c3fa 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/components/SelectedTriggerActions.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/components/SelectedTriggerActions.tsx @@ -2,10 +2,10 @@ import { getGetV2ListPresetsQueryKey, + getV2ListPresets, useDeleteV2DeleteAPreset, } from "@/app/api/__generated__/endpoints/presets/presets"; import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; -import type { LibraryAgentPresetResponse } from "@/app/api/__generated__/models/libraryAgentPresetResponse"; import { okData } from "@/app/api/helpers"; 
import { Button } from "@/components/atoms/Button/Button"; import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner"; @@ -52,15 +52,13 @@ export function SelectedTriggerActions({ queryKey, }); - const queryData = queryClient.getQueryData<{ - data: LibraryAgentPresetResponse; - }>(queryKey); + const queryData = + queryClient.getQueryData< + Awaited> + >(queryKey); - const presets = - okData(queryData)?.presets ?? []; - const triggers = presets.filter( - (preset) => preset.webhook_id && preset.webhook, - ); + const presets = okData(queryData)?.presets ?? []; + const triggers = presets.filter((preset) => preset.webhook_id); setShowDeleteDialog(false); onDeleted?.(); diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/useSelectedTriggerView.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/useSelectedTriggerView.ts index 4669d850b2..235c653134 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/useSelectedTriggerView.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedTriggerView/useSelectedTriggerView.ts @@ -6,7 +6,6 @@ import { useGetV2GetASpecificPreset, usePatchV2UpdateAnExistingPreset, } from "@/app/api/__generated__/endpoints/presets/presets"; -import type { LibraryAgentPreset } from "@/app/api/__generated__/models/libraryAgentPreset"; import type { LibraryAgentPresetUpdatable } from "@/app/api/__generated__/models/libraryAgentPresetUpdatable"; import { okData } from "@/app/api/helpers"; import { useToast } from "@/components/molecules/Toast/use-toast"; @@ -26,7 +25,7 @@ export function useSelectedTriggerView({ triggerId, graphId }: Args) { const query = useGetV2GetASpecificPreset(triggerId, { query: { enabled: !!triggerId, - select: (res) => okData(res), + select: okData, }, }); diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/components/TaskActionsDropdown.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/components/TaskActionsDropdown.tsx index 95cc7740f8..ba923bca68 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/components/TaskActionsDropdown.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/components/TaskActionsDropdown.tsx @@ -1,7 +1,7 @@ "use client"; import { - getGetV1ListGraphExecutionsInfiniteQueryOptions, + getGetV1ListGraphExecutionsQueryKey, useDeleteV1DeleteGraphExecution, } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { @@ -51,9 +51,7 @@ export function TaskActionsDropdown({ agent, run, onDeleted }: Props) { toast({ title: "Task deleted" }); await queryClient.refetchQueries({ - queryKey: getGetV1ListGraphExecutionsInfiniteQueryOptions( - agent.graph_id, - ).queryKey, + queryKey: getGetV1ListGraphExecutionsQueryKey(agent.graph_id), }); setShowDeleteDialog(false); diff --git 
a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/helpers.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/helpers.ts deleted file mode 100644 index 096e40239b..0000000000 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/helpers.ts +++ /dev/null @@ -1,44 +0,0 @@ -import type { GraphExecutionsPaginated } from "@/app/api/__generated__/models/graphExecutionsPaginated"; -import type { InfiniteData } from "@tanstack/react-query"; - -function hasValidExecutionsData( - page: unknown, -): page is { data: GraphExecutionsPaginated } { - return ( - typeof page === "object" && - page !== null && - "data" in page && - typeof (page as { data: unknown }).data === "object" && - (page as { data: unknown }).data !== null && - "executions" in (page as { data: GraphExecutionsPaginated }).data - ); -} - -export function computeRunsCount( - infiniteData: InfiniteData | undefined, - runsLength: number, -): number { - const lastPage = infiniteData?.pages.at(-1); - if (!hasValidExecutionsData(lastPage)) return runsLength; - return lastPage.data.pagination?.total_items || runsLength; -} - -export function getNextRunsPageParam(lastPage: unknown): number | undefined { - if (!hasValidExecutionsData(lastPage)) return undefined; - - const { pagination } = lastPage.data; - const hasMore = - pagination.current_page * pagination.page_size < pagination.total_items; - return hasMore ? pagination.current_page + 1 : undefined; -} - -export function extractRunsFromPages( - infiniteData: InfiniteData | undefined, -) { - return ( - infiniteData?.pages.flatMap((page) => { - if (!hasValidExecutionsData(page)) return []; - return page.data.executions || []; - }) || [] - ); -} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/useSidebarRunsList.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/useSidebarRunsList.ts index 7f7155bbdf..971b90c2e3 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/useSidebarRunsList.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/sidebar/SidebarRunsList/useSidebarRunsList.ts @@ -2,20 +2,18 @@ import { useEffect, useMemo } from "react"; +import { + okData, + getPaginationNextPageNumber, + getPaginatedTotalCount, + unpaginate, +} from "@/app/api/helpers"; import { useGetV1ListGraphExecutionsInfinite } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { useGetV2ListPresets } from "@/app/api/__generated__/endpoints/presets/presets"; import { useGetV1ListExecutionSchedulesForAGraph } from "@/app/api/__generated__/endpoints/schedules/schedules"; -import type { GraphExecutionJobInfo } from "@/app/api/__generated__/models/graphExecutionJobInfo"; -import type { LibraryAgentPresetResponse } from "@/app/api/__generated__/models/libraryAgentPresetResponse"; -import { okData } from "@/app/api/helpers"; import { useExecutionEvents } from "@/hooks/useExecutionEvents"; import { useQueryClient } from "@tanstack/react-query"; import { parseAsString, useQueryStates } from "nuqs"; -import { - computeRunsCount, - 
extractRunsFromPages, - getNextRunsPageParam, -} from "./helpers"; function parseTab( value: string | null, @@ -66,7 +64,7 @@ export function useSidebarRunsList({ query: { enabled: !!graphId, refetchOnWindowFocus: false, - getNextPageParam: getNextRunsPageParam, + getNextPageParam: getPaginationNextPageNumber, }, }, ); @@ -74,7 +72,7 @@ export function useSidebarRunsList({ const schedulesQuery = useGetV1ListExecutionSchedulesForAGraph(graphId, { query: { enabled: !!graphId, - select: (r) => okData(r), + select: okData, }, }); @@ -83,13 +81,13 @@ export function useSidebarRunsList({ { query: { enabled: !!graphId, - select: (r) => okData(r)?.presets, + select: (r) => okData(r)?.presets, }, }, ); const runs = useMemo( - () => extractRunsFromPages(runsQuery.data), + () => (runsQuery.data ? unpaginate(runsQuery.data, "executions") : []), [runsQuery.data], ); @@ -104,7 +102,7 @@ export function useSidebarRunsList({ [allPresets], ); - const runsCount = computeRunsCount(runsQuery.data, runs.length); + const runsCount = getPaginatedTotalCount(runsQuery.data, runs.length); const schedulesCount = schedules.length; const templatesCount = templates.length; const triggersCount = triggers.length; diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/useNewAgentLibraryView.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/useNewAgentLibraryView.ts index 394edb1a6d..b4cc2baca8 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/useNewAgentLibraryView.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/useNewAgentLibraryView.ts @@ -2,7 +2,6 @@ import { useGetV2GetLibraryAgent } from "@/app/api/__generated__/endpoints/libra import { useGetV2GetASpecificPreset } from "@/app/api/__generated__/endpoints/presets/presets"; import { GraphExecutionJobInfo } from "@/app/api/__generated__/models/graphExecutionJobInfo"; import { GraphExecutionMeta } from "@/app/api/__generated__/models/graphExecutionMeta"; -import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; import { LibraryAgentPreset } from "@/app/api/__generated__/models/libraryAgentPreset"; import { okData } from "@/app/api/helpers"; import { useParams } from "next/navigation"; @@ -31,11 +30,7 @@ export function useNewAgentLibraryView() { data: agent, isSuccess, error, - } = useGetV2GetLibraryAgent(agentId, { - query: { - select: okData, - }, - }); + } = useGetV2GetLibraryAgent(agentId, { query: { select: okData } }); const [{ activeItem, activeTab: activeTabRaw }, setQueryStates] = useQueryStates({ @@ -53,7 +48,7 @@ export function useNewAgentLibraryView() { } = useGetV2GetASpecificPreset(activeItem ?? 
"", { query: { enabled: Boolean(activeTab === "templates" && activeItem), - select: okData, + select: okData, }, }); const activeTemplate = diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-schedule-details-view.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-schedule-details-view.tsx index 414aa3863b..61161088fc 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-schedule-details-view.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-schedule-details-view.tsx @@ -23,7 +23,7 @@ import LoadingBox from "@/components/__legacy__/ui/loading"; import { useToastOnFail } from "@/components/molecules/Toast/use-toast"; import { humanizeCronExpression } from "@/lib/cron-expression-utils"; import { formatScheduleTime } from "@/lib/timezone-utils"; -import { useGetV1GetUserTimezone } from "@/app/api/__generated__/endpoints/auth/auth"; +import { useUserTimezone } from "@/lib/hooks/useUserTimezone"; import { PlayIcon } from "lucide-react"; import { AgentRunStatus } from "./agent-run-status-chip"; @@ -48,11 +48,7 @@ export function AgentScheduleDetailsView({ const toastOnFail = useToastOnFail(); // Get user's timezone for displaying schedule times - const { data: userTimezone } = useGetV1GetUserTimezone({ - query: { - select: (res) => (res.status === 200 ? res.data.timezone : undefined), - }, - }); + const userTimezone = useUserTimezone(); const infoStats: { label: string; value: React.ReactNode }[] = useMemo(() => { return [ diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/cron-scheduler-dialog.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/cron-scheduler-dialog.tsx index e998823a89..30c3e7d777 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/cron-scheduler-dialog.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/cron-scheduler-dialog.tsx @@ -4,8 +4,8 @@ import { Button } from "@/components/__legacy__/ui/button"; import { useToast } from "@/components/molecules/Toast/use-toast"; import { CronScheduler } from "@/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/cron-scheduler"; import { Dialog } from "@/components/molecules/Dialog/Dialog"; -import { useGetV1GetUserTimezone } from "@/app/api/__generated__/endpoints/auth/auth"; import { getTimezoneDisplayName } from "@/lib/timezone-utils"; +import { useUserTimezone } from "@/lib/hooks/useUserTimezone"; import { InfoIcon } from "lucide-react"; // Base type for cron expression only @@ -50,11 +50,7 @@ export function CronSchedulerDialog(props: CronSchedulerDialogProps) { ); // Get user's timezone - const { data: userTimezone } = useGetV1GetUserTimezone({ - query: { - select: (res) => (res.status === 200 ? 
res.data.timezone : undefined), - }, - }); + const userTimezone = useUserTimezone(); const timezoneDisplay = getTimezoneDisplayName(userTimezone || "UTC"); // Reset state when dialog opens diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/use-agent-runs.ts b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/use-agent-runs.ts index f997726e21..c74a37e6d0 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/use-agent-runs.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/use-agent-runs.ts @@ -1,15 +1,20 @@ +import { + GraphExecutionMeta as LegacyGraphExecutionMeta, + GraphID, + GraphExecutionID, +} from "@/lib/autogpt-server-api"; +import { getQueryClient } from "@/lib/react-query/queryClient"; +import { + getPaginatedTotalCount, + getPaginationNextPageNumber, + unpaginate, +} from "@/app/api/helpers"; import { getV1ListGraphExecutionsResponse, getV1ListGraphExecutionsResponse200, useGetV1ListGraphExecutionsInfinite, } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { GraphExecutionsPaginated } from "@/app/api/__generated__/models/graphExecutionsPaginated"; -import { getQueryClient } from "@/lib/react-query/queryClient"; -import { - GraphExecutionMeta as LegacyGraphExecutionMeta, - GraphID, - GraphExecutionID, -} from "@/lib/autogpt-server-api"; import { GraphExecutionMeta as RawGraphExecutionMeta } from "@/app/api/__generated__/models/graphExecutionMeta"; export type GraphExecutionMeta = Omit< @@ -44,15 +49,7 @@ export const useAgentRunsInfinite = (graphID?: GraphID) => { { page: 1, page_size: 20 }, { query: { - getNextPageParam: (lastPage) => { - const pagination = (lastPage.data as GraphExecutionsPaginated) - .pagination; - const hasMore = - pagination.current_page * pagination.page_size < - pagination.total_items; - - return hasMore ? pagination.current_page + 1 : undefined; - }, + getNextPageParam: getPaginationNextPageNumber, // Prevent query from running if graphID is not available (yet) ...(!graphID @@ -80,15 +77,8 @@ export const useAgentRunsInfinite = (graphID?: GraphID) => { queryClient, ); - const agentRuns = - queryResults?.pages.flatMap((page) => { - const response = page.data as GraphExecutionsPaginated; - return response.executions; - }) ?? []; - - const agentRunCount = ( - queryResults?.pages.at(-1)?.data as GraphExecutionsPaginated | undefined - )?.pagination.total_items; + const agentRuns = queryResults ? 
unpaginate(queryResults, "executions") : []; + const agentRunCount = getPaginatedTotalCount(queryResults); const upsertAgentRun = (newAgentRun: GraphExecutionMeta) => { queryClient.setQueryData( diff --git a/autogpt_platform/frontend/src/app/(platform)/library/components/LibraryAgentList/useLibraryAgentList.ts b/autogpt_platform/frontend/src/app/(platform)/library/components/LibraryAgentList/useLibraryAgentList.ts index 8ae2c659a6..e9db9a02da 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/components/LibraryAgentList/useLibraryAgentList.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/components/LibraryAgentList/useLibraryAgentList.ts @@ -1,7 +1,11 @@ "use client"; +import { + getPaginatedTotalCount, + getPaginationNextPageNumber, + unpaginate, +} from "@/app/api/helpers"; import { useGetV2ListLibraryAgentsInfinite } from "@/app/api/__generated__/endpoints/library/library"; -import { LibraryAgentResponse } from "@/app/api/__generated__/models/libraryAgentResponse"; import { useLibraryPageContext } from "../state-provider"; import { useLibraryAgentsStore } from "@/hooks/useLibraryAgents/store"; import { getInitialData } from "./helpers"; @@ -11,7 +15,7 @@ export const useLibraryAgentList = () => { const { agents: cachedAgents } = useLibraryAgentsStore(); const { - data: agents, + data: agentsQueryData, fetchNextPage, hasNextPage, isFetchingNextPage, @@ -26,27 +30,15 @@ export const useLibraryAgentList = () => { { query: { initialData: getInitialData(cachedAgents, searchTerm, 8), - getNextPageParam: (lastPage) => { - const pagination = (lastPage.data as LibraryAgentResponse).pagination; - const isMore = - pagination.current_page * pagination.page_size < - pagination.total_items; - - return isMore ? pagination.current_page + 1 : undefined; - }, + getNextPageParam: getPaginationNextPageNumber, }, }, ); - const allAgents = - agents?.pages?.flatMap((page) => { - const response = page.data as LibraryAgentResponse; - return response.agents; - }) ?? []; - - const agentCount = agents?.pages?.[0] - ? (agents.pages[0].data as LibraryAgentResponse).pagination.total_items - : 0; + const allAgents = agentsQueryData + ? unpaginate(agentsQueryData, "agents") + : []; + const agentCount = getPaginatedTotalCount(agentsQueryData); return { allAgents, diff --git a/autogpt_platform/frontend/src/app/(platform)/library/hooks/useFavoriteAgents.ts b/autogpt_platform/frontend/src/app/(platform)/library/hooks/useFavoriteAgents.ts index 633ad72712..933670ca80 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/hooks/useFavoriteAgents.ts +++ b/autogpt_platform/frontend/src/app/(platform)/library/hooks/useFavoriteAgents.ts @@ -1,10 +1,15 @@ "use client"; +import { + getPaginatedTotalCount, + getPaginationNextPageNumber, + unpaginate, +} from "@/app/api/helpers"; import { useGetV2ListFavoriteLibraryAgentsInfinite } from "@/app/api/__generated__/endpoints/library/library"; export function useFavoriteAgents() { const { - data: agents, + data: agentsQueryData, fetchNextPage, hasNextPage, isFetchingNextPage, @@ -15,36 +20,14 @@ export function useFavoriteAgents() { page_size: 10, }, { - query: { - getNextPageParam: (lastPage) => { - // Only paginate on successful responses - if (!lastPage || lastPage.status !== 200) return undefined; - - const pagination = lastPage.data.pagination; - const isMore = - pagination.current_page * pagination.page_size < - pagination.total_items; - - return isMore ? 
pagination.current_page + 1 : undefined; - }, - }, + query: { getNextPageParam: getPaginationNextPageNumber }, }, ); - const allAgents = - agents?.pages?.flatMap((page) => { - // Only process successful responses - if (!page || page.status !== 200) return []; - const response = page.data; - return response?.agents || []; - }) ?? []; - - const agentCount = (() => { - const firstPage = agents?.pages?.[0]; - // Only count from successful responses - if (!firstPage || firstPage.status !== 200) return 0; - return firstPage.data?.pagination?.total_items || 0; - })(); + const allAgents = agentsQueryData + ? unpaginate(agentsQueryData, "agents") + : []; + const agentCount = getPaginatedTotalCount(agentsQueryData); return { allAgents, diff --git a/autogpt_platform/frontend/src/app/(platform)/monitoring/components/SchedulesTable.tsx b/autogpt_platform/frontend/src/app/(platform)/monitoring/components/SchedulesTable.tsx index f069510b01..ad35db11b1 100644 --- a/autogpt_platform/frontend/src/app/(platform)/monitoring/components/SchedulesTable.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/monitoring/components/SchedulesTable.tsx @@ -15,11 +15,11 @@ import { ScrollArea } from "@/components/__legacy__/ui/scroll-area"; import { ClockIcon, Loader2 } from "lucide-react"; import { useToast } from "@/components/molecules/Toast/use-toast"; import { humanizeCronExpression } from "@/lib/cron-expression-utils"; +import { useUserTimezone } from "@/lib/hooks/useUserTimezone"; import { formatScheduleTime, getTimezoneAbbreviation, } from "@/lib/timezone-utils"; -import { useGetV1GetUserTimezone } from "@/app/api/__generated__/endpoints/auth/auth"; import { Select, SelectContent, @@ -66,11 +66,7 @@ export const SchedulesTable = ({ const [selectedFilter, setSelectedFilter] = useState(""); // Graph ID // Get user's timezone for displaying schedule times - const { data: userTimezone } = useGetV1GetUserTimezone({ - query: { - select: (res) => (res.status === 200 ? res.data.timezone : "UTC"), - }, - }); + const userTimezone = useUserTimezone() ?? "UTC"; const filteredAndSortedSchedules = [...schedules] .filter( diff --git a/autogpt_platform/frontend/src/app/(platform)/monitoring/page.tsx b/autogpt_platform/frontend/src/app/(platform)/monitoring/page.tsx index 5e70245ac9..3b5aa46839 100644 --- a/autogpt_platform/frontend/src/app/(platform)/monitoring/page.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/monitoring/page.tsx @@ -7,6 +7,7 @@ import { useGetV1ListExecutionSchedulesForAUser, useDeleteV1DeleteExecutionSchedule, } from "@/app/api/__generated__/endpoints/schedules/schedules"; +import { okData } from "@/app/api/helpers"; import { Card } from "@/components/__legacy__/ui/card"; import { SchedulesTable } from "@/app/(platform)/monitoring/components/SchedulesTable"; @@ -34,8 +35,7 @@ const Monitor = () => { useGetV1ListExecutionSchedulesForAUser(); const deleteScheduleMutation = useDeleteV1DeleteExecutionSchedule(); - const schedules = - schedulesResponse?.status === 200 ? schedulesResponse.data : []; + const schedules = okData(schedulesResponse) ?? 
[]; const removeSchedule = useCallback( async (scheduleId: string) => { diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeySection/useAPISection.ts b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeySection/useAPISection.ts index 5fe691f025..d4ad54162e 100644 --- a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeySection/useAPISection.ts +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/api-keys/components/APIKeySection/useAPISection.ts @@ -4,6 +4,7 @@ import { useDeleteV1RevokeApiKey, useGetV1ListUserApiKeys, } from "@/app/api/__generated__/endpoints/api-keys/api-keys"; +import { okData } from "@/app/api/helpers"; import { useToast } from "@/components/molecules/Toast/use-toast"; import { getQueryClient } from "@/lib/react-query/queryClient"; @@ -13,11 +14,7 @@ export const useAPISection = () => { const { data: apiKeys, isLoading } = useGetV1ListUserApiKeys({ query: { - select: (res) => { - if (res.status !== 200) return undefined; - - return res.data.filter((key) => key.status === "ACTIVE"); - }, + select: (res) => okData(res)?.filter((key) => key.status === "ACTIVE"), }, }); diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/useOAuthApps.ts b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/useOAuthApps.ts index 5b5afc5783..cf9749c53a 100644 --- a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/useOAuthApps.ts +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/oauth-apps/components/useOAuthApps.ts @@ -7,7 +7,6 @@ import { usePostOauthUploadAppLogo, getGetOauthListMyOauthAppsQueryKey, } from "@/app/api/__generated__/endpoints/oauth/oauth"; -import { OAuthApplicationInfo } from "@/app/api/__generated__/models/oAuthApplicationInfo"; import { okData } from "@/app/api/helpers"; import { useToast } from "@/components/molecules/Toast/use-toast"; import { getQueryClient } from "@/lib/react-query/queryClient"; @@ -19,7 +18,7 @@ export const useOAuthApps = () => { const [uploadingAppId, setUploadingAppId] = useState(null); const { data: oauthAppsResponse, isLoading } = useGetOauthListMyOauthApps({ - query: { select: okData }, + query: { select: okData }, }); const { mutateAsync: updateStatus } = usePatchOauthUpdateAppStatus({ diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/settings/page.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/settings/page.tsx index f0eb8a6b8c..8b4d48de83 100644 --- a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/settings/page.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/settings/page.tsx @@ -6,6 +6,7 @@ import { useGetV1GetNotificationPreferences, useGetV1GetUserTimezone, } from "@/app/api/__generated__/endpoints/auth/auth"; +import { okData } from "@/app/api/helpers"; import { Text } from "@/components/atoms/Text/Text"; import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard"; import { useSupabase } from "@/lib/supabase/hooks/useSupabase"; @@ -24,7 +25,7 @@ export default function SettingsPage() { } = useGetV1GetNotificationPreferences({ query: { enabled: !!user, - select: (res) => (res.status === 200 ? res.data : null), + select: okData, }, }); @@ -32,9 +33,7 @@ export default function SettingsPage() { useGetV1GetUserTimezone({ query: { enabled: !!user, - select: (res) => { - return res.status === 200 ? 
String(res.data.timezone) : "not-set"; - }, + select: (res) => okData(res)?.timezone ?? "not-set", }, }); diff --git a/autogpt_platform/frontend/src/app/api/helpers.ts b/autogpt_platform/frontend/src/app/api/helpers.ts index 2ed45c9517..e9a708ba4c 100644 --- a/autogpt_platform/frontend/src/app/api/helpers.ts +++ b/autogpt_platform/frontend/src/app/api/helpers.ts @@ -1,7 +1,12 @@ +import type { InfiniteData } from "@tanstack/react-query"; import { getV1IsOnboardingEnabled, getV1OnboardingState, } from "./__generated__/endpoints/onboarding/onboarding"; +import { Pagination } from "./__generated__/models/pagination"; + +export type OKData = + (TResponse & { status: 200 })["data"]; /** * Narrow an orval response to its success payload if and only if it is a `200` status with OK shape. @@ -9,13 +14,15 @@ import { * Usage with React Query select: * ```ts * const { data: agent } = useGetV2GetLibraryAgent(agentId, { - * query: { select: okData }, + * query: { select: okData }, * }); * * data // is now properly typed as LibraryAgent | undefined * ``` */ -export function okData(res: unknown): T | undefined { +export function okData( + res: TResponse | undefined, +): OKData | undefined { if (!res || typeof res !== "object") return undefined; // status must exist and be exactly 200 @@ -26,7 +33,88 @@ export function okData(res: unknown): T | undefined { // check presence to safely return it as T; the generic T is enforced at call sites. if (!("data" in (res as Record))) return undefined; - return (res as { data: T }).data; + return res.data; +} + +export function getPaginatedTotalCount( + infiniteData: InfiniteData | undefined, + fallbackCount?: number, +): number { + const lastPage = infiniteData?.pages.at(-1); + if (!hasValidPaginationInfo(lastPage)) return fallbackCount ?? 0; + return lastPage.data.pagination.total_items ?? fallbackCount ?? 0; +} + +export function getPaginationNextPageNumber( + lastPage: + | { data: { pagination?: Pagination; [key: string]: any } } + | undefined, +): number | undefined { + if (!hasValidPaginationInfo(lastPage)) return undefined; + + const { pagination } = lastPage.data; + const hasMore = + pagination.current_page * pagination.page_size < pagination.total_items; + return hasMore ? pagination.current_page + 1 : undefined; +} + +/** Make one list from a paginated infinite query result. */ +export function unpaginate< + TResponse extends { status: number; data: any }, + TPageDataKey extends { + // Only allow keys for which the value is an array: + [K in keyof OKData]: OKData[K] extends any[] + ? 
K + : never; + }[keyof OKData] & + string, + TItemData extends OKData[TPageDataKey][number], +>( + infiniteData: InfiniteData, + pageListKey: TPageDataKey, +): TItemData[] { + return ( + infiniteData?.pages.flatMap((page) => { + if (!hasValidListPage(page, pageListKey)) return []; + return page.data[pageListKey] || []; + }) || [] + ); +} + +function hasValidListPage( + page: unknown, + pageListKey: TKey, +): page is { status: 200; data: { [key in TKey]: any[] } } { + return ( + typeof page === "object" && + page !== null && + "status" in page && + page.status === 200 && + "data" in page && + typeof page.data === "object" && + page.data !== null && + pageListKey in page.data && + Array.isArray((page.data as Record)[pageListKey]) + ); +} + +function hasValidPaginationInfo( + page: unknown, +): page is { data: { pagination: Pagination; [key: string]: any } } { + return ( + typeof page === "object" && + page !== null && + "data" in page && + typeof page.data === "object" && + page.data !== null && + "pagination" in page.data && + typeof page.data.pagination === "object" && + page.data.pagination !== null && + "total_items" in page.data.pagination && + "total_pages" in page.data.pagination && + "current_page" in page.data.pagination && + "page_size" in page.data.pagination + ); } type ResponseWithData = { status: number; data: unknown }; diff --git a/autogpt_platform/frontend/src/app/api/openapi.json b/autogpt_platform/frontend/src/app/api/openapi.json index 61a3600892..2ead2189ed 100644 --- a/autogpt_platform/frontend/src/app/api/openapi.json +++ b/autogpt_platform/frontend/src/app/api/openapi.json @@ -4624,7 +4624,7 @@ "get": { "tags": ["v2", "executions", "review", "v2", "executions", "review"], "summary": "Get Pending Reviews for Execution", - "description": "Get all pending reviews for a specific graph execution.\n\nRetrieves all reviews with status \"WAITING\" for the specified graph execution\nthat belong to the authenticated user. Results are ordered by creation time\n(oldest first) to preserve review order within the execution.\n\nArgs:\n graph_exec_id: ID of the graph execution to get reviews for\n user_id: Authenticated user ID from security dependency\n\nReturns:\n List of pending review objects for the specified execution\n\nRaises:\n HTTPException:\n - 403: If user doesn't own the graph execution\n - 500: If authentication fails or database error occurs\n\nNote:\n Only returns reviews owned by the authenticated user for security.\n Reviews with invalid status are excluded with warning logs.", + "description": "Get all pending reviews for a specific graph execution.\n\nRetrieves all reviews with status \"WAITING\" for the specified graph execution\nthat belong to the authenticated user. 
Results are ordered by creation time\n(oldest first) to preserve review order within the execution.\n\nArgs:\n graph_exec_id: ID of the graph execution to get reviews for\n user_id: Authenticated user ID from security dependency\n\nReturns:\n List of pending review objects for the specified execution\n\nRaises:\n HTTPException:\n - 404: If the graph execution doesn't exist or isn't owned by this user\n - 500: If authentication fails or database error occurs\n\nNote:\n Only returns reviews owned by the authenticated user for security.\n Reviews with invalid status are excluded with warning logs.", "operationId": "getV2Get pending reviews for execution", "security": [{ "HTTPBearerJWT": [] }], "parameters": [ @@ -4650,11 +4650,10 @@ } } }, - "400": { "description": "Invalid graph execution ID" }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" }, - "403": { "description": "Access denied to graph execution" }, + "404": { "description": "Graph execution not found" }, "422": { "description": "Validation Error", "content": { @@ -5349,7 +5348,11 @@ "responses": { "200": { "description": "Successful Response", - "content": { "application/json": { "schema": {} } } + "content": { + "application/json": { + "schema": { "$ref": "#/components/schemas/GraphMeta" } + } + } }, "401": { "$ref": "#/components/responses/HTTP401NotAuthenticatedError" diff --git a/autogpt_platform/frontend/src/components/contextual/GoogleDrivePicker/useGoogleDrivePicker.ts b/autogpt_platform/frontend/src/components/contextual/GoogleDrivePicker/useGoogleDrivePicker.ts index 66386882c6..f6478f6c2b 100644 --- a/autogpt_platform/frontend/src/components/contextual/GoogleDrivePicker/useGoogleDrivePicker.ts +++ b/autogpt_platform/frontend/src/components/contextual/GoogleDrivePicker/useGoogleDrivePicker.ts @@ -15,6 +15,7 @@ import { normalizePickerResponse, scopesIncludeDrive, } from "./helpers"; +import { okData } from "@/app/api/helpers"; const defaultScopes = ["https://www.googleapis.com/auth/drive.file"]; @@ -126,9 +127,9 @@ export function useGoogleDrivePicker(options: Props) { ); const response = await queryClient.fetchQuery(queryOptions); + const cred = okData(response); - if (response.status === 200 && response.data) { - const cred = response.data; + if (cred) { if (cred.type === "oauth2") { const oauthCred = cred as OAuth2Credentials; if (oauthCred.access_token) { diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentSelectStep/AgentSelectStep.tsx b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentSelectStep/AgentSelectStep.tsx index 2766f2d477..896840ba08 100644 --- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentSelectStep/AgentSelectStep.tsx +++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentSelectStep/AgentSelectStep.tsx @@ -34,7 +34,7 @@ export function AgentSelectStep({ }: Props) { const { // Data - agents, + myAgents, isLoading, error, // State @@ -99,7 +99,7 @@ export function AgentSelectStep({ description="Select your project that you'd like to publish" /> - {agents.length === 0 ? ( + {myAgents.length === 0 ? (
Uh-oh.. It seems like you don't have any agents in your @@ -130,7 +130,7 @@ export function AgentSelectStep({
- {agents.map((agent) => ( + {myAgents.map((agent) => (
(null); - const { data: myAgents, isLoading, error } = useGetV2GetMyAgents(); - - const agents: Agent[] = - (myAgents?.status === 200 && - myAgents.data.agents - .map( - (agent): Agent => ({ - name: agent.agent_name, - id: agent.agent_id, - version: agent.agent_version, - lastEdited: agent.last_edited.toLocaleDateString(), - imageSrc: agent.agent_image || "https://picsum.photos/300/200", - description: agent.description || "", - recommendedScheduleCron: agent.recommended_schedule_cron ?? null, - }), - ) - .sort( - (a: Agent, b: Agent) => - new Date(b.lastEdited).getTime() - new Date(a.lastEdited).getTime(), - )) || - []; + const { + data: _myAgents, + isLoading, + error, + } = useGetV2GetMyAgents(undefined, { + query: { + select: (res) => + okData(res) + ?.agents.map( + (agent): Agent => ({ + name: agent.agent_name, + id: agent.agent_id, + version: agent.agent_version, + lastEdited: agent.last_edited.toLocaleDateString(), + imageSrc: agent.agent_image || "https://picsum.photos/300/200", + description: agent.description || "", + recommendedScheduleCron: agent.recommended_schedule_cron ?? null, + }), + ) + .sort( + (a: Agent, b: Agent) => + new Date(b.lastEdited).getTime() - + new Date(a.lastEdited).getTime(), + ), + }, + }); + const myAgents = _myAgents ?? []; const handleAgentClick = ( _: string, @@ -70,7 +77,7 @@ export function useAgentSelectStep({ const handleNext = () => { if (selectedAgentId && selectedAgentVersion) { - const selectedAgent = agents.find( + const selectedAgent = myAgents.find( (agent) => agent.id === selectedAgentId, ); if (selectedAgent) { @@ -86,7 +93,7 @@ export function useAgentSelectStep({ return { // Data - agents, + myAgents, isLoading, error, // State diff --git a/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/useAgentActivityDropdown.ts b/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/useAgentActivityDropdown.ts index df8402906b..9dbd8aaf7e 100644 --- a/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/useAgentActivityDropdown.ts +++ b/autogpt_platform/frontend/src/components/layout/Navbar/components/AgentActivityDropdown/useAgentActivityDropdown.ts @@ -4,6 +4,7 @@ import { useExecutionEvents } from "@/hooks/useExecutionEvents"; import { useLibraryAgents } from "@/hooks/useLibraryAgents/useLibraryAgents"; import type { GraphExecution } from "@/lib/autogpt-server-api/types"; import { useCallback, useEffect, useMemo, useState } from "react"; +import { okData } from "@/app/api/helpers"; import { NotificationState, categorizeExecutions, @@ -26,7 +27,7 @@ export function useAgentActivityDropdown() { isSuccess: executionsSuccess, error: executionsError, } = useGetV1ListAllExecutions({ - query: { select: (res) => (res.status === 200 ? 
res.data : null) }, + query: { select: okData }, }); // Get all graph IDs from agentInfoMap diff --git a/autogpt_platform/frontend/src/components/layout/Navbar/components/NavbarView.tsx b/autogpt_platform/frontend/src/components/layout/Navbar/components/NavbarView.tsx index 49790547e4..863b9f601f 100644 --- a/autogpt_platform/frontend/src/components/layout/Navbar/components/NavbarView.tsx +++ b/autogpt_platform/frontend/src/components/layout/Navbar/components/NavbarView.tsx @@ -7,6 +7,7 @@ import { useBreakpoint } from "@/lib/hooks/useBreakpoint"; import { useSupabase } from "@/lib/supabase/hooks/useSupabase"; import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag"; import { useMemo } from "react"; +import { okData } from "@/app/api/helpers"; import { getAccountMenuItems, loggedInLinks, loggedOutLinks } from "../helpers"; import { AccountMenu } from "./AccountMenu/AccountMenu"; import { AgentActivityDropdown } from "./AgentActivityDropdown/AgentActivityDropdown"; @@ -29,7 +30,7 @@ export function NavbarView({ isLoggedIn, previewBranchName }: NavbarViewProps) { const { data: profile, isLoading: isProfileLoading } = useGetV2GetUserProfile( { query: { - select: (res) => (res.status === 200 ? res.data : null), + select: okData, enabled: isLoggedIn && !!user, // Include user ID in query key to ensure cache invalidation when user changes queryKey: ["/api/store/profile", user?.id], diff --git a/autogpt_platform/frontend/src/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel.tsx b/autogpt_platform/frontend/src/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel.tsx index 12014e50fe..2b04c0ed9a 100644 --- a/autogpt_platform/frontend/src/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel.tsx +++ b/autogpt_platform/frontend/src/components/organisms/FloatingReviewsPanel/FloatingReviewsPanel.tsx @@ -7,6 +7,7 @@ import { cn } from "@/lib/utils"; import { Text } from "@/components/atoms/Text/Text"; import { useGetV1GetExecutionDetails } from "@/app/api/__generated__/endpoints/graphs/graphs"; import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecutionStatus"; +import { okData } from "@/app/api/helpers"; import { useGraphStore } from "@/app/(platform)/build/stores/graphStore"; import { useShallow } from "zustand/react/shallow"; @@ -29,13 +30,11 @@ export function FloatingReviewsPanel({ { query: { enabled: !!(graphId && executionId), + select: okData, }, }, ); - const executionStatus = - executionDetails?.status === 200 ? 
executionDetails.data.status : undefined; - // Get graph execution status from the store (updated via WebSocket) const graphExecutionStatus = useGraphStore( useShallow((state) => state.graphExecutionStatus), @@ -49,7 +48,7 @@ export function FloatingReviewsPanel({ if (executionId) { refetch(); } - }, [executionStatus, executionId, refetch]); + }, [executionDetails?.status, executionId, refetch]); // Refetch when graph execution status changes to REVIEW useEffect(() => { @@ -62,7 +61,7 @@ export function FloatingReviewsPanel({ !executionId || (!isLoading && pendingReviews.length === 0 && - executionStatus !== AgentExecutionStatus.REVIEW) + executionDetails?.status !== AgentExecutionStatus.REVIEW) ) { return null; } diff --git a/autogpt_platform/frontend/src/components/organisms/PendingReviewsList/PendingReviewsList.tsx b/autogpt_platform/frontend/src/components/organisms/PendingReviewsList/PendingReviewsList.tsx index ddc9bab972..3253b0ee6d 100644 --- a/autogpt_platform/frontend/src/components/organisms/PendingReviewsList/PendingReviewsList.tsx +++ b/autogpt_platform/frontend/src/components/organisms/PendingReviewsList/PendingReviewsList.tsx @@ -44,8 +44,8 @@ export function PendingReviewsList({ const reviewActionMutation = usePostV2ProcessReviewAction({ mutation: { - onSuccess: (data: any) => { - if (data.status !== 200) { + onSuccess: (res) => { + if (res.status !== 200) { toast({ title: "Failed to process reviews", description: "Unexpected response from server", @@ -54,18 +54,18 @@ export function PendingReviewsList({ return; } - const response = data.data; + const result = res.data; - if (response.failed_count > 0) { + if (result.failed_count > 0) { toast({ title: "Reviews partially processed", - description: `${response.approved_count + response.rejected_count} succeeded, ${response.failed_count} failed. ${response.error || "Some reviews could not be processed."}`, + description: `${result.approved_count + result.rejected_count} succeeded, ${result.failed_count} failed. 
${result.error || "Some reviews could not be processed."}`, variant: "destructive", }); } else { toast({ title: "Reviews processed successfully", - description: `${response.approved_count} approved, ${response.rejected_count} rejected`, + description: `${result.approved_count} approved, ${result.rejected_count} rejected`, variant: "default", }); } diff --git a/autogpt_platform/frontend/src/hooks/useAgentSafeMode.ts b/autogpt_platform/frontend/src/hooks/useAgentSafeMode.ts index 654ef858b6..07a2b33674 100644 --- a/autogpt_platform/frontend/src/hooks/useAgentSafeMode.ts +++ b/autogpt_platform/frontend/src/hooks/useAgentSafeMode.ts @@ -7,6 +7,7 @@ import { import { useToast } from "@/components/molecules/Toast/use-toast"; import { GraphModel } from "@/app/api/__generated__/models/graphModel"; import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; +import { okData } from "@/app/api/helpers"; import { useQueryClient } from "@tanstack/react-query"; import { Graph } from "@/lib/autogpt-server-api/types"; @@ -47,15 +48,19 @@ export function useAgentSafeMode(graph: GraphModel | LibraryAgent | Graph) { const { data: libraryAgent, isLoading } = useGetV2GetLibraryAgentByGraphId( graphId, {}, - { query: { enabled: !isAgent && shouldShowToggle } }, + { + query: { + enabled: !isAgent && shouldShowToggle, + select: okData, + }, + }, ); const [localSafeMode, setLocalSafeMode] = useState(null); useEffect(() => { - if (!isAgent && libraryAgent?.status === 200) { - const backendValue = - libraryAgent.data?.settings?.human_in_the_loop_safe_mode; + if (!isAgent && libraryAgent) { + const backendValue = libraryAgent.settings?.human_in_the_loop_safe_mode; if (backendValue !== undefined) { setLocalSafeMode(backendValue); } diff --git a/autogpt_platform/frontend/src/hooks/usePendingReviews.ts b/autogpt_platform/frontend/src/hooks/usePendingReviews.ts index 111b50a491..8257814fcf 100644 --- a/autogpt_platform/frontend/src/hooks/usePendingReviews.ts +++ b/autogpt_platform/frontend/src/hooks/usePendingReviews.ts @@ -2,12 +2,13 @@ import { useGetV2GetPendingReviews, useGetV2GetPendingReviewsForExecution, } from "@/app/api/__generated__/endpoints/executions/executions"; +import { okData } from "@/app/api/helpers"; export function usePendingReviews() { const query = useGetV2GetPendingReviews(); return { - pendingReviews: (query.data?.status === 200 ? query.data.data : []) || [], + pendingReviews: okData(query.data) || [], isLoading: query.isLoading, error: query.error, refetch: query.refetch, @@ -18,7 +19,7 @@ export function usePendingReviewsForExecution(graphExecId: string) { const query = useGetV2GetPendingReviewsForExecution(graphExecId); return { - pendingReviews: (query.data?.status === 200 ? 
query.data.data : []) || [], + pendingReviews: okData(query.data) || [], isLoading: query.isLoading, error: query.error, refetch: query.refetch, diff --git a/autogpt_platform/frontend/src/lib/hooks/useUserTimezone.ts b/autogpt_platform/frontend/src/lib/hooks/useUserTimezone.ts new file mode 100644 index 0000000000..7d5cef3a04 --- /dev/null +++ b/autogpt_platform/frontend/src/lib/hooks/useUserTimezone.ts @@ -0,0 +1,8 @@ +import { okData } from "@/app/api/helpers"; +import { useGetV1GetUserTimezone } from "@/app/api/__generated__/endpoints/auth/auth"; + +export function useUserTimezone(): "not-set" | string | undefined { + return useGetV1GetUserTimezone({ + query: { select: (res) => okData(res)?.timezone }, + }).data; +} diff --git a/autogpt_platform/frontend/src/lib/react-query/queryClient.ts b/autogpt_platform/frontend/src/lib/react-query/queryClient.ts index 836c505c2f..512629e65b 100644 --- a/autogpt_platform/frontend/src/lib/react-query/queryClient.ts +++ b/autogpt_platform/frontend/src/lib/react-query/queryClient.ts @@ -21,6 +21,10 @@ function makeQueryClient() { let browserQueryClient: QueryClient | undefined = undefined; +/** Only for use *outside client component context* + * (so in server components, API helpers, etc.). + * + * In the context of client components, you should always use `useQueryClient()`. */ export function getQueryClient() { if (isServer) { // Server: create new client every time (so one user's data doesn't leak to another) From c3e407ef09a42f1c6c122363f96c0ce2a39527ef Mon Sep 17 00:00:00 2001 From: Abhimanyu Yadav <122007096+Abhi1992002@users.noreply.github.com> Date: Mon, 22 Dec 2025 07:00:58 +0530 Subject: [PATCH 259/260] feat(frontend): add hover state to edge delete button in FlowEditor (#11601) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The delete button on flow editor edges is always visible, which creates visual clutter. This change makes the button only appear on hover, improving the UI while keeping it accessible. 
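In rough terms the pattern looks like the sketch below — a simplified, stand-alone illustration rather than the actual `CustomEdge` code (the component name and markup are invented here; only the `useState` flag, the mouse handlers, the `cn` helper, and the opacity transition mirror the real change):

```tsx
// Illustrative sketch only — not the actual CustomEdge implementation.
// Assumes the repo's usual `cn` class-merging helper from "@/lib/utils".
import { useState } from "react";
import { cn } from "@/lib/utils";

export function HoverRevealDeleteButton({ onDelete }: { onDelete: () => void }) {
  const [isHovered, setIsHovered] = useState(false);

  return (
    <button
      type="button"
      aria-label="Delete connection"
      onMouseEnter={() => setIsHovered(true)}
      onMouseLeave={() => setIsHovered(false)}
      onClick={onDelete}
      // The button stays mounted and clickable; only its opacity animates.
      className={cn(
        "transition-opacity duration-150",
        isHovered ? "opacity-100" : "opacity-0",
      )}
    >
      ×
    </button>
  );
}
```

Keeping the button mounted and merely animating its opacity is what keeps it clickable even while visually hidden.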
### Changes 🏗️ - Added hover state management using `useState` to track when the edge delete button is hovered - Applied opacity transition to the delete button (fades in on hover, fades out when not hovered) - Added `onMouseEnter` and `onMouseLeave` handlers to the button to control hover state - Used `cn` utility for conditional className management - Button remains interactive even when `opacity-0` (still clickable for better UX) ### Checklist 📋 #### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] Hover over an edge in the flow editor and verify the delete button fades in smoothly - [x] Move mouse away from edge and verify the delete button fades out smoothly - [x] Click the delete button while hovered to verify it still removes the edge connection - [x] Test with multiple edges to ensure hover state is independent per edge --- .../build/components/FlowEditor/edges/CustomEdge.tsx | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/edges/CustomEdge.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/edges/CustomEdge.tsx index b49fd11602..ff80fdc8ac 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/edges/CustomEdge.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/FlowEditor/edges/CustomEdge.tsx @@ -1,3 +1,4 @@ +import { memo, useState } from "react"; import { Button } from "@/components/atoms/Button/Button"; import { BaseEdge, @@ -20,7 +21,6 @@ export type CustomEdgeData = { }; export type CustomEdge = XYEdge; -import { memo } from "react"; const CustomEdge = ({ id, @@ -35,6 +35,8 @@ const CustomEdge = ({ selected, }: EdgeProps) => { const removeConnection = useEdgeStore((state) => state.removeEdge); + const [isHovered, setIsHovered] = useState(false); + const [edgePath, labelX, labelY] = getBezierPath({ sourceX, sourceY, @@ -69,12 +71,17 @@ const CustomEdge = ({ From 88731b1f76a6ad0887f16f1159d5b682f8db101c Mon Sep 17 00:00:00 2001 From: Zamil Majdy Date: Mon, 22 Dec 2025 12:13:06 +0100 Subject: [PATCH 260/260] feat(platform): marketplace update notifications with enhanced publishing workflow (#11630) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary This PR implements a comprehensive marketplace update notification system that allows users to discover and update to newer agent versions, along with enhanced publishing workflows and UI improvements. 
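At its core, the update check compares the library agent's local `graph_version` against the graph versions the marketplace listing exposes. A minimal sketch of that comparison (the real logic lives in the new `useMarketplaceUpdate` hook; the function name and signature below are assumptions for illustration):

```ts
// Illustrative sketch, not the actual useMarketplaceUpdate implementation.
// Assumes the caller already has the library agent's graph_version and the
// store listing's agentGraphVersions (strings, as exposed by the store API).
export function hasNewerMarketplaceVersion(
  currentGraphVersion: number,
  agentGraphVersions: string[],
): { hasUpdate: boolean; latestVersion: number | null } {
  if (agentGraphVersions.length === 0) {
    return { hasUpdate: false, latestVersion: null };
  }
  // Versions arrive as strings (e.g. "1", "2"); compare them numerically.
  const latestVersion = Math.max(
    ...agentGraphVersions.map((v) => parseInt(v, 10)),
  );
  return {
    hasUpdate: latestVersion > currentGraphVersion,
    latestVersion,
  };
}
```

Applying an update is then a matter of sending the chosen version as the new `graph_version` field on `LibraryAgentUpdateRequest`, which this PR adds to the library API.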
image image image image

## Core Features

### 🔔 Marketplace Update Notifications

- **Update detection**: Automatically detects when marketplace has newer agent versions than user's local copy
- **Creator notifications**: Shows banners for creators with unpublished changes ready to publish
- **Non-creator support**: Enables regular users to discover and update to newer marketplace versions
- **Version comparison**: Intelligent logic comparing `graph_version` vs marketplace listing versions

### 📋 Enhanced Publishing Workflow

- **Builder integration**: Added "Publish to Marketplace" button directly in the builder actions
- **Unified banner system**: Consistent `MarketplaceBanners` component across library and marketplace pages
- **Streamlined UX**: Fixed layout issues, improved button placement and styling
- **Modal improvements**: Fixed thumbnail loading race conditions and infinite loop bugs

### 📚 Version History & Changelog

- **Inline version history**: Added version changelog directly to marketplace agent pages
- **Version comparison**: Clear display of available versions with current version highlighting
- **Update mechanism**: Direct updates using `graph_version` parameter for accuracy

## Technical Implementation

### Backend Changes

- **Database schema**: Added `agentGraphVersions` and `agentGraphId` fields to `StoreAgent` model
- **API enhancement**: Updated store endpoints to expose graph version data for version comparison
- **Data migration**: Fixed agent version field naming from `version` to `agentGraphVersions`
- **Model updates**: Enhanced `LibraryAgentUpdateRequest` with `graph_version` field

### Frontend Architecture

- **`useMarketplaceUpdate` hook**: Centralized marketplace update detection and creator identification
- **`MarketplaceBanners` component**: Unified banner system with proper vertical layout and styling
- **`AgentVersionChangelog` component**: Version history display for marketplace pages
- **`PublishToMarketplace` component**: Builder integration with modal workflow

### Key Bug Fixes

- **Thumbnail loading**: Fixed race condition where images wouldn't load on first modal open
- **Infinite loops**: Used refs to prevent circular dependencies in `useThumbnailImages` hook
- **Layout issues**: Fixed banner placement, removed duplicate breadcrumbs, corrected vertical layout
- **Field naming**: Fixed `agent_version` vs `version` field inconsistencies across APIs

## Files Changed

### Backend

- `autogpt_platform/backend/backend/server/v2/store/` - Enhanced store API with graph version data
- `autogpt_platform/backend/backend/server/v2/library/` - Updated library API models
- `autogpt_platform/backend/migrations/` - Database migrations for version fields
- `autogpt_platform/backend/schema.prisma` - Schema updates for graph versions

### Frontend

- `src/app/(platform)/components/MarketplaceBanners/` - New unified banner component
- `src/app/(platform)/library/agents/[id]/components/` - Enhanced library views with banners
- `src/app/(platform)/build/components/BuilderActions/` - Added marketplace publish button
- `src/app/(platform)/marketplace/components/AgentInfo/` - Added inline version history
- `src/components/contextual/PublishAgentModal/` - Fixed thumbnail loading and modal workflow

## User Experience Impact

- **Better discovery**: Users automatically notified of newer agent versions
- **Streamlined publishing**: Direct publish access from builder interface
- **Reduced friction**: Fixed UI bugs, improved loading states, consistent design
- **Enhanced transparency**:
Inline version history on marketplace pages - **Creator workflow**: Better notifications for creators with unpublished changes ## Testing - ✅ Update banners appear correctly when marketplace has newer versions - ✅ Creator banners show for users with unpublished changes - ✅ Version comparison logic works with graph_version vs marketplace versions - ✅ Publish button in builder opens modal correctly with pre-populated data - ✅ Thumbnail images load properly on first modal open without infinite loops - ✅ Database migrations completed successfully with version field fixes - ✅ All existing tests updated and passing with new schema changes 🤖 Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude --------- Co-authored-by: Claude Co-authored-by: Lluis Agusti Co-authored-by: Ubbe Co-authored-by: Reinier van der Leer --- .../backend/api/features/library/db.py | 23 +- .../backend/api/features/library/model.py | 3 + .../api/features/library/routes/agents.py | 1 + .../backend/api/features/store/cache.py | 6 +- .../backend/backend/api/features/store/db.py | 28 +- .../backend/api/features/store/db_test.py | 6 + .../backend/api/features/store/model.py | 11 + .../backend/api/features/store/model_test.py | 2 + .../backend/api/features/store/routes.py | 8 +- .../backend/api/features/store/routes_test.py | 6 +- .../backend/backend/data/onboarding.py | 2 + .../migration.sql | 45 +++ .../migration.sql | 81 +++++ autogpt_platform/backend/schema.prisma | 12 +- .../backend/snapshots/agt_details | 8 +- .../marketplace/components/ExpandleRow.tsx | 6 +- .../BuilderActions/BuilderActions.tsx | 2 + .../PublishToMarketplace.tsx | 36 ++ .../usePublishToMarketplace.ts | 48 +++ .../NewAgentLibraryView.tsx | 332 +++++++++++------- .../components/AgentVersionChangelog.tsx | 137 ++++++++ .../components/other/AgentSettingsButton.tsx | 14 +- .../SelectedRunView/SelectedRunView.tsx | 42 ++- .../SelectedRunActions/SelectedRunActions.tsx | 2 - .../SelectedScheduleView.tsx | 3 + .../SelectedTemplateView.tsx | 4 +- .../SelectedTriggerView.tsx | 4 +- .../selected-views/SelectedViewLayout.tsx | 7 + .../hooks/useMarketplaceUpdate.ts | 163 +++++++++ .../useNewAgentLibraryView.ts | 7 +- .../components/AgentInfo/AgentInfo.tsx | 143 +++++++- .../MainAgentPage/MainAgentPage.tsx | 97 ++--- .../MainAgentPage/useMainAgentPage.ts | 14 +- .../AgentTableRow/AgentTableRow.tsx | 3 + .../AgentTableRow/useAgentTableRow.ts | 4 +- .../MainDashboardPage/MainDashboardPage.tsx | 1 + .../frontend/src/app/api/openapi.json | 43 +++ .../MarketplaceBanners/MarketplaceBanners.tsx | 102 ++++++ .../PublishAgentModal/PublishAgentModal.tsx | 10 +- .../AgentInfoStep/AgentInfoStep.tsx | 37 ++ .../components/useThumbnailImages.ts | 11 + .../components/AgentInfoStep/helpers.ts | 145 ++++++-- .../AgentInfoStep/useAgentInfoStep.ts | 11 +- .../components/AgentReviewStep.tsx | 4 +- .../contextual/PublishAgentModal/helpers.ts | 4 + .../PublishAgentModal/usePublishAgentModal.ts | 151 +++++++- .../contextual/marketplaceHelpers.ts | 57 +++ 47 files changed, 1600 insertions(+), 286 deletions(-) create mode 100644 autogpt_platform/backend/migrations/20251216182139_fix_store_submission_agent_version/migration.sql create mode 100644 autogpt_platform/backend/migrations/20251217174500_fix_store_agent_versions_to_graph_versions/migration.sql create mode 100644 autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/PublishToMarketplace.tsx create mode 100644 
autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/usePublishToMarketplace.ts create mode 100644 autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/AgentVersionChangelog.tsx create mode 100644 autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/hooks/useMarketplaceUpdate.ts create mode 100644 autogpt_platform/frontend/src/components/contextual/MarketplaceBanners/MarketplaceBanners.tsx create mode 100644 autogpt_platform/frontend/src/components/contextual/marketplaceHelpers.ts diff --git a/autogpt_platform/backend/backend/api/features/library/db.py b/autogpt_platform/backend/backend/api/features/library/db.py index ad34326700..69ed0d2730 100644 --- a/autogpt_platform/backend/backend/api/features/library/db.py +++ b/autogpt_platform/backend/backend/api/features/library/db.py @@ -538,6 +538,7 @@ async def update_library_agent( library_agent_id: str, user_id: str, auto_update_version: Optional[bool] = None, + graph_version: Optional[int] = None, is_favorite: Optional[bool] = None, is_archived: Optional[bool] = None, is_deleted: Optional[Literal[False]] = None, @@ -550,6 +551,7 @@ async def update_library_agent( library_agent_id: The ID of the LibraryAgent to update. user_id: The owner of this LibraryAgent. auto_update_version: Whether the agent should auto-update to active version. + graph_version: Specific graph version to update to. is_favorite: Whether this agent is marked as a favorite. is_archived: Whether this agent is archived. settings: User-specific settings for this library agent. @@ -563,8 +565,8 @@ async def update_library_agent( """ logger.debug( f"Updating library agent {library_agent_id} for user {user_id} with " - f"auto_update_version={auto_update_version}, is_favorite={is_favorite}, " - f"is_archived={is_archived}, settings={settings}" + f"auto_update_version={auto_update_version}, graph_version={graph_version}, " + f"is_favorite={is_favorite}, is_archived={is_archived}, settings={settings}" ) update_fields: prisma.types.LibraryAgentUpdateManyMutationInput = {} if auto_update_version is not None: @@ -581,10 +583,23 @@ async def update_library_agent( update_fields["isDeleted"] = is_deleted if settings is not None: update_fields["settings"] = SafeJson(settings.model_dump()) - if not update_fields: - raise ValueError("No values were passed to update") try: + # If graph_version is provided, update to that specific version + if graph_version is not None: + # Get the current agent to find its graph_id + agent = await get_library_agent(id=library_agent_id, user_id=user_id) + # Update to the specified version using existing function + return await update_agent_version_in_library( + user_id=user_id, + agent_graph_id=agent.graph_id, + agent_graph_version=graph_version, + ) + + # Otherwise, just update the simple fields + if not update_fields: + raise ValueError("No values were passed to update") + n_updated = await prisma.models.LibraryAgent.prisma().update_many( where={"id": library_agent_id, "userId": user_id}, data=update_fields, diff --git a/autogpt_platform/backend/backend/api/features/library/model.py b/autogpt_platform/backend/backend/api/features/library/model.py index ab4bec586e..c20f82afae 100644 --- a/autogpt_platform/backend/backend/api/features/library/model.py +++ b/autogpt_platform/backend/backend/api/features/library/model.py @@ -385,6 +385,9 @@ class LibraryAgentUpdateRequest(pydantic.BaseModel): 
auto_update_version: Optional[bool] = pydantic.Field( default=None, description="Auto-update the agent version" ) + graph_version: Optional[int] = pydantic.Field( + default=None, description="Specific graph version to update to" + ) is_favorite: Optional[bool] = pydantic.Field( default=None, description="Mark the agent as a favorite" ) diff --git a/autogpt_platform/backend/backend/api/features/library/routes/agents.py b/autogpt_platform/backend/backend/api/features/library/routes/agents.py index 5a043009fc..38c34dd3b8 100644 --- a/autogpt_platform/backend/backend/api/features/library/routes/agents.py +++ b/autogpt_platform/backend/backend/api/features/library/routes/agents.py @@ -285,6 +285,7 @@ async def update_library_agent( library_agent_id=library_agent_id, user_id=user_id, auto_update_version=payload.auto_update_version, + graph_version=payload.graph_version, is_favorite=payload.is_favorite, is_archived=payload.is_archived, settings=payload.settings, diff --git a/autogpt_platform/backend/backend/api/features/store/cache.py b/autogpt_platform/backend/backend/api/features/store/cache.py index 7832069d49..5d9bc24e5d 100644 --- a/autogpt_platform/backend/backend/api/features/store/cache.py +++ b/autogpt_platform/backend/backend/api/features/store/cache.py @@ -43,10 +43,12 @@ async def _get_cached_store_agents( # Cache individual agent details for 15 minutes @cached(maxsize=200, ttl_seconds=300, shared_cache=True) -async def _get_cached_agent_details(username: str, agent_name: str): +async def _get_cached_agent_details( + username: str, agent_name: str, include_changelog: bool = False +): """Cached helper to get agent details.""" return await store_db.get_store_agent_details( - username=username, agent_name=agent_name + username=username, agent_name=agent_name, include_changelog=include_changelog ) diff --git a/autogpt_platform/backend/backend/api/features/store/db.py b/autogpt_platform/backend/backend/api/features/store/db.py index 12f1783468..8e5a39df89 100644 --- a/autogpt_platform/backend/backend/api/features/store/db.py +++ b/autogpt_platform/backend/backend/api/features/store/db.py @@ -257,7 +257,7 @@ async def log_search_term(search_query: str): async def get_store_agent_details( - username: str, agent_name: str + username: str, agent_name: str, include_changelog: bool = False ) -> store_model.StoreAgentDetails: """Get PUBLIC store agent details from the StoreAgent view""" logger.debug(f"Getting store agent details for {username}/{agent_name}") @@ -322,6 +322,27 @@ async def get_store_agent_details( else: recommended_schedule_cron = None + # Fetch changelog data if requested + changelog_data = None + if include_changelog and store_listing: + changelog_versions = ( + await prisma.models.StoreListingVersion.prisma().find_many( + where={ + "storeListingId": store_listing.id, + "submissionStatus": prisma.enums.SubmissionStatus.APPROVED, + }, + order=[{"version": "desc"}], + ) + ) + changelog_data = [ + store_model.ChangelogEntry( + version=str(version.version), + changes_summary=version.changesSummary or "No changes recorded", + date=version.createdAt, + ) + for version in changelog_versions + ] + logger.debug(f"Found agent details for {username}/{agent_name}") return store_model.StoreAgentDetails( store_listing_version_id=agent.storeListingVersionId, @@ -338,10 +359,13 @@ async def get_store_agent_details( runs=agent.runs, rating=agent.rating, versions=agent.versions, + agentGraphVersions=agent.agentGraphVersions, + agentGraphId=agent.agentGraphId, last_updated=agent.updated_at, 
active_version_id=active_version_id, has_approved_version=has_approved_version, recommended_schedule_cron=recommended_schedule_cron, + changelog=changelog_data, ) except store_exceptions.AgentNotFoundError: raise @@ -409,6 +433,8 @@ async def get_store_agent_by_version_id( runs=agent.runs, rating=agent.rating, versions=agent.versions, + agentGraphVersions=agent.agentGraphVersions, + agentGraphId=agent.agentGraphId, last_updated=agent.updated_at, ) except store_exceptions.AgentNotFoundError: diff --git a/autogpt_platform/backend/backend/api/features/store/db_test.py b/autogpt_platform/backend/backend/api/features/store/db_test.py index 641f392d86..b48ce5db95 100644 --- a/autogpt_platform/backend/backend/api/features/store/db_test.py +++ b/autogpt_platform/backend/backend/api/features/store/db_test.py @@ -40,6 +40,8 @@ async def test_get_store_agents(mocker): runs=10, rating=4.5, versions=["1.0"], + agentGraphVersions=["1"], + agentGraphId="test-graph-id", updated_at=datetime.now(), is_available=False, useForOnboarding=False, @@ -83,6 +85,8 @@ async def test_get_store_agent_details(mocker): runs=10, rating=4.5, versions=["1.0"], + agentGraphVersions=["1"], + agentGraphId="test-graph-id", updated_at=datetime.now(), is_available=False, useForOnboarding=False, @@ -105,6 +109,8 @@ async def test_get_store_agent_details(mocker): runs=15, rating=4.8, versions=["1.0", "2.0"], + agentGraphVersions=["1", "2"], + agentGraphId="test-graph-id-active", updated_at=datetime.now(), is_available=True, useForOnboarding=False, diff --git a/autogpt_platform/backend/backend/api/features/store/model.py b/autogpt_platform/backend/backend/api/features/store/model.py index 745c969ae6..972898b296 100644 --- a/autogpt_platform/backend/backend/api/features/store/model.py +++ b/autogpt_platform/backend/backend/api/features/store/model.py @@ -7,6 +7,12 @@ import pydantic from backend.util.models import Pagination +class ChangelogEntry(pydantic.BaseModel): + version: str + changes_summary: str + date: datetime.datetime + + class MyAgent(pydantic.BaseModel): agent_id: str agent_version: int @@ -55,12 +61,17 @@ class StoreAgentDetails(pydantic.BaseModel): runs: int rating: float versions: list[str] + agentGraphVersions: list[str] + agentGraphId: str last_updated: datetime.datetime recommended_schedule_cron: str | None = None active_version_id: str | None = None has_approved_version: bool = False + # Optional changelog data when include_changelog=True + changelog: list[ChangelogEntry] | None = None + class Creator(pydantic.BaseModel): name: str diff --git a/autogpt_platform/backend/backend/api/features/store/model_test.py b/autogpt_platform/backend/backend/api/features/store/model_test.py index 3633e6549e..a37966601b 100644 --- a/autogpt_platform/backend/backend/api/features/store/model_test.py +++ b/autogpt_platform/backend/backend/api/features/store/model_test.py @@ -72,6 +72,8 @@ def test_store_agent_details(): runs=50, rating=4.5, versions=["1.0", "2.0"], + agentGraphVersions=["1", "2"], + agentGraphId="test-graph-id", last_updated=datetime.datetime.now(), ) assert details.slug == "test-agent" diff --git a/autogpt_platform/backend/backend/api/features/store/routes.py b/autogpt_platform/backend/backend/api/features/store/routes.py index 7d4db50d3f..7816b25d5a 100644 --- a/autogpt_platform/backend/backend/api/features/store/routes.py +++ b/autogpt_platform/backend/backend/api/features/store/routes.py @@ -152,7 +152,11 @@ async def get_agents( tags=["store", "public"], response_model=store_model.StoreAgentDetails, ) -async 
def get_agent(username: str, agent_name: str): +async def get_agent( + username: str, + agent_name: str, + include_changelog: bool = fastapi.Query(default=False), +): """ This is only used on the AgentDetails Page. @@ -162,7 +166,7 @@ async def get_agent(username: str, agent_name: str): # URL decode the agent name since it comes from the URL path agent_name = urllib.parse.unquote(agent_name).lower() agent = await store_cache._get_cached_agent_details( - username=username, agent_name=agent_name + username=username, agent_name=agent_name, include_changelog=include_changelog ) return agent diff --git a/autogpt_platform/backend/backend/api/features/store/routes_test.py b/autogpt_platform/backend/backend/api/features/store/routes_test.py index b9c040c149..7fdc0b9ebb 100644 --- a/autogpt_platform/backend/backend/api/features/store/routes_test.py +++ b/autogpt_platform/backend/backend/api/features/store/routes_test.py @@ -374,6 +374,8 @@ def test_get_agent_details( runs=100, rating=4.5, versions=["1.0.0", "1.1.0"], + agentGraphVersions=["1", "2"], + agentGraphId="test-graph-id", last_updated=FIXED_NOW, ) mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agent_details") @@ -387,7 +389,9 @@ def test_get_agent_details( assert data.creator == "creator1" snapshot.snapshot_dir = "snapshots" snapshot.assert_match(json.dumps(response.json(), indent=2), "agt_details") - mock_db_call.assert_called_once_with(username="creator1", agent_name="test-agent") + mock_db_call.assert_called_once_with( + username="creator1", agent_name="test-agent", include_changelog=False + ) def test_get_creators_defaults( diff --git a/autogpt_platform/backend/backend/data/onboarding.py b/autogpt_platform/backend/backend/data/onboarding.py index d9977e9535..cc63b89afd 100644 --- a/autogpt_platform/backend/backend/data/onboarding.py +++ b/autogpt_platform/backend/backend/data/onboarding.py @@ -442,6 +442,8 @@ async def get_recommended_agents(user_id: str) -> list[StoreAgentDetails]: runs=agent.runs, rating=agent.rating, versions=agent.versions, + agentGraphVersions=agent.agentGraphVersions, + agentGraphId=agent.agentGraphId, last_updated=agent.updated_at, ) for agent in recommended_agents diff --git a/autogpt_platform/backend/migrations/20251216182139_fix_store_submission_agent_version/migration.sql b/autogpt_platform/backend/migrations/20251216182139_fix_store_submission_agent_version/migration.sql new file mode 100644 index 0000000000..676fe641b6 --- /dev/null +++ b/autogpt_platform/backend/migrations/20251216182139_fix_store_submission_agent_version/migration.sql @@ -0,0 +1,45 @@ +-- Fix StoreSubmission view to use agentGraphVersion instead of version for agent_version field +-- This ensures that submission.agent_version returns the actual agent graph version, not the store listing version number + +BEGIN; + +-- Recreate the view with the corrected agent_version field (using agentGraphVersion instead of version) +CREATE OR REPLACE VIEW "StoreSubmission" AS +SELECT + sl.id AS listing_id, + sl."owningUserId" AS user_id, + slv."agentGraphId" AS agent_id, + slv."agentGraphVersion" AS agent_version, + sl.slug, + COALESCE(slv.name, '') AS name, + slv."subHeading" AS sub_heading, + slv.description, + slv.instructions, + slv."imageUrls" AS image_urls, + slv."submittedAt" AS date_submitted, + slv."submissionStatus" AS status, + COALESCE(ar.run_count, 0::bigint) AS runs, + COALESCE(avg(sr.score::numeric), 0.0)::double precision AS rating, + slv.id AS store_listing_version_id, + slv."reviewerId" AS reviewer_id, + 
slv."reviewComments" AS review_comments, + slv."internalComments" AS internal_comments, + slv."reviewedAt" AS reviewed_at, + slv."changesSummary" AS changes_summary, + slv."videoUrl" AS video_url, + slv.categories +FROM "StoreListing" sl + JOIN "StoreListingVersion" slv ON slv."storeListingId" = sl.id + LEFT JOIN "StoreListingReview" sr ON sr."storeListingVersionId" = slv.id + LEFT JOIN ( + SELECT "AgentGraphExecution"."agentGraphId", count(*) AS run_count + FROM "AgentGraphExecution" + GROUP BY "AgentGraphExecution"."agentGraphId" + ) ar ON ar."agentGraphId" = slv."agentGraphId" +WHERE sl."isDeleted" = false +GROUP BY sl.id, sl."owningUserId", slv.id, slv."agentGraphId", slv."agentGraphVersion", sl.slug, slv.name, + slv."subHeading", slv.description, slv.instructions, slv."imageUrls", slv."submittedAt", + slv."submissionStatus", slv."reviewerId", slv."reviewComments", slv."internalComments", + slv."reviewedAt", slv."changesSummary", slv."videoUrl", slv.categories, ar.run_count; + +COMMIT; \ No newline at end of file diff --git a/autogpt_platform/backend/migrations/20251217174500_fix_store_agent_versions_to_graph_versions/migration.sql b/autogpt_platform/backend/migrations/20251217174500_fix_store_agent_versions_to_graph_versions/migration.sql new file mode 100644 index 0000000000..495ac113b4 --- /dev/null +++ b/autogpt_platform/backend/migrations/20251217174500_fix_store_agent_versions_to_graph_versions/migration.sql @@ -0,0 +1,81 @@ +-- Add agentGraphVersions field to StoreAgent view for consistent version comparison +-- This keeps the existing versions field unchanged and adds a new field with graph versions +-- This makes it safe for version comparison with LibraryAgent.graph_version + +BEGIN; + +-- Drop and recreate the StoreAgent view with new agentGraphVersions field +DROP VIEW IF EXISTS "StoreAgent"; + +CREATE OR REPLACE VIEW "StoreAgent" AS +WITH latest_versions AS ( + SELECT + "storeListingId", + MAX(version) AS max_version + FROM "StoreListingVersion" + WHERE "submissionStatus" = 'APPROVED' + GROUP BY "storeListingId" +), +agent_versions AS ( + SELECT + "storeListingId", + array_agg(DISTINCT version::text ORDER BY version::text) AS versions + FROM "StoreListingVersion" + WHERE "submissionStatus" = 'APPROVED' + GROUP BY "storeListingId" +), +agent_graph_versions AS ( + SELECT + "storeListingId", + array_agg(DISTINCT "agentGraphVersion"::text ORDER BY "agentGraphVersion"::text) AS graph_versions + FROM "StoreListingVersion" + WHERE "submissionStatus" = 'APPROVED' + GROUP BY "storeListingId" +) +SELECT + sl.id AS listing_id, + slv.id AS "storeListingVersionId", + slv."createdAt" AS updated_at, + sl.slug, + COALESCE(slv.name, '') AS agent_name, + slv."videoUrl" AS agent_video, + slv."agentOutputDemoUrl" AS agent_output_demo, + COALESCE(slv."imageUrls", ARRAY[]::text[]) AS agent_image, + slv."isFeatured" AS featured, + p.username AS creator_username, -- Allow NULL for malformed sub-agents + p."avatarUrl" AS creator_avatar, -- Allow NULL for malformed sub-agents + slv."subHeading" AS sub_heading, + slv.description, + slv.categories, + slv.search, + COALESCE(ar.run_count, 0::bigint) AS runs, + COALESCE(rs.avg_rating, 0.0)::double precision AS rating, + COALESCE(av.versions, ARRAY[slv.version::text]) AS versions, + COALESCE(agv.graph_versions, ARRAY[slv."agentGraphVersion"::text]) AS "agentGraphVersions", + slv."agentGraphId", + slv."isAvailable" AS is_available, + COALESCE(sl."useForOnboarding", false) AS "useForOnboarding" +FROM "StoreListing" sl +JOIN latest_versions lv + ON sl.id = 
lv."storeListingId" +JOIN "StoreListingVersion" slv + ON slv."storeListingId" = lv."storeListingId" + AND slv.version = lv.max_version + AND slv."submissionStatus" = 'APPROVED' +JOIN "AgentGraph" a + ON slv."agentGraphId" = a.id + AND slv."agentGraphVersion" = a.version +LEFT JOIN "Profile" p + ON sl."owningUserId" = p."userId" +LEFT JOIN "mv_review_stats" rs + ON sl.id = rs."storeListingId" +LEFT JOIN "mv_agent_run_counts" ar + ON a.id = ar."agentGraphId" +LEFT JOIN agent_versions av + ON sl.id = av."storeListingId" +LEFT JOIN agent_graph_versions agv + ON sl.id = agv."storeListingId" +WHERE sl."isDeleted" = false + AND sl."hasApprovedVersion" = true; + +COMMIT; \ No newline at end of file diff --git a/autogpt_platform/backend/schema.prisma b/autogpt_platform/backend/schema.prisma index d81cd4d1b1..2f6c109c03 100644 --- a/autogpt_platform/backend/schema.prisma +++ b/autogpt_platform/backend/schema.prisma @@ -734,11 +734,13 @@ view StoreAgent { description String categories String[] search Unsupported("tsvector")? @default(dbgenerated("''::tsvector")) - runs Int - rating Float - versions String[] - is_available Boolean @default(true) - useForOnboarding Boolean @default(false) + runs Int + rating Float + versions String[] + agentGraphVersions String[] + agentGraphId String + is_available Boolean @default(true) + useForOnboarding Boolean @default(false) // Materialized views used (refreshed every 15 minutes via pg_cron): // - mv_agent_run_counts - Pre-aggregated agent execution counts by agentGraphId diff --git a/autogpt_platform/backend/snapshots/agt_details b/autogpt_platform/backend/snapshots/agt_details index 649b5ed644..0d69f1c23a 100644 --- a/autogpt_platform/backend/snapshots/agt_details +++ b/autogpt_platform/backend/snapshots/agt_details @@ -23,8 +23,14 @@ "1.0.0", "1.1.0" ], + "agentGraphVersions": [ + "1", + "2" + ], + "agentGraphId": "test-graph-id", "last_updated": "2023-01-01T00:00:00", "recommended_schedule_cron": null, "active_version_id": null, - "has_approved_version": false + "has_approved_version": false, + "changelog": null } \ No newline at end of file diff --git a/autogpt_platform/frontend/src/app/(platform)/admin/marketplace/components/ExpandleRow.tsx b/autogpt_platform/frontend/src/app/(platform)/admin/marketplace/components/ExpandleRow.tsx index cf0f2389aa..e13a5eeed3 100644 --- a/autogpt_platform/frontend/src/app/(platform)/admin/marketplace/components/ExpandleRow.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/admin/marketplace/components/ExpandleRow.tsx @@ -102,7 +102,7 @@ export function ExpandableRow({ Version Status - {/* Changes */} + Changes Submitted Reviewed External Comments @@ -127,9 +127,9 @@ export function ExpandableRow({ )} {getStatusBadge(version.status)} - {/* + {version.changes_summary || "No summary"} - */} + {version.date_submitted ? 
formatDistanceToNow( diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/BuilderActions.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/BuilderActions.tsx index afe70bd434..64eb624621 100644 --- a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/BuilderActions.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/BuilderActions.tsx @@ -2,6 +2,7 @@ import { parseAsString, useQueryStates } from "nuqs"; import { AgentOutputs } from "./components/AgentOutputs/AgentOutputs"; import { RunGraph } from "./components/RunGraph/RunGraph"; import { ScheduleGraph } from "./components/ScheduleGraph/ScheduleGraph"; +import { PublishToMarketplace } from "./components/PublishToMarketplace/PublishToMarketplace"; import { memo } from "react"; export const BuilderActions = memo(() => { @@ -13,6 +14,7 @@ export const BuilderActions = memo(() => { +
); }); diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/PublishToMarketplace.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/PublishToMarketplace.tsx new file mode 100644 index 0000000000..1e6545dfbd --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/PublishToMarketplace.tsx @@ -0,0 +1,36 @@ +import { ShareIcon } from "@phosphor-icons/react"; +import { BuilderActionButton } from "../BuilderActionButton"; +import { + Tooltip, + TooltipContent, + TooltipTrigger, +} from "@/components/atoms/Tooltip/BaseTooltip"; +import { usePublishToMarketplace } from "./usePublishToMarketplace"; +import { PublishAgentModal } from "@/components/contextual/PublishAgentModal/PublishAgentModal"; + +export const PublishToMarketplace = ({ flowID }: { flowID: string | null }) => { + const { handlePublishToMarketplace, publishState, handleStateChange } = + usePublishToMarketplace({ flowID }); + + return ( + <> + + + + + + + Publish to Marketplace + + + + + ); +}; diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/usePublishToMarketplace.ts b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/usePublishToMarketplace.ts new file mode 100644 index 0000000000..ceaa4de905 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/build/components/BuilderActions/components/PublishToMarketplace/usePublishToMarketplace.ts @@ -0,0 +1,48 @@ +import { useState, useCallback } from "react"; +import type { StoreSubmission } from "@/app/api/__generated__/models/storeSubmission"; + +export type PublishStep = "select" | "info" | "review"; + +export type PublishState = { + isOpen: boolean; + step: PublishStep; + submissionData: StoreSubmission | null; +}; + +const defaultPublishState: PublishState = { + isOpen: false, + step: "select", + submissionData: null, +}; + +interface UsePublishToMarketplaceProps { + flowID: string | null; +} + +export function usePublishToMarketplace({ + flowID, +}: UsePublishToMarketplaceProps) { + const [publishState, setPublishState] = + useState(defaultPublishState); + + const handlePublishToMarketplace = () => { + if (!flowID) return; + + // Open the publish modal starting with the select step + setPublishState({ + isOpen: true, + step: "select", + submissionData: null, + }); + }; + + const handleStateChange = useCallback((newState: PublishState) => { + setPublishState(newState); + }, []); + + return { + handlePublishToMarketplace, + publishState, + handleStateChange, + }; +} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/NewAgentLibraryView.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/NewAgentLibraryView.tsx index 2d7a1b30f4..3768a0d150 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/NewAgentLibraryView.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/NewAgentLibraryView.tsx @@ -5,8 +5,13 @@ import { Breadcrumbs } from "@/components/molecules/Breadcrumbs/Breadcrumbs"; import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard"; import { cn } from "@/lib/utils"; import { PlusIcon } from "@phosphor-icons/react"; 
-import { useEffect } from "react"; +import { useEffect, useState } from "react"; import { RunAgentModal } from "./components/modals/RunAgentModal/RunAgentModal"; +import { useMarketplaceUpdate } from "./hooks/useMarketplaceUpdate"; +import { AgentVersionChangelog } from "./components/AgentVersionChangelog"; +import { MarketplaceBanners } from "@/components/contextual/MarketplaceBanners/MarketplaceBanners"; +import { PublishAgentModal } from "@/components/contextual/PublishAgentModal/PublishAgentModal"; +import { AgentSettingsButton } from "./components/other/AgentSettingsButton"; import { AgentRunsLoading } from "./components/other/AgentRunsLoading"; import { EmptySchedules } from "./components/other/EmptySchedules"; import { EmptyTasks } from "./components/other/EmptyTasks"; @@ -16,9 +21,9 @@ import { SectionWrap } from "./components/other/SectionWrap"; import { LoadingSelectedContent } from "./components/selected-views/LoadingSelectedContent"; import { SelectedRunView } from "./components/selected-views/SelectedRunView/SelectedRunView"; import { SelectedScheduleView } from "./components/selected-views/SelectedScheduleView/SelectedScheduleView"; +import { SelectedSettingsView } from "./components/selected-views/SelectedSettingsView/SelectedSettingsView"; import { SelectedTemplateView } from "./components/selected-views/SelectedTemplateView/SelectedTemplateView"; import { SelectedTriggerView } from "./components/selected-views/SelectedTriggerView/SelectedTriggerView"; -import { SelectedSettingsView } from "./components/selected-views/SelectedSettingsView/SelectedSettingsView"; import { SelectedViewLayout } from "./components/selected-views/SelectedViewLayout"; import { SidebarRunsList } from "./components/sidebar/SidebarRunsList/SidebarRunsList"; import { AGENT_LIBRARY_SECTION_PADDING_X } from "./helpers"; @@ -26,6 +31,7 @@ import { useNewAgentLibraryView } from "./useNewAgentLibraryView"; export function NewAgentLibraryView() { const { + agentId, agent, ready, activeTemplate, @@ -39,18 +45,79 @@ export function NewAgentLibraryView() { handleSelectRun, handleCountsChange, handleClearSelectedRun, - onRunInitiated, handleSelectSettings, + onRunInitiated, onTriggerSetup, onScheduleCreated, } = useNewAgentLibraryView(); + const { + hasAgentMarketplaceUpdate, + hasMarketplaceUpdate, + latestMarketplaceVersion, + isUpdating, + modalOpen, + setModalOpen, + handlePublishUpdate, + handleUpdateToLatest, + } = useMarketplaceUpdate({ agent }); + + const [changelogOpen, setChangelogOpen] = useState(false); + useEffect(() => { if (agent) { document.title = `${agent.name} - Library - AutoGPT Platform`; } }, [agent]); + function renderMarketplaceUpdateBanner() { + return ( + setChangelogOpen(true)} + /> + ); + } + + function renderPublishAgentModal() { + if (!modalOpen || !agent) return null; + + return ( + { + if (!state.isOpen) { + setModalOpen(false); + } + }} + /> + ); + } + + function renderVersionChangelog() { + if (!agent) return null; + + return ( + setChangelogOpen(false)} + /> + ); + } + if (error) { return ( -
-
+ <> +
+
+
+ +
-
- -
-
+ {renderPublishAgentModal()} + {renderVersionChangelog()} + ); } return ( -
- -
- - New task - - } - agent={agent} - onRunCreated={onRunInitiated} - onScheduleCreated={onScheduleCreated} - onTriggerSetup={onTriggerSetup} - initialInputValues={activeTemplate?.inputs} - initialInputCredentials={activeTemplate?.credentials} - /> -
+ <> +
+ +
+
+ + New task + + } + agent={agent} + onRunCreated={onRunInitiated} + onScheduleCreated={onScheduleCreated} + onTriggerSetup={onTriggerSetup} + initialInputValues={activeTemplate?.inputs} + initialInputCredentials={activeTemplate?.credentials} + /> + +
+
- -
- - {activeItem ? ( - activeItem === "settings" ? ( - - ) : activeTab === "scheduled" ? ( - - ) : activeTab === "templates" ? ( - handleSelectRun(execution.id, "runs")} - onSwitchToRunsTab={() => setActiveTab("runs")} - /> - ) : activeTab === "triggers" ? ( - setActiveTab("runs")} - /> - ) : ( - - ) - ) : sidebarLoading ? ( - - ) : activeTab === "scheduled" ? ( - - - - ) : activeTab === "templates" ? ( - - - - ) : activeTab === "triggers" ? ( - - - - ) : ( - - + + {activeItem ? ( + activeItem === "settings" ? ( + + ) : activeTab === "scheduled" ? ( + + ) : activeTab === "templates" ? ( + + handleSelectRun(execution.id, "runs") + } + onSwitchToRunsTab={() => setActiveTab("runs")} + banner={renderMarketplaceUpdateBanner()} + /> + ) : activeTab === "triggers" ? ( + setActiveTab("runs")} + banner={renderMarketplaceUpdateBanner()} + /> + ) : ( + + ) + ) : sidebarLoading ? ( + + ) : activeTab === "scheduled" ? ( + - - )} -
+ banner={renderMarketplaceUpdateBanner()} + > + + + ) : activeTab === "templates" ? ( + + + + ) : activeTab === "triggers" ? ( + + + + ) : ( + + + + )} +
+ {renderPublishAgentModal()} + {renderVersionChangelog()} + ); } diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/AgentVersionChangelog.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/AgentVersionChangelog.tsx new file mode 100644 index 0000000000..8781376b17 --- /dev/null +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/AgentVersionChangelog.tsx @@ -0,0 +1,137 @@ +"use client"; + +import { Text } from "@/components/atoms/Text/Text"; +import { Dialog } from "@/components/molecules/Dialog/Dialog"; +import { Skeleton } from "@/components/__legacy__/ui/skeleton"; +import { useGetV2GetSpecificAgent } from "@/app/api/__generated__/endpoints/store/store"; +import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; +import { okData } from "@/app/api/helpers"; +import type { StoreAgentDetails } from "@/app/api/__generated__/models/storeAgentDetails"; +import React from "react"; + +interface AgentVersionChangelogProps { + agent: LibraryAgent; + isOpen: boolean; + onClose: () => void; +} + +interface VersionInfo { + version: number; + isCurrentVersion: boolean; +} + +export function AgentVersionChangelog({ + agent, + isOpen, + onClose, +}: AgentVersionChangelogProps) { + // Get marketplace data if agent has marketplace listing + const { data: storeAgentData, isLoading } = useGetV2GetSpecificAgent( + agent?.marketplace_listing?.creator.slug || "", + agent?.marketplace_listing?.slug || "", + {}, + { + query: { + enabled: !!( + agent?.marketplace_listing?.creator.slug && + agent?.marketplace_listing?.slug + ), + }, + }, + ); + + // Create version info from available graph versions + const storeData = okData(storeAgentData) as StoreAgentDetails | undefined; + const agentVersions: VersionInfo[] = storeData?.agentGraphVersions + ? storeData.agentGraphVersions + .map((versionStr: string) => parseInt(versionStr, 10)) + .sort((a: number, b: number) => b - a) // Sort descending (newest first) + .map((version: number) => ({ + version, + isCurrentVersion: version === agent.graph_version, + })) + : []; + + const renderVersionItem = (versionInfo: VersionInfo) => { + return ( +
+
+
+ + v{versionInfo.version} + + {versionInfo.isCurrentVersion && ( + + Current + + )} +
+
+ + + Available marketplace version + +
+ ); + }; + + return ( + { + if (!isOpen) { + onClose(); + } + }, + }} + > + +
+ {isLoading ? ( +
+ + + + +
+ ) : agentVersions.length > 0 ? ( +
+ + View changes and updates across different versions of this + agent. + + {agentVersions.map(renderVersionItem)} +
+ ) : ( +
+ + No version history available for this agent. + +
+ )} +
+
+
+ ); +} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton.tsx index bc710ebc4e..11dcbd943f 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/other/AgentSettingsButton.tsx @@ -6,9 +6,14 @@ import { useAgentSafeMode } from "@/hooks/useAgentSafeMode"; interface Props { agent: LibraryAgent; onSelectSettings: () => void; + selected?: boolean; } -export function AgentSettingsButton({ agent, onSelectSettings }: Props) { +export function AgentSettingsButton({ + agent, + onSelectSettings, + selected, +}: Props) { const { hasHITLBlocks } = useAgentSafeMode(agent); if (!hasHITLBlocks) { @@ -17,13 +22,16 @@ export function AgentSettingsButton({ agent, onSelectSettings }: Props) { return ( ); } diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/SelectedRunView.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/SelectedRunView.tsx index 9e470139ff..c66f0e9245 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/SelectedRunView.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/SelectedRunView.tsx @@ -32,6 +32,7 @@ interface Props { runId: string; onSelectRun?: (id: string) => void; onClearSelectedRun?: () => void; + banner?: React.ReactNode; onSelectSettings?: () => void; selectedSettings?: boolean; } @@ -41,7 +42,9 @@ export function SelectedRunView({ runId, onSelectRun, onClearSelectedRun, + banner, onSelectSettings, + selectedSettings, }: Props) { const { run, preset, isLoading, responseError, httpError } = useSelectedRunView(agent.graph_id, runId); @@ -81,7 +84,12 @@ export function SelectedRunView({ return (
- +
@@ -105,7 +113,7 @@ export function SelectedRunView({ )} @@ -130,20 +138,22 @@ export function SelectedRunView({ {/* Human-in-the-Loop Reviews Section */} {withReviews && ( -
- {reviewsLoading ? ( - - ) : pendingReviews.length > 0 ? ( - - ) : ( - - No pending reviews for this execution - - )} +
+ + {reviewsLoading ? ( + + ) : pendingReviews.length > 0 ? ( + + ) : ( + + No pending reviews for this execution + + )} +
)} diff --git a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/SelectedRunActions.tsx b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/SelectedRunActions.tsx index cb821b2ecd..83c836def4 100644 --- a/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/SelectedRunActions.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/SelectedRunView/components/SelectedRunActions/SelectedRunActions.tsx @@ -15,7 +15,6 @@ import { SelectedActionsWrap } from "../../../SelectedActionsWrap"; import { ShareRunButton } from "../../../ShareRunButton/ShareRunButton"; import { CreateTemplateModal } from "../CreateTemplateModal/CreateTemplateModal"; import { useSelectedRunActions } from "./useSelectedRunActions"; -import { SafeModeToggle } from "../SafeModeToggle"; type Props = { agent: LibraryAgent; @@ -113,7 +112,6 @@ export function SelectedRunActions({ shareToken={run.share_token} /> )} - {canRunManually && ( <> + )} +
+ ) : ( +
+ Version {version} +
+ )}
diff --git a/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainAgentPage/MainAgentPage.tsx b/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainAgentPage/MainAgentPage.tsx index 5eb3984cbc..4e5b9de6c4 100644 --- a/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainAgentPage/MainAgentPage.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainAgentPage/MainAgentPage.tsx @@ -2,6 +2,7 @@ import { Separator } from "@/components/__legacy__/ui/separator"; import { Breadcrumbs } from "@/components/molecules/Breadcrumbs/Breadcrumbs"; import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard"; +import { okData } from "@/app/api/helpers"; import { MarketplaceAgentPageParams } from "../../agent/[creator]/[slug]/page"; import { AgentImages } from "../AgentImages/AgentImage"; import { AgentInfo } from "../AgentInfo/AgentInfo"; @@ -10,24 +11,33 @@ import { AgentsSection } from "../AgentsSection/AgentsSection"; import { BecomeACreator } from "../BecomeACreator/BecomeACreator"; import { useMainAgentPage } from "./useMainAgentPage"; -type MainAgentPageProps = { +interface Props { params: MarketplaceAgentPageParams; -}; +} -export const MainAgentPage = ({ params }: MainAgentPageProps) => { +export function MainAgentPage({ params }: Props) { const { agent, - otherAgents, - similarAgents, - libraryAgent, + user, isLoading, hasError, - user, + similarAgents, + otherAgents, + libraryAgent, } = useMainAgentPage({ params }); if (isLoading) { - return ; + return ( +
+
+
+ +
+
+
+ ); } + if (hasError) { return (
@@ -46,7 +56,8 @@ export const MainAgentPage = ({ params }: MainAgentPageProps) => { ); } - if (!agent) { + const agentData = okData(agent); + if (!agentData) { return (
@@ -55,8 +66,6 @@ export const MainAgentPage = ({ params }: MainAgentPageProps) => { isSuccess={false} responseError={{ message: "Agent not found" }} context="agent page" - onRetry={() => window.location.reload()} - className="w-full max-w-md" />
@@ -67,10 +76,10 @@ export const MainAgentPage = ({ params }: MainAgentPageProps) => { const breadcrumbs = [ { name: "Marketplace", link: "/marketplace" }, { - name: agent.creator, - link: `/marketplace/creator/${encodeURIComponent(agent.creator)}`, + name: agentData.creator ?? "", + link: `/marketplace/creator/${encodeURIComponent(agentData.creator ?? "")}`, }, - { name: agent.agent_name, link: "#" }, + { name: agentData.agent_name ?? "", link: "#" }, ]; return ( @@ -82,18 +91,29 @@ export const MainAgentPage = ({ params }: MainAgentPageProps) => {
parseInt(v, 10)), + ).toString() + : "1" + } + storeListingVersionId={agentData.store_listing_version_id ?? ""} isAgentAddedToLibrary={Boolean(libraryAgent)} + creatorSlug={params.creator} + agentSlug={params.slug} />
{ const orderedImages: string[] = []; // 1. YouTube/Overview video (if it exists) - if (agent.agent_video) { - orderedImages.push(agent.agent_video); + if (agentData.agent_video) { + orderedImages.push(agentData.agent_video); } // 2. First image (hero) - if (agent.agent_image.length > 0) { - orderedImages.push(agent.agent_image[0]); + if (agentData.agent_image?.length > 0) { + orderedImages.push(agentData.agent_image[0]); } // 3. Agent Output Demo (if it exists) - if ((agent as any).agent_output_demo) { - orderedImages.push((agent as any).agent_output_demo); + if (agentData.agent_output_demo) { + orderedImages.push(agentData.agent_output_demo); } // 4. Additional images - if (agent.agent_image.length > 1) { - orderedImages.push(...agent.agent_image.slice(1)); + if (agentData.agent_image && agentData.agent_image.length > 1) { + orderedImages.push(...agentData.agent_image.slice(1)); } return orderedImages; @@ -129,7 +149,7 @@ export const MainAgentPage = ({ params }: MainAgentPageProps) => { )} @@ -140,13 +160,8 @@ export const MainAgentPage = ({ params }: MainAgentPageProps) => { sectionTitle="Similar agents" /> )} - - +
); -}; +} diff --git a/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainAgentPage/useMainAgentPage.ts b/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainAgentPage/useMainAgentPage.ts index ef38f336d0..674955545e 100644 --- a/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainAgentPage/useMainAgentPage.ts +++ b/autogpt_platform/frontend/src/app/(platform)/marketplace/components/MainAgentPage/useMainAgentPage.ts @@ -5,8 +5,8 @@ import { import { MarketplaceAgentPageParams } from "../../agent/[creator]/[slug]/page"; import { useGetV2GetAgentByStoreId } from "@/app/api/__generated__/endpoints/library/library"; import { StoreAgentsResponse } from "@/app/api/__generated__/models/storeAgentsResponse"; -import { StoreAgentDetails } from "@/app/api/__generated__/models/storeAgentDetails"; import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent"; +import { okData } from "@/app/api/helpers"; import { useSupabase } from "@/lib/supabase/hooks/useSupabase"; export const useMainAgentPage = ({ @@ -20,13 +20,7 @@ export const useMainAgentPage = ({ data: agent, isLoading: isAgentLoading, isError: isAgentError, - } = useGetV2GetSpecificAgent(creator_lower, params.slug, { - query: { - select: (x) => { - return x.data as StoreAgentDetails; - }, - }, - }); + } = useGetV2GetSpecificAgent(creator_lower, params.slug); const { data: otherAgents, isLoading: isOtherAgentsLoading, @@ -59,12 +53,12 @@ export const useMainAgentPage = ({ data: libraryAgent, isLoading: isLibraryAgentLoading, isError: isLibraryAgentError, - } = useGetV2GetAgentByStoreId(agent?.active_version_id ?? "", { + } = useGetV2GetAgentByStoreId(okData(agent)?.active_version_id ?? "", { query: { select: (x) => { return x.data as LibraryAgent; }, - enabled: !!user && !!agent?.active_version_id, + enabled: !!user && !!okData(agent)?.active_version_id, }, }); diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/AgentTableRow/AgentTableRow.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/AgentTableRow/AgentTableRow.tsx index bdc735ea80..5b85ade1ae 100644 --- a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/AgentTableRow/AgentTableRow.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/AgentTableRow/AgentTableRow.tsx @@ -33,6 +33,7 @@ export interface AgentTableRowProps { video_url?: string; categories?: string[]; store_listing_version_id?: string; + changes_summary?: string; onViewSubmission: (submission: StoreSubmission) => void; onDeleteSubmission: (submission_id: string) => void; onEditSubmission: ( @@ -58,6 +59,7 @@ export const AgentTableRow = ({ video_url, categories, store_listing_version_id, + changes_summary, onViewSubmission, onDeleteSubmission, onEditSubmission, @@ -80,6 +82,7 @@ export const AgentTableRow = ({ video_url, categories, store_listing_version_id, + changes_summary, }); // Determine if we should show Edit or View button diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/AgentTableRow/useAgentTableRow.ts b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/AgentTableRow/useAgentTableRow.ts index 7014eec198..14fbac4336 100644 --- a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/AgentTableRow/useAgentTableRow.ts +++ 
b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/AgentTableRow/useAgentTableRow.ts @@ -25,6 +25,7 @@ interface useAgentTableRowProps { video_url?: string; categories?: string[]; store_listing_version_id?: string; + changes_summary?: string; } export const useAgentTableRow = ({ @@ -44,6 +45,7 @@ export const useAgentTableRow = ({ video_url, categories, store_listing_version_id, + changes_summary, }: useAgentTableRowProps) => { const handleView = () => { onViewSubmission({ @@ -72,7 +74,7 @@ export const useAgentTableRow = ({ image_urls: imageSrc, video_url, categories, - changes_summary: "Update Submission", + changes_summary: changes_summary || "Update Submission", store_listing_version_id, agent_id, }); diff --git a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/MainDashboardPage/MainDashboardPage.tsx b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/MainDashboardPage/MainDashboardPage.tsx index 71968d08c9..e53244db77 100644 --- a/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/MainDashboardPage/MainDashboardPage.tsx +++ b/autogpt_platform/frontend/src/app/(platform)/profile/(user)/dashboard/components/MainDashboardPage/MainDashboardPage.tsx @@ -98,6 +98,7 @@ export const MainDashboardPage = () => { slug: submission.slug, store_listing_version_id: submission.store_listing_version_id || undefined, + changes_summary: submission.changes_summary || undefined, }))} onViewSubmission={onViewSubmission} onDeleteSubmission={onDeleteSubmission} diff --git a/autogpt_platform/frontend/src/app/api/openapi.json b/autogpt_platform/frontend/src/app/api/openapi.json index 2ead2189ed..ea3bbcc5d8 100644 --- a/autogpt_platform/frontend/src/app/api/openapi.json +++ b/autogpt_platform/frontend/src/app/api/openapi.json @@ -5113,6 +5113,16 @@ "in": "path", "required": true, "schema": { "type": "string", "title": "Agent Name" } + }, + { + "name": "include_changelog", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "default": false, + "title": "Include Changelog" + } } ], "responses": { @@ -6510,6 +6520,16 @@ "required": ["file"], "title": "Body_postV2Upload submission media" }, + "ChangelogEntry": { + "properties": { + "version": { "type": "string", "title": "Version" }, + "changes_summary": { "type": "string", "title": "Changes Summary" }, + "date": { "type": "string", "format": "date-time", "title": "Date" } + }, + "type": "object", + "required": ["version", "changes_summary", "date"], + "title": "ChangelogEntry" + }, "ChatRequest": { "properties": { "query": { "type": "string", "title": "Query" }, @@ -7953,6 +7973,11 @@ "title": "Auto Update Version", "description": "Auto-update the agent version" }, + "graph_version": { + "anyOf": [{ "type": "integer" }, { "type": "null" }], + "title": "Graph Version", + "description": "Specific graph version to update to" + }, "is_favorite": { "anyOf": [{ "type": "boolean" }, { "type": "null" }], "title": "Is Favorite", @@ -9508,6 +9533,12 @@ "type": "array", "title": "Versions" }, + "agentGraphVersions": { + "items": { "type": "string" }, + "type": "array", + "title": "Agentgraphversions" + }, + "agentGraphId": { "type": "string", "title": "Agentgraphid" }, "last_updated": { "type": "string", "format": "date-time", @@ -9525,6 +9556,16 @@ "type": "boolean", "title": "Has Approved Version", "default": false + }, + "changelog": { + "anyOf": [ + { + "items": { "$ref": "#/components/schemas/ChangelogEntry" }, + "type": 
"array" + }, + { "type": "null" } + ], + "title": "Changelog" } }, "type": "object", @@ -9543,6 +9584,8 @@ "runs", "rating", "versions", + "agentGraphVersions", + "agentGraphId", "last_updated" ], "title": "StoreAgentDetails" diff --git a/autogpt_platform/frontend/src/components/contextual/MarketplaceBanners/MarketplaceBanners.tsx b/autogpt_platform/frontend/src/components/contextual/MarketplaceBanners/MarketplaceBanners.tsx new file mode 100644 index 0000000000..4f826f6e85 --- /dev/null +++ b/autogpt_platform/frontend/src/components/contextual/MarketplaceBanners/MarketplaceBanners.tsx @@ -0,0 +1,102 @@ +"use client"; + +import { Button } from "@/components/atoms/Button/Button"; +import { Text } from "@/components/atoms/Text/Text"; + +interface MarketplaceBannersProps { + hasUpdate?: boolean; + latestVersion?: number; + hasUnpublishedChanges?: boolean; + currentVersion?: number; + isUpdating?: boolean; + onUpdate?: () => void; + onPublish?: () => void; + onViewChanges?: () => void; +} + +export function MarketplaceBanners({ + hasUpdate, + latestVersion, + hasUnpublishedChanges, + isUpdating, + onUpdate, + onPublish, +}: MarketplaceBannersProps) { + const renderUpdateBanner = () => { + if (hasUpdate && latestVersion) { + return ( +
+        <div>
+          <div>
+            <Text>
+              Update available
+            </Text>
+            <Text>
+              You should update your agent in order to get the latest / best
+              results
+            </Text>
+          </div>
+          {onUpdate && (
+            <Button onClick={onUpdate} disabled={isUpdating}>
+              Update
+            </Button>
+          )}
+        </div>
+      );
+    }
+    return null;
+  };
+
+  const renderUnpublishedChangesBanner = () => {
+    if (hasUnpublishedChanges) {
+      return (
+        <div>
+          <div>
+            <Text>
+              Unpublished changes
+            </Text>
+            <Text>
+              You&apos;ve made changes to this agent that aren&apos;t
+              published yet. Would you like to publish the latest version?
+            </Text>
+          </div>
+          {onPublish && (
+            <Button onClick={onPublish}>
+              Publish
+            </Button>
+          )}
+        </div>
+      );
+    }
+    return null;
+  };
+
+  return (
+    <>
+      {renderUpdateBanner()}
+      {renderUnpublishedChangesBanner()}
+    </>
+  );
+}
diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/PublishAgentModal.tsx b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/PublishAgentModal.tsx
index 2f5f9aeacf..dd91094f9c 100644
--- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/PublishAgentModal.tsx
+++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/PublishAgentModal.tsx
@@ -18,6 +18,8 @@ export function PublishAgentModal({
   trigger,
   targetState,
   onStateChange,
+  preSelectedAgentId,
+  preSelectedAgentVersion,
 }: Props) {
   const {
     // State
@@ -34,7 +36,12 @@ export function PublishAgentModal({
     handleGoToBuilder,
     handleSuccessFromInfo,
     handleBack,
-  } = usePublishAgentModal({ targetState, onStateChange });
+  } = usePublishAgentModal({
+    targetState,
+    onStateChange,
+    preSelectedAgentId,
+    preSelectedAgentVersion,
+  });
 
   const { user, isUserLoading } = useSupabase();
 
@@ -65,6 +72,7 @@ export function PublishAgentModal({
           selectedAgentId={selectedAgentId}
           selectedAgentVersion={selectedAgentVersion}
           initialData={initialData}
+          isMarketplaceUpdate={!!currentState.submissionData}
         />
       );
     case "review":
diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/AgentInfoStep.tsx b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/AgentInfoStep.tsx
index 3ec680ca95..7cd6b25d91 100644
--- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/AgentInfoStep.tsx
+++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/AgentInfoStep.tsx
@@ -19,6 +19,7 @@ export function AgentInfoStep({
   selectedAgentId,
   selectedAgentVersion,
   initialData,
+  isMarketplaceUpdate,
 }: Props) {
   const {
     form,
@@ -34,6 +35,7 @@ export function AgentInfoStep({
     selectedAgentId,
     selectedAgentVersion,
     initialData,
+    isMarketplaceUpdate,
   });
 
   const [cronScheduleDialogOpen, setCronScheduleDialogOpen] =
@@ -65,6 +67,41 @@ export function AgentInfoStep({
         
 
+      {/* Changes summary field - only shown for updates */}
+      {isMarketplaceUpdate && (
+        <FormField
+          control={form.control}
+          name="changesSummary"
+          render={({ field }) => (
+            <FormItem>
+              <FormLabel>Changes summary</FormLabel>
+              <FormControl>
+                <Input {...field} />
+              </FormControl>
+              <FormDescription>
+                This is required to help users understand what&apos;s
+                different in this update.
+              </FormDescription>
+            </FormItem>
+          )}
+        />
+      )}
+
+      {/* Optional section label for updates */}
+      {isMarketplaceUpdate && (
+        <Text>
+          Optional: Update any of the following details (or leave them
+          as-is)
+        </Text>
+ )} + (null); const { toast } = useToast(); + // Memoize the stringified version to detect actual changes + const initialImagesKey = JSON.stringify(initialImages); + + // Update images when initialImages prop changes (by value, not reference) + useEffect(() => { + if (initialImages.length > 0) { + setImages(initialImages); + setSelectedImage(initialSelectedImage || initialImages[0]); + } + }, [initialImagesKey, initialSelectedImage]); // Use stringified key instead of array reference + // Notify parent when images change useEffect(() => { onImagesChange(images); diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/helpers.ts b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/helpers.ts index bf7ed17219..86e6c0ce30 100644 --- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/helpers.ts +++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/helpers.ts @@ -1,45 +1,113 @@ import z from "zod"; import { validateYouTubeUrl } from "@/lib/utils"; -export const publishAgentSchema = z.object({ - title: z - .string() - .min(1, "Title is required") - .max(100, "Title must be less than 100 characters"), - subheader: z - .string() - .min(1, "Subheader is required") - .max(200, "Subheader must be less than 200 characters"), - slug: z - .string() - .min(1, "Slug is required") - .max(50, "Slug must be less than 50 characters") - .regex( - /^[a-z0-9-]+$/, - "Slug can only contain lowercase letters, numbers, and hyphens", - ), - youtubeLink: z - .string() - .refine(validateYouTubeUrl, "Please enter a valid YouTube URL"), - category: z.string().min(1, "Category is required"), - description: z - .string() - .min(1, "Description is required") - .max(1000, "Description must be less than 1000 characters"), - recommendedScheduleCron: z.string().optional(), - instructions: z - .string() - .optional() - .refine( - (val) => !val || val.length <= 2000, - "Instructions must be less than 2000 characters", - ), - agentOutputDemo: z - .string() - .refine(validateYouTubeUrl, "Please enter a valid YouTube URL"), -}); +// Create conditional schema that changes based on whether it's a marketplace update +export const publishAgentSchemaFactory = ( + isMarketplaceUpdate: boolean = false, +) => { + const baseSchema = { + changesSummary: isMarketplaceUpdate + ? z + .string() + .min(1, "Changes summary is required for updates") + .max(500, "Changes summary must be less than 500 characters") + : z.string().optional(), + title: isMarketplaceUpdate + ? z + .string() + .optional() + .refine( + (val) => !val || val.length <= 100, + "Title must be less than 100 characters", + ) + : z + .string() + .min(1, "Title is required") + .max(100, "Title must be less than 100 characters"), + subheader: isMarketplaceUpdate + ? z + .string() + .optional() + .refine( + (val) => !val || val.length <= 200, + "Subheader must be less than 200 characters", + ) + : z + .string() + .min(1, "Subheader is required") + .max(200, "Subheader must be less than 200 characters"), + slug: isMarketplaceUpdate + ? 
z + .string() + .optional() + .refine( + (val) => !val || (val.length <= 50 && /^[a-z0-9-]+$/.test(val)), + "Slug can only contain lowercase letters, numbers, and hyphens", + ) + : z + .string() + .min(1, "Slug is required") + .max(50, "Slug must be less than 50 characters") + .regex( + /^[a-z0-9-]+$/, + "Slug can only contain lowercase letters, numbers, and hyphens", + ), + youtubeLink: isMarketplaceUpdate + ? z + .string() + .optional() + .refine( + (val) => !val || validateYouTubeUrl(val), + "Please enter a valid YouTube URL", + ) + : z + .string() + .refine(validateYouTubeUrl, "Please enter a valid YouTube URL"), + category: isMarketplaceUpdate + ? z.string().optional() + : z.string().min(1, "Category is required"), + description: isMarketplaceUpdate + ? z + .string() + .optional() + .refine( + (val) => !val || val.length <= 1000, + "Description must be less than 1000 characters", + ) + : z + .string() + .min(1, "Description is required") + .max(1000, "Description must be less than 1000 characters"), + recommendedScheduleCron: z.string().optional(), + instructions: z + .string() + .optional() + .refine( + (val) => !val || val.length <= 2000, + "Instructions must be less than 2000 characters", + ), + agentOutputDemo: isMarketplaceUpdate + ? z + .string() + .optional() + .refine( + (val) => !val || validateYouTubeUrl(val), + "Please enter a valid YouTube URL", + ) + : z + .string() + .refine(validateYouTubeUrl, "Please enter a valid YouTube URL"), + }; -export type PublishAgentFormData = z.infer; + return z.object(baseSchema); +}; + +// Default schema for backwards compatibility +export const publishAgentSchema = publishAgentSchemaFactory(false); + +export type PublishAgentFormData = z.infer< + ReturnType +>; export interface PublishAgentInfoInitialData { agent_id: string; @@ -54,4 +122,5 @@ export interface PublishAgentInfoInitialData { recommendedScheduleCron?: string; instructions?: string; agentOutputDemo?: string; + changesSummary?: string; } diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/useAgentInfoStep.ts b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/useAgentInfoStep.ts index 6bec8dd355..f3dcfa1f21 100644 --- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/useAgentInfoStep.ts +++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentInfoStep/useAgentInfoStep.ts @@ -9,7 +9,7 @@ import * as Sentry from "@sentry/nextjs"; import { PublishAgentFormData, PublishAgentInfoInitialData, - publishAgentSchema, + publishAgentSchemaFactory, } from "./helpers"; export interface Props { @@ -18,6 +18,7 @@ export interface Props { selectedAgentId: string | null; selectedAgentVersion: number | null; initialData?: PublishAgentInfoInitialData; + isMarketplaceUpdate?: boolean; } export function useAgentInfoStep({ @@ -26,6 +27,7 @@ export function useAgentInfoStep({ selectedAgentId, selectedAgentVersion, initialData, + isMarketplaceUpdate = false, }: Props) { const [agentId, setAgentId] = useState(null); const [images, setImages] = useState([]); @@ -36,8 +38,9 @@ export function useAgentInfoStep({ const api = useBackendAPI(); const form = useForm({ - resolver: zodResolver(publishAgentSchema), + resolver: zodResolver(publishAgentSchemaFactory(isMarketplaceUpdate)), defaultValues: { + changesSummary: "", title: "", subheader: "", slug: "", @@ -61,6 +64,7 @@ export function useAgentInfoStep({ // Update form with 
initial data form.reset({ + changesSummary: initialData.changesSummary || "", title: initialData.title, subheader: initialData.subheader, slug: initialData.slug.toLocaleLowerCase().trim(), @@ -104,9 +108,10 @@ export function useAgentInfoStep({ agent_output_demo_url: data.agentOutputDemo || "", agent_id: selectedAgentId || "", agent_version: selectedAgentVersion || 0, - slug: data.slug.replace(/\s+/g, "-"), + slug: (data.slug || "").replace(/\s+/g, "-"), categories: filteredCategories, recommended_schedule_cron: data.recommendedScheduleCron || null, + changes_summary: data.changesSummary || null, } as any); await queryClient.invalidateQueries({ diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentReviewStep.tsx b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentReviewStep.tsx index ba7456aa0d..58caa334dd 100644 --- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentReviewStep.tsx +++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/components/AgentReviewStep.tsx @@ -52,7 +52,7 @@ export function AgentReviewStep({ {subheader} @@ -80,7 +80,7 @@ export function AgentReviewStep({ {description ? ( {description} diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/helpers.ts b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/helpers.ts index a7175736b4..358c4da260 100644 --- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/helpers.ts +++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/helpers.ts @@ -8,4 +8,8 @@ export const emptyModalState = { category: "", description: "", recommendedScheduleCron: "", + instructions: "", + agentOutputDemo: "", + changesSummary: "", + additionalImages: [], }; diff --git a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/usePublishAgentModal.ts b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/usePublishAgentModal.ts index 8face2c6b8..f83698d8e7 100644 --- a/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/usePublishAgentModal.ts +++ b/autogpt_platform/frontend/src/components/contextual/PublishAgentModal/usePublishAgentModal.ts @@ -3,6 +3,12 @@ import { useCallback, useEffect, useState } from "react"; import { PublishAgentInfoInitialData } from "./components/AgentInfoStep/helpers"; import { useRouter } from "next/navigation"; import { emptyModalState } from "./helpers"; +import { + useGetV2GetMyAgents, + useGetV2ListMySubmissions, +} from "@/app/api/__generated__/endpoints/store/store"; +import { okData } from "@/app/api/helpers"; +import type { MyAgent } from "@/app/api/__generated__/models/myAgent"; const defaultTargetState: PublishState = { isOpen: false, @@ -22,9 +28,16 @@ export interface Props { trigger?: React.ReactNode; targetState?: PublishState; onStateChange?: (state: PublishState) => void; + preSelectedAgentId?: string; + preSelectedAgentVersion?: number; } -export function usePublishAgentModal({ targetState, onStateChange }: Props) { +export function usePublishAgentModal({ + targetState, + onStateChange, + preSelectedAgentId, + preSelectedAgentVersion, +}: Props) { const [currentState, setCurrentState] = useState( targetState || defaultTargetState, ); @@ -42,14 +55,20 @@ export function usePublishAgentModal({ targetState, onStateChange }: Props) { const [_, setSelectedAgent] = useState(null); - const [selectedAgentId, setSelectedAgentId] = 
useState(null); + const [selectedAgentId, setSelectedAgentId] = useState( + preSelectedAgentId || null, + ); const [selectedAgentVersion, setSelectedAgentVersion] = useState< number | null - >(null); + >(preSelectedAgentVersion || null); const router = useRouter(); + // Fetch agent data for pre-populating form when agent is pre-selected + const { data: myAgents } = useGetV2GetMyAgents(); + const { data: mySubmissions } = useGetV2ListMySubmissions(); + // Sync currentState with targetState when it changes from outside useEffect(() => { if (targetState) { @@ -60,13 +79,90 @@ export function usePublishAgentModal({ targetState, onStateChange }: Props) { // Reset internal state when modal opens useEffect(() => { if (!targetState) return; - if (targetState.isOpen && targetState.step === "select") { + if (targetState.isOpen) { setSelectedAgent(null); - setSelectedAgentId(null); - setSelectedAgentVersion(null); + setSelectedAgentId(preSelectedAgentId || null); + setSelectedAgentVersion(preSelectedAgentVersion || null); setInitialData(emptyModalState); } - }, [targetState]); + }, [targetState, preSelectedAgentId, preSelectedAgentVersion]); + + // Pre-populate form data when modal opens with info step and pre-selected agent + useEffect(() => { + if ( + !targetState?.isOpen || + targetState.step !== "info" || + !preSelectedAgentId || + !preSelectedAgentVersion + ) + return; + const agentsData = okData(myAgents) as any; + const submissionsData = okData(mySubmissions) as any; + + if (!agentsData || !submissionsData) return; + + // Find the agent data + const agent = agentsData.agents?.find( + (a: MyAgent) => a.agent_id === preSelectedAgentId, + ); + if (!agent) return; + + // Find published submission data for this agent (for updates) + const publishedSubmissionData = submissionsData.submissions + ?.filter( + (s: StoreSubmission) => + s.status === "APPROVED" && s.agent_id === preSelectedAgentId, + ) + .sort( + (a: StoreSubmission, b: StoreSubmission) => + b.agent_version - a.agent_version, + )[0]; + + // Populate initial data (same logic as handleNextFromSelect) + const initialFormData: PublishAgentInfoInitialData = publishedSubmissionData + ? 
{ + agent_id: preSelectedAgentId, + title: publishedSubmissionData.name, + subheader: publishedSubmissionData.sub_heading || "", + description: publishedSubmissionData.description, + instructions: publishedSubmissionData.instructions || "", + youtubeLink: publishedSubmissionData.video_url || "", + agentOutputDemo: publishedSubmissionData.agent_output_demo_url || "", + additionalImages: [ + ...new Set(publishedSubmissionData.image_urls || []), + ].filter(Boolean) as string[], + category: publishedSubmissionData.categories?.[0] || "", + thumbnailSrc: agent.agent_image || "https://picsum.photos/300/200", + slug: publishedSubmissionData.slug, + recommendedScheduleCron: agent.recommended_schedule_cron || "", + changesSummary: publishedSubmissionData.changes_summary || "", + } + : { + ...emptyModalState, + agent_id: preSelectedAgentId, + title: agent.agent_name, + description: agent.description || "", + thumbnailSrc: agent.agent_image || "https://picsum.photos/300/200", + slug: agent.agent_name.replace(/ /g, "-"), + recommendedScheduleCron: agent.recommended_schedule_cron || "", + }; + + setInitialData(initialFormData); + + // Update the state with the submission data if this is an update + if (publishedSubmissionData) { + setCurrentState((prevState) => ({ + ...prevState, + submissionData: publishedSubmissionData, + })); + } + }, [ + targetState, + preSelectedAgentId, + preSelectedAgentVersion, + myAgents, + mySubmissions, + ]); function handleClose() { // Reset all internal state @@ -97,20 +193,43 @@ export function usePublishAgentModal({ targetState, onStateChange }: Props) { imageSrc: string; recommendedScheduleCron: string | null; }, + publishedSubmissionData?: StoreSubmission | null, ) { - setInitialData({ - ...emptyModalState, - agent_id: agentId, - title: agentData.name, - description: agentData.description, - thumbnailSrc: agentData.imageSrc, - slug: agentData.name.replace(/ /g, "-"), - recommendedScheduleCron: agentData.recommendedScheduleCron || "", - }); + // Pre-populate with published data if this is an update, otherwise use agent data + const initialFormData: PublishAgentInfoInitialData = publishedSubmissionData + ? 
{ + agent_id: agentId, + title: publishedSubmissionData.name, + subheader: publishedSubmissionData.sub_heading || "", + description: publishedSubmissionData.description, + instructions: publishedSubmissionData.instructions || "", + youtubeLink: publishedSubmissionData.video_url || "", + agentOutputDemo: publishedSubmissionData.agent_output_demo_url || "", + additionalImages: [ + ...new Set(publishedSubmissionData.image_urls || []), + ].filter(Boolean) as string[], + category: publishedSubmissionData.categories?.[0] || "", // Take first category + thumbnailSrc: agentData.imageSrc, // Use current agent image + slug: publishedSubmissionData.slug, + recommendedScheduleCron: agentData.recommendedScheduleCron || "", + changesSummary: publishedSubmissionData.changes_summary || "", // Pre-populate with existing changes summary + } + : { + ...emptyModalState, + agent_id: agentId, + title: agentData.name, + description: agentData.description, + thumbnailSrc: agentData.imageSrc, + slug: agentData.name.replace(/ /g, "-"), + recommendedScheduleCron: agentData.recommendedScheduleCron || "", + }; + + setInitialData(initialFormData); updateState({ ...currentState, step: "info", + submissionData: publishedSubmissionData || null, }); setSelectedAgentId(agentId); diff --git a/autogpt_platform/frontend/src/components/contextual/marketplaceHelpers.ts b/autogpt_platform/frontend/src/components/contextual/marketplaceHelpers.ts new file mode 100644 index 0000000000..a080eca57f --- /dev/null +++ b/autogpt_platform/frontend/src/components/contextual/marketplaceHelpers.ts @@ -0,0 +1,57 @@ +/** + * Marketplace-specific helper functions that can be reused across different marketplace screens + */ + +/** + * Calculate the latest marketplace version from agent graph versions + */ +export function getLatestMarketplaceVersion( + agentGraphVersions?: string[], +): number | undefined { + if (!agentGraphVersions?.length) return undefined; + + return Math.max(...agentGraphVersions.map((v: string) => parseInt(v, 10))); +} + +/** + * Check if the current user is the creator of the agent + * Uses ID-based comparison for accurate matching + */ +export function isUserCreator( + creatorId: string | undefined, + currentUserId: string | undefined, +): boolean { + if (!creatorId || !currentUserId) return false; + return creatorId === currentUserId; +} + +/** + * Calculate update status for an agent + */ +export function calculateUpdateStatus({ + latestMarketplaceVersion, + currentVersion, + isUserCreator, + isAgentAddedToLibrary, +}: { + latestMarketplaceVersion?: number; + currentVersion: number; + isUserCreator: boolean; + isAgentAddedToLibrary: boolean; +}) { + if (!latestMarketplaceVersion) { + return { hasUpdate: false, hasUnpublishedChanges: false }; + } + + const hasUnpublishedChanges = + isUserCreator && + isAgentAddedToLibrary && + currentVersion > latestMarketplaceVersion; + + const hasUpdate = + isAgentAddedToLibrary && + !isUserCreator && + latestMarketplaceVersion > currentVersion; + + return { hasUpdate, hasUnpublishedChanges }; +}
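
A minimal usage sketch of the marketplaceHelpers functions added above, showing how they compose into the flags that MarketplaceBanners expects. The import path assumes the project's "@/" alias for src/, and all input values are illustrative placeholders rather than data from this patch.

import {
  calculateUpdateStatus,
  getLatestMarketplaceVersion,
  isUserCreator,
} from "@/components/contextual/marketplaceHelpers";

// Placeholder inputs; in the app these come from the store listing,
// the viewer's library copy of the agent, and the signed-in user.
const agentGraphVersions = ["1", "2", "3"]; // StoreAgentDetails.agentGraphVersions
const currentVersion = 2; // graph version the viewer currently has in their library
const creatorId = "creator-user-id";
const viewerId = "viewer-user-id";

const latestMarketplaceVersion = getLatestMarketplaceVersion(agentGraphVersions); // 3

const { hasUpdate, hasUnpublishedChanges } = calculateUpdateStatus({
  latestMarketplaceVersion,
  currentVersion,
  isUserCreator: isUserCreator(creatorId, viewerId),
  isAgentAddedToLibrary: true,
});
// hasUpdate === true (viewer is not the creator and 3 > 2)
// hasUnpublishedChanges === false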