From 6eee9206f71e74526e0d66014dea41cab8ded1b9 Mon Sep 17 00:00:00 2001
From: Andy Hooker <58448663+andrewhooker2@users.noreply.github.com>
Date: Mon, 10 Feb 2025 16:01:29 -0600
Subject: [PATCH 01/12] fix(market): Market featured agent card (#9463)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

### Background

Resolves: #9313

The marketplace featured agents section has a bug: hovering over a featured agent's card applies an incorrect background color to the description.

### Changes 🏗️

1. Refactored `FeaturedStoreCard` to `FeaturedAgentCard`:
   - Condensed props and leveraged the StoreAgent type from the API
   - Removed the onClick handler from props, as it is not JSON-serializable and is not in line with Next.js best practices
   - Used the built-in Card components from ShadCN to minimize custom styling
   - Optimized images by using the Image component from Next.js
2. Enhanced the `FeaturedSection` component:
   - Removed extensive prop passing and passed the agent itself using the StoreAgent type
   - Implemented Link from Next.js to better handle routing and removed the `useRouter` implementation (see the sketch after the test plan)
   - Removed the unnecessary handleCardClick method

### Checklist 📋

#### For code changes:
- [x] I have clearly listed my changes in the PR description
- [x] I have made a test plan
- [x] I have tested my changes according to the test plan:

Test Plan
- [ ] Go to the landing page of the application or /marketplace
- [ ] Scroll to the featured agents section
- [ ] Move the mouse over each of the cards and observe the image disappearing and the text being shown
- [ ] Observe that the background color of the text that replaces the image matches that of the card
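For reference, a minimal sketch of the Link-based pattern described above. It mirrors the `FeaturedSection` diff further down; the wrapper name and the fixed `bg-white` background are illustrative only, not part of the patch:

```tsx
import Link from "next/link";
import { FeaturedAgentCard } from "@/components/agptui/FeaturedAgentCard";
import { StoreAgent } from "@/lib/autogpt-server-api";

// Illustrative wrapper: the whole StoreAgent is passed down and routing is
// handled by next/link instead of a non-serializable onClick prop.
export function FeaturedAgentCardLink({ agent }: { agent: StoreAgent }) {
  return (
    <Link
      href={`/marketplace/agent/${encodeURIComponent(agent.creator)}/${encodeURIComponent(agent.slug)}`}
    >
      <FeaturedAgentCard agent={agent} backgroundColor="bg-white" />
    </Link>
  );
}
```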
--- .../frontend/src/app/marketplace/page.tsx | 9 +- .../components/agptui/FeaturedAgentCard.tsx | 77 +++++++++ .../agptui/FeaturedStoreCard.stories.tsx | 152 ++++++------------ .../components/agptui/FeaturedStoreCard.tsx | 96 ----------- .../composite/FeaturedSection.stories.tsx | 8 +- .../agptui/composite/FeaturedSection.tsx | 45 ++---- 6 files changed, 141 insertions(+), 246 deletions(-) create mode 100644 autogpt_platform/frontend/src/components/agptui/FeaturedAgentCard.tsx delete mode 100644 autogpt_platform/frontend/src/components/agptui/FeaturedStoreCard.tsx diff --git a/autogpt_platform/frontend/src/app/marketplace/page.tsx b/autogpt_platform/frontend/src/app/marketplace/page.tsx index 297fee0c7f..af5a2867c6 100644 --- a/autogpt_platform/frontend/src/app/marketplace/page.tsx +++ b/autogpt_platform/frontend/src/app/marketplace/page.tsx @@ -1,9 +1,6 @@ import * as React from "react"; import { HeroSection } from "@/components/agptui/composite/HeroSection"; -import { - FeaturedSection, - FeaturedAgent, -} from "@/components/agptui/composite/FeaturedSection"; +import { FeaturedSection } from "@/components/agptui/composite/FeaturedSection"; import { AgentsSection, Agent, @@ -155,9 +152,7 @@ export default async function Page({}: {}) {
- + = ({ + agent, + backgroundColor, +}) => { + const [isHovered, setIsHovered] = useState(false); + + return ( + setIsHovered(true)} + onMouseLeave={() => setIsHovered(false)} + className={backgroundColor} + > + + {agent.agent_name} + {agent.description} + + +
+
+ {`${agent.agent_name} +
+
+

+ {agent.description} +

+
+
+
+ +
+ {agent.runs?.toLocaleString() ?? "0"} runs +
+
+

{agent.rating.toFixed(1) ?? "0.0"}

+
+ {StarRatingIcons(agent.rating)} +
+
+
+
+ ); +}; diff --git a/autogpt_platform/frontend/src/components/agptui/FeaturedStoreCard.stories.tsx b/autogpt_platform/frontend/src/components/agptui/FeaturedStoreCard.stories.tsx index b941ba9215..5a237ee5bd 100644 --- a/autogpt_platform/frontend/src/components/agptui/FeaturedStoreCard.stories.tsx +++ b/autogpt_platform/frontend/src/components/agptui/FeaturedStoreCard.stories.tsx @@ -1,10 +1,10 @@ import type { Meta, StoryObj } from "@storybook/react"; -import { FeaturedStoreCard } from "./FeaturedStoreCard"; +import { FeaturedAgentCard } from "./FeaturedAgentCard"; import { userEvent, within } from "@storybook/test"; const meta = { title: "AGPT UI/Featured Store Card", - component: FeaturedStoreCard, + component: FeaturedAgentCard, parameters: { layout: { center: true, @@ -13,123 +13,63 @@ const meta = { }, tags: ["autodocs"], argTypes: { - agentName: { control: "text" }, - subHeading: { control: "text" }, - agentImage: { control: "text" }, - creatorImage: { control: "text" }, - creatorName: { control: "text" }, - description: { control: "text" }, - runs: { control: "number" }, - rating: { control: "number", min: 0, max: 5, step: 0.1 }, - onClick: { action: "clicked" }, + agent: { + agent_name: { control: "text" }, + sub_heading: { control: "text" }, + agent_image: { control: "text" }, + creator_avatar: { control: "text" }, + creator: { control: "text" }, + runs: { control: "number" }, + rating: { control: "number", min: 0, max: 5, step: 0.1 }, + slug: { control: "text" }, + }, + backgroundColor: { + control: "color", + }, }, -} satisfies Meta; +} satisfies Meta; export default meta; type Story = StoryObj; export const Default: Story = { args: { - agentName: "Personalized Morning Coffee Newsletter example of three lines", - subHeading: - "Transform ideas into breathtaking images with this AI-powered Image Generator.", - description: - "Elevate your web content with this powerful AI Webpage Copy Improver. Designed for marketers, SEO specialists, and web developers, this tool analyses and enhances website copy for maximum impact. 
Using advanced language models, it optimizes text for better clarity, SEO performance, and increased conversion rates.", - agentImage: - "https://framerusercontent.com/images/KCIpxr9f97EGJgpaoqnjKsrOPwI.jpg", - creatorImage: - "https://framerusercontent.com/images/KCIpxr9f97EGJgpaoqnjKsrOPwI.jpg", - creatorName: "AI Solutions Inc.", - runs: 50000, - rating: 4.7, - onClick: () => console.log("Card clicked"), - backgroundColor: "bg-white", - }, -}; - -export const LowRating: Story = { - args: { - agentName: "Data Analyzer Lite", - subHeading: "Basic data analysis tool", - description: - "A lightweight data analysis tool for basic data processing needs.", - agentImage: - "https://framerusercontent.com/images/KCIpxr9f97EGJgpaoqnjKsrOPwI.jpg", - creatorImage: - "https://framerusercontent.com/images/KCIpxr9f97EGJgpaoqnjKsrOPwI.jpg", - creatorName: "DataTech", - runs: 10000, - rating: 2.8, - onClick: () => console.log("Card clicked"), - backgroundColor: "bg-white", - }, -}; - -export const HighRuns: Story = { - args: { - agentName: "CodeAssist AI", - subHeading: "Your AI coding companion", - description: - "An intelligent coding assistant that helps developers write better code faster.", - agentImage: - "https://framerusercontent.com/images/KCIpxr9f97EGJgpaoqnjKsrOPwI.jpg", - creatorImage: - "https://framerusercontent.com/images/KCIpxr9f97EGJgpaoqnjKsrOPwI.jpg", - creatorName: "DevTools Co.", - runs: 1000000, - rating: 4.9, - onClick: () => console.log("Card clicked"), - backgroundColor: "bg-white", - }, -}; - -export const NoCreatorImage: Story = { - args: { - agentName: "MultiTasker", - subHeading: "All-in-one productivity suite", - description: - "A comprehensive productivity suite that combines task management, note-taking, and project planning into one seamless interface.", - agentImage: - "https://framerusercontent.com/images/KCIpxr9f97EGJgpaoqnjKsrOPwI.jpg", - creatorName: "Productivity Plus", - runs: 75000, - rating: 4.5, - onClick: () => console.log("Card clicked"), - backgroundColor: "bg-white", - }, -}; - -export const ShortDescription: Story = { - args: { - agentName: "QuickTask", - subHeading: "Fast task automation", - description: "Simple and efficient task automation tool.", - agentImage: - "https://framerusercontent.com/images/KCIpxr9f97EGJgpaoqnjKsrOPwI.jpg", - creatorImage: - "https://framerusercontent.com/images/KCIpxr9f97EGJgpaoqnjKsrOPwI.jpg", - creatorName: "EfficientWorks", - runs: 50000, - rating: 4.2, - onClick: () => console.log("Card clicked"), + agent: { + agent_name: + "Personalized Morning Coffee Newsletter example of three lines", + sub_heading: + "Transform ideas into breathtaking images with this AI-powered Image Generator.", + description: + "Elevate your web content with this powerful AI Webpage Copy Improver. Designed for marketers, SEO specialists, and web developers, this tool analyses and enhances website copy for maximum impact. 
Using advanced language models, it optimizes text for better clarity, SEO performance, and increased conversion rates.", + agent_image: + "https://framerusercontent.com/images/KCIpxr9f97EGJgpaoqnjKsrOPwI.jpg", + creator_avatar: + "https://framerusercontent.com/images/KCIpxr9f97EGJgpaoqnjKsrOPwI.jpg", + creator: "AI Solutions Inc.", + runs: 50000, + rating: 4.7, + slug: "", + }, backgroundColor: "bg-white", }, }; export const WithInteraction: Story = { args: { - agentName: "AI Writing Assistant", - subHeading: "Enhance your writing", - description: - "An AI-powered writing assistant that helps improve your writing style and clarity.", - agentImage: - "https://framerusercontent.com/images/KCIpxr9f97EGJgpaoqnjKsrOPwI.jpg", - creatorImage: - "https://framerusercontent.com/images/KCIpxr9f97EGJgpaoqnjKsrOPwI.jpg", - creatorName: "WordCraft AI", - runs: 200000, - rating: 4.6, - onClick: () => console.log("Card clicked"), + agent: { + slug: "", + agent_name: "AI Writing Assistant", + sub_heading: "Enhance your writing", + description: + "An AI-powered writing assistant that helps improve your writing style and clarity.", + agent_image: + "https://framerusercontent.com/images/KCIpxr9f97EGJgpaoqnjKsrOPwI.jpg", + creator_avatar: + "https://framerusercontent.com/images/KCIpxr9f97EGJgpaoqnjKsrOPwI.jpg", + creator: "WordCraft AI", + runs: 200000, + rating: 4.6, + }, backgroundColor: "bg-white", }, play: async ({ canvasElement }) => { diff --git a/autogpt_platform/frontend/src/components/agptui/FeaturedStoreCard.tsx b/autogpt_platform/frontend/src/components/agptui/FeaturedStoreCard.tsx deleted file mode 100644 index cbfdae239d..0000000000 --- a/autogpt_platform/frontend/src/components/agptui/FeaturedStoreCard.tsx +++ /dev/null @@ -1,96 +0,0 @@ -import * as React from "react"; -import Image from "next/image"; -import { StarRatingIcons } from "@/components/ui/icons"; - -interface FeaturedStoreCardProps { - agentName: string; - subHeading: string; - agentImage: string; - creatorImage?: string; - creatorName: string; - description: string; // Added description prop - runs: number; - rating: number; - onClick: () => void; - backgroundColor: string; -} - -export const FeaturedStoreCard: React.FC = ({ - agentName, - subHeading, - agentImage, - creatorImage, - creatorName, - description, - runs, - rating, - onClick, - backgroundColor, -}) => { - return ( -
-
-

- {agentName} -

-
- {subHeading} -
-
- -
-
- by {creatorName} -
- -
- {`${agentName} -
-
- {description} -
-
- {creatorImage && ( -
- {`${creatorName} -
- )} -
- -
-
- {runs.toLocaleString()} runs -
-
-
- {rating.toFixed(1)} -
-
- {StarRatingIcons(rating)} -
-
-
-
-
- ); -}; diff --git a/autogpt_platform/frontend/src/components/agptui/composite/FeaturedSection.stories.tsx b/autogpt_platform/frontend/src/components/agptui/composite/FeaturedSection.stories.tsx index b57b6477f3..d494a428fe 100644 --- a/autogpt_platform/frontend/src/components/agptui/composite/FeaturedSection.stories.tsx +++ b/autogpt_platform/frontend/src/components/agptui/composite/FeaturedSection.stories.tsx @@ -1,6 +1,7 @@ import type { Meta, StoryObj } from "@storybook/react"; -import { FeaturedAgent, FeaturedSection } from "./FeaturedSection"; -import { userEvent, within, expect } from "@storybook/test"; +import { FeaturedSection } from "./FeaturedSection"; +import { userEvent, within } from "@storybook/test"; +import { StoreAgent } from "@/lib/autogpt-server-api"; const meta = { title: "AGPT UI/Composite/Featured Agents", @@ -15,7 +16,6 @@ const meta = { tags: ["autodocs"], argTypes: { featuredAgents: { control: "object" }, - // onCardClick: { action: "clicked" }, }, } satisfies Meta; @@ -93,7 +93,7 @@ const mockFeaturedAgents = [ "https://framerusercontent.com/images/KCIpxr9f97EGJgpaoqnjKsrOPwI.jpg", slug: "quicktask", }, -] satisfies FeaturedAgent[]; +] satisfies StoreAgent[]; export const Default: Story = { args: { diff --git a/autogpt_platform/frontend/src/components/agptui/composite/FeaturedSection.tsx b/autogpt_platform/frontend/src/components/agptui/composite/FeaturedSection.tsx index 5c1d257d92..ef91b7de90 100644 --- a/autogpt_platform/frontend/src/components/agptui/composite/FeaturedSection.tsx +++ b/autogpt_platform/frontend/src/components/agptui/composite/FeaturedSection.tsx @@ -1,7 +1,7 @@ "use client"; import * as React from "react"; -import { FeaturedStoreCard } from "@/components/agptui/FeaturedStoreCard"; +import { FeaturedAgentCard } from "@/components/agptui/FeaturedAgentCard"; import { Carousel, CarouselContent, @@ -11,7 +11,8 @@ import { CarouselIndicator, } from "@/components/ui/carousel"; import { useCallback, useState } from "react"; -import { useRouter } from "next/navigation"; +import { StoreAgent } from "@/lib/autogpt-server-api"; +import Link from "next/link"; const BACKGROUND_COLORS = [ "bg-violet-200 dark:bg-violet-800", // #ddd6fe / #5b21b6 @@ -19,33 +20,14 @@ const BACKGROUND_COLORS = [ "bg-green-200 dark:bg-green-800", // #bbf7d0 / #065f46 ]; -export interface FeaturedAgent { - slug: string; - agent_name: string; - agent_image: string; - creator: string; - creator_avatar: string; - sub_heading: string; - description: string; - runs: number; - rating: number; -} - interface FeaturedSectionProps { - featuredAgents: FeaturedAgent[]; + featuredAgents: StoreAgent[]; } export const FeaturedSection: React.FC = ({ featuredAgents, }) => { const [currentSlide, setCurrentSlide] = useState(0); - const router = useRouter(); - - const handleCardClick = (creator: string, slug: string) => { - router.push( - `/marketplace/agent/${encodeURIComponent(creator)}/${encodeURIComponent(slug)}`, - ); - }; const handlePrevSlide = useCallback(() => { setCurrentSlide((prev) => @@ -84,17 +66,14 @@ export const FeaturedSection: React.FC = ({ key={index} className="max-w-[460px] flex-[0_0_auto]" > - handleCardClick(agent.creator, agent.slug)} - /> + + + ))} From 40613fe23e0d50970c367590b5369a6e321dd419 Mon Sep 17 00:00:00 2001 From: Andy Hooker <58448663+andrewhooker2@users.noreply.github.com> Date: Tue, 11 Feb 2025 05:25:15 -0600 Subject: [PATCH 02/12] fix(frontend): Update user profile from marketplace to appropriate profile route (#9465) MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

### Background

Resolves: #9313

The application incorrectly nests the user's profile settings under the `/marketplace` route instead of the appropriate `/profile` route. This PR relocates the `(user)` directory from `/marketplace` to `/profile` and updates the code accordingly.

### Changes 🏗️

1. Relocated the `(user)` directory:
   - Moved the `(user)` directory from `/marketplace/(user)` to `/profile/(user)`
2. Updated the Sidebar and Navbar components:
   - Updated the existing routes from `/marketplace` to the corresponding `/profile` routes (see the sketch after the test plan)

### Checklist 📋

#### For code changes:
- [x] I have clearly listed my changes in the PR description
- [x] I have made a test plan
- [x] I have tested my changes according to the test plan:
Test Plan

- [ ] Navigate to the route of `profile/` and observe the moved page.
- [ ] Navigate to the route of `profile/integrations` and observe the moved page.
- [ ] Navigate to the route of `profile/api_keys` and observe the moved page.
- [ ] Navigate to the route of `profile/profile` and observe the moved page.
- [ ] Navigate to the route of `profile/settings` and observe the moved page.
- [ ] Navigate to the route of `profile/credits` and observe the moved page.
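For reference, the sidebar link groups in `(user)/layout.tsx` end up as follows after this change (copied from the diff below):

```tsx
// Every entry now points at a /profile route instead of /marketplace.
const sidebarLinkGroups = [
  {
    links: [
      { text: "Creator Dashboard", href: "/profile/dashboard" },
      { text: "Agent dashboard", href: "/profile/agent-dashboard" },
      { text: "Billing", href: "/profile/credits" },
      { text: "Integrations", href: "/profile/integrations" },
      { text: "API Keys", href: "/profile/api_keys" },
      { text: "Profile", href: "/profile" },
      { text: "Settings", href: "/profile/settings" },
    ],
  },
];
```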
--- autogpt_platform/frontend/src/app/layout.tsx | 6 +- .../(user)/api_keys/page.tsx | 0 .../(user)/credits/page.tsx | 0 .../(user)/dashboard/page.tsx | 1 - .../(user)/integrations/page.tsx | 3 +- .../(user)/layout.tsx | 14 +- .../profile => profile/(user)}/page.tsx | 0 .../(user)/settings/page.tsx | 0 .../frontend/src/app/profile/page.tsx | 242 ------------------ .../src/components/agptui/Sidebar.tsx | 24 +- 10 files changed, 23 insertions(+), 267 deletions(-) rename autogpt_platform/frontend/src/app/{marketplace => profile}/(user)/api_keys/page.tsx (100%) rename autogpt_platform/frontend/src/app/{marketplace => profile}/(user)/credits/page.tsx (100%) rename autogpt_platform/frontend/src/app/{marketplace => profile}/(user)/dashboard/page.tsx (98%) rename autogpt_platform/frontend/src/app/{marketplace => profile}/(user)/integrations/page.tsx (98%) rename autogpt_platform/frontend/src/app/{marketplace => profile}/(user)/layout.tsx (50%) rename autogpt_platform/frontend/src/app/{marketplace/(user)/profile => profile/(user)}/page.tsx (100%) rename autogpt_platform/frontend/src/app/{marketplace => profile}/(user)/settings/page.tsx (100%) delete mode 100644 autogpt_platform/frontend/src/app/profile/page.tsx diff --git a/autogpt_platform/frontend/src/app/layout.tsx b/autogpt_platform/frontend/src/app/layout.tsx index 9f05958370..e6888f6fb4 100644 --- a/autogpt_platform/frontend/src/app/layout.tsx +++ b/autogpt_platform/frontend/src/app/layout.tsx @@ -66,7 +66,7 @@ export default async function RootLayout({ { icon: IconType.Edit, text: "Edit profile", - href: "/marketplace/profile", + href: "/profile", }, ], }, @@ -75,7 +75,7 @@ export default async function RootLayout({ { icon: IconType.LayoutDashboard, text: "Creator Dashboard", - href: "/marketplace/dashboard", + href: "/profile/dashboard", }, { icon: IconType.UploadCloud, @@ -88,7 +88,7 @@ export default async function RootLayout({ { icon: IconType.Settings, text: "Settings", - href: "/marketplace/settings", + href: "/profile/settings", }, ], }, diff --git a/autogpt_platform/frontend/src/app/marketplace/(user)/api_keys/page.tsx b/autogpt_platform/frontend/src/app/profile/(user)/api_keys/page.tsx similarity index 100% rename from autogpt_platform/frontend/src/app/marketplace/(user)/api_keys/page.tsx rename to autogpt_platform/frontend/src/app/profile/(user)/api_keys/page.tsx diff --git a/autogpt_platform/frontend/src/app/marketplace/(user)/credits/page.tsx b/autogpt_platform/frontend/src/app/profile/(user)/credits/page.tsx similarity index 100% rename from autogpt_platform/frontend/src/app/marketplace/(user)/credits/page.tsx rename to autogpt_platform/frontend/src/app/profile/(user)/credits/page.tsx diff --git a/autogpt_platform/frontend/src/app/marketplace/(user)/dashboard/page.tsx b/autogpt_platform/frontend/src/app/profile/(user)/dashboard/page.tsx similarity index 98% rename from autogpt_platform/frontend/src/app/marketplace/(user)/dashboard/page.tsx rename to autogpt_platform/frontend/src/app/profile/(user)/dashboard/page.tsx index 221e751b40..704f4820b6 100644 --- a/autogpt_platform/frontend/src/app/marketplace/(user)/dashboard/page.tsx +++ b/autogpt_platform/frontend/src/app/profile/(user)/dashboard/page.tsx @@ -2,7 +2,6 @@ import * as React from "react"; import { AgentTable } from "@/components/agptui/AgentTable"; -import { AgentTableRowProps } from "@/components/agptui/AgentTableRow"; import { Button } from "@/components/agptui/Button"; import { Separator } from "@/components/ui/separator"; import { StatusType } from 
"@/components/agptui/Status"; diff --git a/autogpt_platform/frontend/src/app/marketplace/(user)/integrations/page.tsx b/autogpt_platform/frontend/src/app/profile/(user)/integrations/page.tsx similarity index 98% rename from autogpt_platform/frontend/src/app/marketplace/(user)/integrations/page.tsx rename to autogpt_platform/frontend/src/app/profile/(user)/integrations/page.tsx index f92d63344d..8069664017 100644 --- a/autogpt_platform/frontend/src/app/marketplace/(user)/integrations/page.tsx +++ b/autogpt_platform/frontend/src/app/profile/(user)/integrations/page.tsx @@ -2,10 +2,9 @@ import { Button } from "@/components/ui/button"; import { useRouter } from "next/navigation"; import { useCallback, useContext, useMemo, useState } from "react"; -import { Separator } from "@/components/ui/separator"; import { useToast } from "@/components/ui/use-toast"; import { IconKey, IconUser } from "@/components/ui/icons"; -import { LogOutIcon, Trash2Icon } from "lucide-react"; +import { Trash2Icon } from "lucide-react"; import { providerIcons } from "@/components/integrations/credentials-input"; import { CredentialsProvidersContext } from "@/components/integrations/credentials-provider"; import { diff --git a/autogpt_platform/frontend/src/app/marketplace/(user)/layout.tsx b/autogpt_platform/frontend/src/app/profile/(user)/layout.tsx similarity index 50% rename from autogpt_platform/frontend/src/app/marketplace/(user)/layout.tsx rename to autogpt_platform/frontend/src/app/profile/(user)/layout.tsx index ea88d5edc4..72b4d09fed 100644 --- a/autogpt_platform/frontend/src/app/marketplace/(user)/layout.tsx +++ b/autogpt_platform/frontend/src/app/profile/(user)/layout.tsx @@ -5,13 +5,13 @@ export default function Layout({ children }: { children: React.ReactNode }) { const sidebarLinkGroups = [ { links: [ - { text: "Creator Dashboard", href: "/marketplace/dashboard" }, - { text: "Agent dashboard", href: "/marketplace/agent-dashboard" }, - { text: "Billing", href: "/marketplace/credits" }, - { text: "Integrations", href: "/marketplace/integrations" }, - { text: "API Keys", href: "/marketplace/api_keys" }, - { text: "Profile", href: "/marketplace/profile" }, - { text: "Settings", href: "/marketplace/settings" }, + { text: "Creator Dashboard", href: "/profile/dashboard" }, + { text: "Agent dashboard", href: "/profile/agent-dashboard" }, + { text: "Billing", href: "/profile/credits" }, + { text: "Integrations", href: "/profile/integrations" }, + { text: "API Keys", href: "/profile/api_keys" }, + { text: "Profile", href: "/profile" }, + { text: "Settings", href: "/profile/settings" }, ], }, ]; diff --git a/autogpt_platform/frontend/src/app/marketplace/(user)/profile/page.tsx b/autogpt_platform/frontend/src/app/profile/(user)/page.tsx similarity index 100% rename from autogpt_platform/frontend/src/app/marketplace/(user)/profile/page.tsx rename to autogpt_platform/frontend/src/app/profile/(user)/page.tsx diff --git a/autogpt_platform/frontend/src/app/marketplace/(user)/settings/page.tsx b/autogpt_platform/frontend/src/app/profile/(user)/settings/page.tsx similarity index 100% rename from autogpt_platform/frontend/src/app/marketplace/(user)/settings/page.tsx rename to autogpt_platform/frontend/src/app/profile/(user)/settings/page.tsx diff --git a/autogpt_platform/frontend/src/app/profile/page.tsx b/autogpt_platform/frontend/src/app/profile/page.tsx deleted file mode 100644 index 445077dabe..0000000000 --- a/autogpt_platform/frontend/src/app/profile/page.tsx +++ /dev/null @@ -1,242 +0,0 @@ -"use client"; -import { 
Button } from "@/components/ui/button"; -import { useRouter } from "next/navigation"; -import { useCallback, useContext, useMemo, useState } from "react"; -import { Separator } from "@/components/ui/separator"; -import { useToast } from "@/components/ui/use-toast"; -import { IconKey, IconUser } from "@/components/ui/icons"; -import { LogOutIcon, Trash2Icon } from "lucide-react"; -import { providerIcons } from "@/components/integrations/credentials-input"; -import { CredentialsProvidersContext } from "@/components/integrations/credentials-provider"; -import { - Table, - TableBody, - TableCell, - TableHead, - TableHeader, - TableRow, -} from "@/components/ui/table"; -import { CredentialsProviderName } from "@/lib/autogpt-server-api"; -import { - AlertDialog, - AlertDialogAction, - AlertDialogCancel, - AlertDialogContent, - AlertDialogDescription, - AlertDialogFooter, - AlertDialogHeader, - AlertDialogTitle, -} from "@/components/ui/alert-dialog"; -import useSupabase from "@/hooks/useSupabase"; -import Spinner from "@/components/Spinner"; - -export default function PrivatePage() { - const { supabase, user, isUserLoading } = useSupabase(); - const router = useRouter(); - const providers = useContext(CredentialsProvidersContext); - const { toast } = useToast(); - - const [confirmationDialogState, setConfirmationDialogState] = useState< - | { - open: true; - message: string; - onConfirm: () => void; - onReject: () => void; - } - | { open: false } - >({ open: false }); - - const removeCredentials = useCallback( - async ( - provider: CredentialsProviderName, - id: string, - force: boolean = false, - ) => { - if (!providers || !providers[provider]) { - return; - } - - let result; - try { - result = await providers[provider].deleteCredentials(id, force); - } catch (error: any) { - toast({ - title: "Something went wrong when deleting credentials: " + error, - variant: "destructive", - duration: 2000, - }); - setConfirmationDialogState({ open: false }); - return; - } - if (result.deleted) { - if (result.revoked) { - toast({ - title: "Credentials deleted", - duration: 2000, - }); - } else { - toast({ - title: "Credentials deleted from AutoGPT", - description: `You may also manually remove the connection to AutoGPT at ${provider}!`, - duration: 3000, - }); - } - setConfirmationDialogState({ open: false }); - } else if (result.need_confirmation) { - setConfirmationDialogState({ - open: true, - message: result.message, - onConfirm: () => removeCredentials(provider, id, true), - onReject: () => setConfirmationDialogState({ open: false }), - }); - } - }, - [providers, toast], - ); - - //TODO: remove when the way system credentials are handled is updated - // This contains ids for built-in "Use Credits for X" credentials - const hiddenCredentials = useMemo( - () => [ - "744fdc56-071a-4761-b5a5-0af0ce10a2b5", // Ollama - "fdb7f412-f519-48d1-9b5f-d2f73d0e01fe", // Revid - "760f84fc-b270-42de-91f6-08efe1b512d0", // Ideogram - "6b9fc200-4726-4973-86c9-cd526f5ce5db", // Replicate - "53c25cb8-e3ee-465c-a4d1-e75a4c899c2a", // OpenAI - "24e5d942-d9e3-4798-8151-90143ee55629", // Anthropic - "4ec22295-8f97-4dd1-b42b-2c6957a02545", // Groq - "7f7b0654-c36b-4565-8fa7-9a52575dfae2", // D-ID - "7f26de70-ba0d-494e-ba76-238e65e7b45f", // Jina - "66f20754-1b81-48e4-91d0-f4f0dd82145f", // Unreal Speech - "b5a0e27d-0c98-4df3-a4b9-10193e1f3c40", // Open Router - "6c0f5bd0-9008-4638-9d79-4b40b631803e", // FAL - "96153e04-9c6c-4486-895f-5bb683b1ecec", // Exa - "78d19fd7-4d59-4a16-8277-3ce310acf2b7", // E2B - 
"96b83908-2789-4dec-9968-18f0ece4ceb3", // Nvidia - "ed55ac19-356e-4243-a6cb-bc599e9b716f", // Mem0 - ], - [], - ); - - if (isUserLoading) { - return ; - } - - if (!user || !supabase) { - router.push("/login"); - return null; - } - - const allCredentials = providers - ? Object.values(providers).flatMap((provider) => - [ - ...provider.savedOAuthCredentials, - ...provider.savedApiKeys, - ...provider.savedUserPasswordCredentials, - ] - .filter((cred) => !hiddenCredentials.includes(cred.id)) - .map((credentials) => ({ - ...credentials, - provider: provider.provider, - providerName: provider.providerName, - ProviderIcon: providerIcons[provider.provider], - TypeIcon: { - oauth2: IconUser, - api_key: IconKey, - user_password: IconKey, - }[credentials.type], - })), - ) - : []; - - return ( -
-
-

- Hello {user.email} -

- -
- -

Connections & Credentials

- - - - Provider - Name - Actions - - - - {allCredentials.map((cred) => ( - - -
- - {cred.providerName} -
-
- -
- - {cred.title || cred.username} -
- - { - { - oauth2: "OAuth2 credentials", - api_key: "API key", - user_password: "User password", - }[cred.type] - }{" "} - - {cred.id} - -
- - - -
- ))} -
-
- - - - - Are you sure? - - {confirmationDialogState.open && confirmationDialogState.message} - - - - - confirmationDialogState.open && - confirmationDialogState.onReject() - } - > - Cancel - - - confirmationDialogState.open && - confirmationDialogState.onConfirm() - } - > - Continue - - - - -
- ); -} diff --git a/autogpt_platform/frontend/src/components/agptui/Sidebar.tsx b/autogpt_platform/frontend/src/components/agptui/Sidebar.tsx index 637b3202a3..3a16fa211e 100644 --- a/autogpt_platform/frontend/src/components/agptui/Sidebar.tsx +++ b/autogpt_platform/frontend/src/components/agptui/Sidebar.tsx @@ -46,7 +46,7 @@ export const Sidebar: React.FC = ({ linkGroups }) => {
@@ -56,7 +56,7 @@ export const Sidebar: React.FC = ({ linkGroups }) => { {stripeAvailable && ( @@ -66,7 +66,7 @@ export const Sidebar: React.FC = ({ linkGroups }) => { )} @@ -75,7 +75,7 @@ export const Sidebar: React.FC = ({ linkGroups }) => {
@@ -84,7 +84,7 @@ export const Sidebar: React.FC = ({ linkGroups }) => {
@@ -93,7 +93,7 @@ export const Sidebar: React.FC = ({ linkGroups }) => {
@@ -110,7 +110,7 @@ export const Sidebar: React.FC = ({ linkGroups }) => {
@@ -120,7 +120,7 @@ export const Sidebar: React.FC = ({ linkGroups }) => { {stripeAvailable && ( @@ -130,7 +130,7 @@ export const Sidebar: React.FC = ({ linkGroups }) => { )} @@ -139,7 +139,7 @@ export const Sidebar: React.FC = ({ linkGroups }) => {
@@ -148,7 +148,7 @@ export const Sidebar: React.FC = ({ linkGroups }) => {
@@ -157,7 +157,7 @@ export const Sidebar: React.FC = ({ linkGroups }) => { From 91df11b44ce63150f3afa7be45fc4726e19cc175 Mon Sep 17 00:00:00 2001 From: Nicholas Tindle Date: Tue, 11 Feb 2025 14:19:33 -0600 Subject: [PATCH 03/12] ref(backend): use lowercase types Co-authored-by: Reinier van der Leer --- autogpt_platform/backend/backend/data/notifications.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autogpt_platform/backend/backend/data/notifications.py b/autogpt_platform/backend/backend/data/notifications.py index 4e85efdf20..1a5283df14 100644 --- a/autogpt_platform/backend/backend/data/notifications.py +++ b/autogpt_platform/backend/backend/data/notifications.py @@ -141,7 +141,7 @@ class NotificationEventModel(BaseModel, Generic[T_co]): def get_data_type( notification_type: NotificationType, -) -> Type[BaseNotificationData]: +) -> type[BaseNotificationData]: return { NotificationType.AGENT_RUN: AgentRunData, NotificationType.ZERO_BALANCE: ZeroBalanceData, From 0e99bdc742a56e033a047dad221b76da7479c10d Mon Sep 17 00:00:00 2001 From: Nicholas Tindle Date: Tue, 11 Feb 2025 14:20:04 -0600 Subject: [PATCH 04/12] ref(backend): smash migrations and apply changes to api --- autogpt_platform/backend/backend/data/user.py | 94 +++++++++++++++---- .../migration.sql | 13 --- .../migration.sql | 31 ------ .../migration.sql | 8 -- .../migration.sql | 71 ++++++++++++++ autogpt_platform/backend/schema.prisma | 38 +++++--- 6 files changed, 173 insertions(+), 82 deletions(-) delete mode 100644 autogpt_platform/backend/migrations/20250207203559_add_user_notification_preferences/migration.sql delete mode 100644 autogpt_platform/backend/migrations/20250210115850_add_notification_tables/migration.sql delete mode 100644 autogpt_platform/backend/migrations/20250210135633_add_unique_constraint_on_user_notification_batch/migration.sql create mode 100644 autogpt_platform/backend/migrations/20250211201352_add_notiifcations_tables_and_config/migration.sql diff --git a/autogpt_platform/backend/backend/data/user.py b/autogpt_platform/backend/backend/data/user.py index 80dbd5f173..f424cfc99c 100644 --- a/autogpt_platform/backend/backend/data/user.py +++ b/autogpt_platform/backend/backend/data/user.py @@ -5,11 +5,13 @@ from typing import Optional, cast from autogpt_libs.auth.models import DEFAULT_USER_ID from fastapi import HTTPException from prisma import Json +from prisma.enums import NotificationType from prisma.models import User from backend.data.db import prisma from backend.data.model import UserIntegrations, UserMetadata, UserMetadataRaw from backend.data.notifications import NotificationPreference +from backend.server.v2.store.exceptions import DatabaseError from backend.util.encryption import JSONCryptor logger = logging.getLogger(__name__) @@ -31,8 +33,13 @@ async def get_or_create_user(user_data: dict) -> User: "id": user_id, "email": user_email, "name": user_data.get("user_metadata", {}).get("name"), + "UserNotificationPreference": {"create": {"userId": user_id}}, } ) + if not user.UserNotificationPreference: + user.UserNotificationPreference = ( + await prisma.usernotificationpreference.create(data={"userId": user_id}) + ) return User.model_validate(user) @@ -159,19 +166,74 @@ async def get_active_users_ids() -> list[str]: async def get_user_notification_preference(user_id: str) -> NotificationPreference: - user = await User.prisma().find_unique_or_raise( - where={"id": user_id}, - include={ - "UserNotificationPreference": True, - }, - ) - notification_preference = 
NotificationPreference( - user_id=user.id, - email=user.email, - # TODO with the UI when it comes in - preferences={}, - daily_limit=3, - emails_sent_today=0, - last_reset_date=datetime.now(), - ) - return NotificationPreference.model_validate(notification_preference) + try: + user = await User.prisma().find_unique_or_raise( + where={"id": user_id}, + include={ + "UserNotificationPreference": True, + }, + ) + + # enable notifications by default if user has no notification preference (shouldn't ever happen though) + preferences: dict[NotificationType, bool] = { + NotificationType.AGENT_RUN: ( + user.UserNotificationPreference.notifyOnAgentRun + if user.UserNotificationPreference + else True + ), + NotificationType.ZERO_BALANCE: ( + user.UserNotificationPreference.notifyOnZeroBalance + if user.UserNotificationPreference + else True + ), + NotificationType.LOW_BALANCE: ( + user.UserNotificationPreference.notifyOnLowBalance + if user.UserNotificationPreference + else True + ), + NotificationType.BLOCK_EXECUTION_FAILED: ( + user.UserNotificationPreference.notifyOnBlockExecutionFailed + if user.UserNotificationPreference + else True + ), + NotificationType.CONTINUOUS_AGENT_ERROR: ( + user.UserNotificationPreference.notifyOnContinuousAgentError + if user.UserNotificationPreference + else True + ), + NotificationType.DAILY_SUMMARY: ( + user.UserNotificationPreference.notifyOnDailySummary + if user.UserNotificationPreference + else True + ), + NotificationType.WEEKLY_SUMMARY: ( + user.UserNotificationPreference.notifyOnWeeklySummary + if user.UserNotificationPreference + else True + ), + NotificationType.MONTHLY_SUMMARY: ( + user.UserNotificationPreference.notifyOnMonthlySummary + if user.UserNotificationPreference + else True + ), + } + daily_limit = ( + user.UserNotificationPreference.maxEmailsPerDay + if user.UserNotificationPreference + else 3 + ) + notification_preference = NotificationPreference( + user_id=user.id, + email=user.email, + preferences=preferences, + daily_limit=daily_limit, + # TODO with other changes later, for now we just will email them + emails_sent_today=0, + last_reset_date=datetime.now(), + ) + return NotificationPreference.model_validate(notification_preference) + + except Exception as e: + raise DatabaseError( + f"Failed to upsert user notification preference for user {user_id}: {e}" + ) from e diff --git a/autogpt_platform/backend/migrations/20250207203559_add_user_notification_preferences/migration.sql b/autogpt_platform/backend/migrations/20250207203559_add_user_notification_preferences/migration.sql deleted file mode 100644 index c9c9357930..0000000000 --- a/autogpt_platform/backend/migrations/20250207203559_add_user_notification_preferences/migration.sql +++ /dev/null @@ -1,13 +0,0 @@ --- CreateTable -CREATE TABLE "UserNotificationPreference" ( - "id" TEXT NOT NULL, - "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, - "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, - "userId" TEXT NOT NULL, - "preferences" JSONB NOT NULL DEFAULT '{}', - - CONSTRAINT "UserNotificationPreference_pkey" PRIMARY KEY ("id") -); - --- AddForeignKey -ALTER TABLE "UserNotificationPreference" ADD CONSTRAINT "UserNotificationPreference_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE; \ No newline at end of file diff --git a/autogpt_platform/backend/migrations/20250210115850_add_notification_tables/migration.sql b/autogpt_platform/backend/migrations/20250210115850_add_notification_tables/migration.sql deleted file mode 
100644 index 96938c6c39..0000000000 --- a/autogpt_platform/backend/migrations/20250210115850_add_notification_tables/migration.sql +++ /dev/null @@ -1,31 +0,0 @@ --- CreateEnum -CREATE TYPE "NotificationType" AS ENUM ('AGENT_RUN', 'ZERO_BALANCE', 'LOW_BALANCE', 'BLOCK_EXECUTION_FAILED', 'CONTINUOUS_AGENT_ERROR', 'DAILY_SUMMARY', 'WEEKLY_SUMMARY', 'MONTHLY_SUMMARY'); - --- CreateTable -CREATE TABLE "NotificationEvent" ( - "id" TEXT NOT NULL, - "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, - "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, - "userNotificationBatchId" TEXT, - "type" "NotificationType" NOT NULL, - "data" JSONB NOT NULL, - - CONSTRAINT "NotificationEvent_pkey" PRIMARY KEY ("id") -); - --- CreateTable -CREATE TABLE "UserNotificationBatch" ( - "id" TEXT NOT NULL, - "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, - "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, - "userId" TEXT NOT NULL, - "type" "NotificationType" NOT NULL, - - CONSTRAINT "UserNotificationBatch_pkey" PRIMARY KEY ("id") -); - --- AddForeignKey -ALTER TABLE "NotificationEvent" ADD CONSTRAINT "NotificationEvent_userNotificationBatchId_fkey" FOREIGN KEY ("userNotificationBatchId") REFERENCES "UserNotificationBatch"("id") ON DELETE SET NULL ON UPDATE CASCADE; - --- AddForeignKey -ALTER TABLE "UserNotificationBatch" ADD CONSTRAINT "UserNotificationBatch_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE; diff --git a/autogpt_platform/backend/migrations/20250210135633_add_unique_constraint_on_user_notification_batch/migration.sql b/autogpt_platform/backend/migrations/20250210135633_add_unique_constraint_on_user_notification_batch/migration.sql deleted file mode 100644 index b82fd37f76..0000000000 --- a/autogpt_platform/backend/migrations/20250210135633_add_unique_constraint_on_user_notification_batch/migration.sql +++ /dev/null @@ -1,8 +0,0 @@ -/* - Warnings: - - - A unique constraint covering the columns `[userId,type]` on the table `UserNotificationBatch` will be added. If there are existing duplicate values, this will fail. - -*/ --- CreateIndex -CREATE UNIQUE INDEX "UserNotificationBatch_userId_type_key" ON "UserNotificationBatch"("userId", "type"); diff --git a/autogpt_platform/backend/migrations/20250211201352_add_notiifcations_tables_and_config/migration.sql b/autogpt_platform/backend/migrations/20250211201352_add_notiifcations_tables_and_config/migration.sql new file mode 100644 index 0000000000..b8592f657d --- /dev/null +++ b/autogpt_platform/backend/migrations/20250211201352_add_notiifcations_tables_and_config/migration.sql @@ -0,0 +1,71 @@ +/* + Warnings: + + - A unique constraint covering the columns `[userNotificationPreferenceId]` on the table `User` will be added. If there are existing duplicate values, this will fail. 
+ +*/ +-- CreateEnum +CREATE TYPE "NotificationType" AS ENUM ('AGENT_RUN', 'ZERO_BALANCE', 'LOW_BALANCE', 'BLOCK_EXECUTION_FAILED', 'CONTINUOUS_AGENT_ERROR', 'DAILY_SUMMARY', 'WEEKLY_SUMMARY', 'MONTHLY_SUMMARY'); + +-- AlterTable +ALTER TABLE "User" ADD COLUMN "userNotificationPreferenceId" TEXT; + +-- CreateTable +CREATE TABLE "NotificationEvent" ( + "id" TEXT NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "userNotificationBatchId" TEXT, + "type" "NotificationType" NOT NULL, + "data" JSONB NOT NULL, + + CONSTRAINT "NotificationEvent_pkey" PRIMARY KEY ("id") +); + +-- CreateTable +CREATE TABLE "UserNotificationBatch" ( + "id" TEXT NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "userId" TEXT NOT NULL, + "type" "NotificationType" NOT NULL, + + CONSTRAINT "UserNotificationBatch_pkey" PRIMARY KEY ("id") +); + +-- CreateTable +CREATE TABLE "UserNotificationPreference" ( + "id" TEXT NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "userId" TEXT NOT NULL, + "maxEmailsPerDay" INTEGER NOT NULL DEFAULT 3, + "notifyOnAgentRun" BOOLEAN NOT NULL DEFAULT true, + "notifyOnZeroBalance" BOOLEAN NOT NULL DEFAULT true, + "notifyOnLowBalance" BOOLEAN NOT NULL DEFAULT true, + "notifyOnBlockExecutionFailed" BOOLEAN NOT NULL DEFAULT true, + "notifyOnContinuousAgentError" BOOLEAN NOT NULL DEFAULT true, + "notifyOnDailySummary" BOOLEAN NOT NULL DEFAULT true, + "notifyOnWeeklySummary" BOOLEAN NOT NULL DEFAULT true, + "notifyOnMonthlySummary" BOOLEAN NOT NULL DEFAULT true, + + CONSTRAINT "UserNotificationPreference_pkey" PRIMARY KEY ("id") +); + +-- CreateIndex +CREATE UNIQUE INDEX "UserNotificationBatch_userId_type_key" ON "UserNotificationBatch"("userId", "type"); + +-- CreateIndex +CREATE UNIQUE INDEX "UserNotificationPreference_userId_key" ON "UserNotificationPreference"("userId"); + +-- CreateIndex +CREATE UNIQUE INDEX "User_userNotificationPreferenceId_key" ON "User"("userNotificationPreferenceId"); + +-- AddForeignKey +ALTER TABLE "User" ADD CONSTRAINT "User_userNotificationPreferenceId_fkey" FOREIGN KEY ("userNotificationPreferenceId") REFERENCES "UserNotificationPreference"("id") ON DELETE SET NULL ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "NotificationEvent" ADD CONSTRAINT "NotificationEvent_userNotificationBatchId_fkey" FOREIGN KEY ("userNotificationBatchId") REFERENCES "UserNotificationBatch"("id") ON DELETE SET NULL ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "UserNotificationBatch" ADD CONSTRAINT "UserNotificationBatch_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE; diff --git a/autogpt_platform/backend/schema.prisma b/autogpt_platform/backend/schema.prisma index 352c8be737..a82bb20e62 100644 --- a/autogpt_platform/backend/schema.prisma +++ b/autogpt_platform/backend/schema.prisma @@ -13,17 +13,19 @@ generator client { // User model to mirror Auth provider users model User { - id String @id // This should match the Supabase user ID - email String @unique - name String? - createdAt DateTime @default(now()) - updatedAt DateTime @updatedAt - metadata Json @default("{}") - integrations String @default("") - stripeCustomerId String? - topUpConfig Json? + id String @id // This should match the Supabase user ID + email String @unique + name String? 
+ createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + metadata Json @default("{}") + integrations String @default("") + stripeCustomerId String? + topUpConfig Json? + userNotificationPreferenceId String? @unique // Relations + AgentGraphs AgentGraph[] AgentGraphExecutions AgentGraphExecution[] AnalyticsDetails AnalyticsDetails[] @@ -39,8 +41,8 @@ model User { StoreListingSubmission StoreListingSubmission[] APIKeys APIKey[] IntegrationWebhooks IntegrationWebhook[] - UserNotificationPreference UserNotificationPreference[] UserNotificationBatch UserNotificationBatch[] + UserNotificationPreference UserNotificationPreference? @relation(fields: [userNotificationPreferenceId], references: [id]) @@index([id]) @@index([email]) @@ -156,11 +158,19 @@ model UserNotificationPreference { createdAt DateTime @default(now()) updatedAt DateTime @default(now()) @updatedAt - userId String - user User @relation(fields: [userId], references: [id], onDelete: Cascade) + userId String @unique // Add @unique here + User User? - // Dict[NotificationType, bool] - preferences Json @default("{}") + maxEmailsPerDay Int @default(3) + + notifyOnAgentRun Boolean @default(true) + notifyOnZeroBalance Boolean @default(true) + notifyOnLowBalance Boolean @default(true) + notifyOnBlockExecutionFailed Boolean @default(true) + notifyOnContinuousAgentError Boolean @default(true) + notifyOnDailySummary Boolean @default(true) + notifyOnWeeklySummary Boolean @default(true) + notifyOnMonthlySummary Boolean @default(true) } // For the library page From e2441c52206b6f9b4c9d217ba862c1211c1e72f9 Mon Sep 17 00:00:00 2001 From: Nicholas Tindle Date: Tue, 11 Feb 2025 14:23:00 -0600 Subject: [PATCH 05/12] Apply suggestions from code review Co-authored-by: Reinier van der Leer --- autogpt_platform/backend/backend/data/notifications.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/autogpt_platform/backend/backend/data/notifications.py b/autogpt_platform/backend/backend/data/notifications.py index 1a5283df14..6674a14324 100644 --- a/autogpt_platform/backend/backend/data/notifications.py +++ b/autogpt_platform/backend/backend/data/notifications.py @@ -205,10 +205,10 @@ class NotificationTypeOverride: class NotificationPreference(BaseModel): user_id: str email: EmailStr - preferences: dict[NotificationType, bool] = {} # Which notifications they want + preferences: dict[NotificationType, bool] = Field(default_factory=dict, description="Which notifications the user wants") daily_limit: int = 10 # Max emails per day emails_sent_today: int = 0 - last_reset_date: datetime = datetime.now() + last_reset_date: datetime = Field(default_factory=datetime.now) def get_batch_delay(notification_type: NotificationType) -> timedelta: From bf49a0a08ad53a481fea26b70cb2b8d0dd6e434a Mon Sep 17 00:00:00 2001 From: Nicholas Tindle Date: Tue, 11 Feb 2025 14:26:33 -0600 Subject: [PATCH 06/12] Update autogpt_platform/backend/backend/data/notifications.py --- autogpt_platform/backend/backend/data/notifications.py | 1 - 1 file changed, 1 deletion(-) diff --git a/autogpt_platform/backend/backend/data/notifications.py b/autogpt_platform/backend/backend/data/notifications.py index 6674a14324..3cc5e2c0f3 100644 --- a/autogpt_platform/backend/backend/data/notifications.py +++ b/autogpt_platform/backend/backend/data/notifications.py @@ -235,7 +235,6 @@ async def create_or_add_to_user_notification_batch( ].model_validate_json(data) # Serialize the data - # serialized_data = json.dumps(notification_data.data.model_dump()) json_data: Json = 
Json(notification_data.data.model_dump_json()) # First try to find existing batch From 11cda467240bf24b75b1a615b753c53f1de0a730 Mon Sep 17 00:00:00 2001 From: Nicholas Tindle Date: Tue, 11 Feb 2025 15:01:15 -0600 Subject: [PATCH 07/12] ref(backend): pr changes --- autogpt_platform/backend/backend/data/notifications.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/autogpt_platform/backend/backend/data/notifications.py b/autogpt_platform/backend/backend/data/notifications.py index 3cc5e2c0f3..ffab3b0fd3 100644 --- a/autogpt_platform/backend/backend/data/notifications.py +++ b/autogpt_platform/backend/backend/data/notifications.py @@ -1,7 +1,7 @@ import logging from datetime import datetime, timedelta from enum import Enum -from typing import Annotated, Generic, Optional, Type, TypeVar, Union +from typing import Annotated, Generic, Optional, TypeVar, Union from prisma import Json from prisma.enums import NotificationType @@ -19,7 +19,7 @@ logger = logging.getLogger(__name__) T_co = TypeVar("T_co", bound="BaseNotificationData", covariant=True) -class BatchingStrategy(str, Enum): +class BatchingStrategy(Enum): IMMEDIATE = "immediate" # Send right away (errors, critical notifications) HOURLY = "hourly" # Batch for up to an hour (usage reports) DAILY = "daily" # Daily digest (summary notifications) @@ -205,7 +205,9 @@ class NotificationTypeOverride: class NotificationPreference(BaseModel): user_id: str email: EmailStr - preferences: dict[NotificationType, bool] = Field(default_factory=dict, description="Which notifications the user wants") + preferences: dict[NotificationType, bool] = Field( + default_factory=dict, description="Which notifications the user wants" + ) daily_limit: int = 10 # Max emails per day emails_sent_today: int = 0 last_reset_date: datetime = Field(default_factory=datetime.now) From 957ebe697f86b5e13dbc02818953ea9bdefebb5e Mon Sep 17 00:00:00 2001 From: Nicholas Tindle Date: Tue, 11 Feb 2025 15:08:33 -0600 Subject: [PATCH 08/12] refactor(backend): raise errors from db queries --- .../backend/backend/data/execution.py | 36 ++-- .../backend/backend/data/notifications.py | 184 ++++++++++-------- autogpt_platform/backend/backend/data/user.py | 28 +-- 3 files changed, 143 insertions(+), 105 deletions(-) diff --git a/autogpt_platform/backend/backend/data/execution.py b/autogpt_platform/backend/backend/data/execution.py index 15795ad945..f9bbccf864 100644 --- a/autogpt_platform/backend/backend/data/execution.py +++ b/autogpt_platform/backend/backend/data/execution.py @@ -15,6 +15,7 @@ from pydantic import BaseModel from backend.data.block import BlockData, BlockInput, CompletedBlockOutput from backend.data.includes import EXECUTION_RESULT_INCLUDE, GRAPH_EXECUTION_INCLUDE from backend.data.queue import AsyncRedisEventBus, RedisEventBus +from backend.server.v2.store.exceptions import DatabaseError from backend.util import json, mock from backend.util.settings import Config @@ -367,21 +368,26 @@ async def get_execution_results(graph_exec_id: str) -> list[ExecutionResult]: async def get_executions_in_timerange( user_id: str, start_time: str, end_time: str ) -> list[ExecutionResult]: - executions = await AgentGraphExecution.prisma().find_many( - where={ - "AND": [ - { - "startedAt": { - "gte": datetime.fromisoformat(start_time), - "lte": datetime.fromisoformat(end_time), - } - }, - {"userId": user_id}, - ] - }, - include=GRAPH_EXECUTION_INCLUDE, - ) - return [ExecutionResult.from_graph(execution) for execution in executions] + try: + executions = await 
AgentGraphExecution.prisma().find_many( + where={ + "AND": [ + { + "startedAt": { + "gte": datetime.fromisoformat(start_time), + "lte": datetime.fromisoformat(end_time), + } + }, + {"userId": user_id}, + ] + }, + include=GRAPH_EXECUTION_INCLUDE, + ) + return [ExecutionResult.from_graph(execution) for execution in executions] + except Exception as e: + raise DatabaseError( + f"Failed to get executions in timerange {start_time} to {end_time} for user {user_id}: {e}" + ) from e LIST_SPLIT = "_$_" diff --git a/autogpt_platform/backend/backend/data/notifications.py b/autogpt_platform/backend/backend/data/notifications.py index ffab3b0fd3..a4549de632 100644 --- a/autogpt_platform/backend/backend/data/notifications.py +++ b/autogpt_platform/backend/backend/data/notifications.py @@ -11,6 +11,8 @@ from prisma.types import UserNotificationBatchWhereInput # from backend.notifications.models import NotificationEvent from pydantic import BaseModel, EmailStr, Field, field_validator +from backend.server.v2.store.exceptions import DatabaseError + from .db import transaction logger = logging.getLogger(__name__) @@ -228,107 +230,131 @@ async def create_or_add_to_user_notification_batch( notification_type: NotificationType, data: str, # type: 'NotificationEventModel' ) -> dict: - logger.info( - f"Creating or adding to notification batch for {user_id} with type {notification_type} and data {data}" - ) + try: + logger.info( + f"Creating or adding to notification batch for {user_id} with type {notification_type} and data {data}" + ) - notification_data = NotificationEventModel[ - get_data_type(notification_type) - ].model_validate_json(data) + notification_data = NotificationEventModel[ + get_data_type(notification_type) + ].model_validate_json(data) - # Serialize the data - json_data: Json = Json(notification_data.data.model_dump_json()) + # Serialize the data + json_data: Json = Json(notification_data.data.model_dump_json()) - # First try to find existing batch - existing_batch = await UserNotificationBatch.prisma().find_unique( - where={ - "userId_type": { - "userId": user_id, - "type": notification_type, - } - }, - include={"notifications": True}, - ) - - if not existing_batch: - async with transaction() as tx: - notification_event = await tx.notificationevent.create( - data={ - "type": notification_type, - "data": json_data, - } - ) - - # Create new batch - resp = await tx.usernotificationbatch.create( - data={ + # First try to find existing batch + existing_batch = await UserNotificationBatch.prisma().find_unique( + where={ + "userId_type": { "userId": user_id, "type": notification_type, - "notifications": {"connect": [{"id": notification_event.id}]}, - }, - include={"notifications": True}, - ) - return resp.model_dump() - else: - async with transaction() as tx: - notification_event = await tx.notificationevent.create( - data={ - "type": notification_type, - "data": json_data, - "UserNotificationBatch": {"connect": {"id": existing_batch.id}}, } - ) - # Add to existing batch - resp = await tx.usernotificationbatch.update( - where={"id": existing_batch.id}, - data={"notifications": {"connect": [{"id": notification_event.id}]}}, - include={"notifications": True}, - ) - if not resp: - raise Exception("Failed to add to existing batch") - return resp.model_dump() + }, + include={"notifications": True}, + ) + + if not existing_batch: + async with transaction() as tx: + notification_event = await tx.notificationevent.create( + data={ + "type": notification_type, + "data": json_data, + } + ) + + # Create new batch 
+ resp = await tx.usernotificationbatch.create( + data={ + "userId": user_id, + "type": notification_type, + "notifications": {"connect": [{"id": notification_event.id}]}, + }, + include={"notifications": True}, + ) + return resp.model_dump() + else: + async with transaction() as tx: + notification_event = await tx.notificationevent.create( + data={ + "type": notification_type, + "data": json_data, + "UserNotificationBatch": {"connect": {"id": existing_batch.id}}, + } + ) + # Add to existing batch + resp = await tx.usernotificationbatch.update( + where={"id": existing_batch.id}, + data={ + "notifications": {"connect": [{"id": notification_event.id}]} + }, + include={"notifications": True}, + ) + if not resp: + raise DatabaseError( + f"Failed to add notification event {notification_event.id} to existing batch {existing_batch.id}" + ) + return resp.model_dump() + except Exception as e: + raise DatabaseError( + f"Failed to create or add to notification batch for user {user_id} and type {notification_type}: {e}" + ) from e async def get_user_notification_last_message_in_batch( user_id: str, notification_type: NotificationType, ) -> NotificationEvent | None: - batch = await UserNotificationBatch.prisma().find_first( - where={"userId": user_id, "type": notification_type}, - order={"createdAt": "desc"}, - ) - if not batch: - return None - if not batch.notifications: - return None - return batch.notifications[-1] + try: + batch = await UserNotificationBatch.prisma().find_first( + where={"userId": user_id, "type": notification_type}, + order={"createdAt": "desc"}, + ) + if not batch: + return None + if not batch.notifications: + return None + return batch.notifications[-1] + except Exception as e: + raise DatabaseError( + f"Failed to get user notification last message in batch for user {user_id} and type {notification_type}: {e}" + ) from e async def empty_user_notification_batch( user_id: str, notification_type: NotificationType ) -> None: - async with transaction() as tx: - await tx.notificationevent.delete_many( - where={ - "UserNotificationBatch": { - "is": {"userId": user_id, "type": notification_type} + try: + async with transaction() as tx: + await tx.notificationevent.delete_many( + where={ + "UserNotificationBatch": { + "is": {"userId": user_id, "type": notification_type} + } } - } - ) - - await tx.usernotificationbatch.delete_many( - where=UserNotificationBatchWhereInput( - userId=user_id, - type=notification_type, ) - ) + + await tx.usernotificationbatch.delete_many( + where=UserNotificationBatchWhereInput( + userId=user_id, + type=notification_type, + ) + ) + except Exception as e: + raise DatabaseError( + f"Failed to empty user notification batch for user {user_id} and type {notification_type}: {e}" + ) from e async def get_user_notification_batch( user_id: str, notification_type: NotificationType, ) -> UserNotificationBatch | None: - return await UserNotificationBatch.prisma().find_first( - where={"userId": user_id, "type": notification_type}, - include={"notifications": True}, - ) + try: + return await UserNotificationBatch.prisma().find_first( + where={"userId": user_id, "type": notification_type}, + include={"notifications": True}, + ) + except Exception as e: + raise DatabaseError( + f"Failed to get user notification batch for user {user_id} and type {notification_type}: {e}" + ) from e diff --git a/autogpt_platform/backend/backend/data/user.py b/autogpt_platform/backend/backend/data/user.py index f424cfc99c..ce5740df65 100644 --- a/autogpt_platform/backend/backend/data/user.py 
+++ b/autogpt_platform/backend/backend/data/user.py @@ -142,19 +142,25 @@ async def migrate_and_encrypt_user_integrations(): async def get_active_user_ids_in_timerange(start_time: str, end_time: str) -> list[str]: - users = await User.prisma().find_many( - where={ - "AgentGraphExecutions": { - "some": { - "createdAt": { - "gte": datetime.fromisoformat(start_time), - "lte": datetime.fromisoformat(end_time), + try: + users = await User.prisma().find_many( + where={ + "AgentGraphExecutions": { + "some": { + "createdAt": { + "gte": datetime.fromisoformat(start_time), + "lte": datetime.fromisoformat(end_time), + } } } - } - }, - ) - return [user.id for user in users] + }, + ) + return [user.id for user in users] + + except Exception as e: + raise DatabaseError( + f"Failed to get active user ids in timerange {start_time} to {end_time}: {e}" + ) from e async def get_active_users_ids() -> list[str]: From 1626bf9e161f553f98cf3d312e3cded518785e16 Mon Sep 17 00:00:00 2001 From: Reinier van der Leer Date: Wed, 12 Feb 2025 03:02:00 +0100 Subject: [PATCH 09/12] fix(backend): Support Python 3.10 (#9468) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Resolves #9467 ### Changes 🏗️ - Loosen Python version requirement to include v3.10 Also, fixed a few issues in pyproject.toml: - Re-sort dependency list - Update `autogpt-platform-backend` package version to match latest release --- autogpt_platform/backend/poetry.lock | 87 ++++++++++++++++++++++--- autogpt_platform/backend/pyproject.toml | 48 +++++++------- 2 files changed, 102 insertions(+), 33 deletions(-) diff --git a/autogpt_platform/backend/poetry.lock b/autogpt_platform/backend/poetry.lock index 0f4c8da051..bda48bfd36 100644 --- a/autogpt_platform/backend/poetry.lock +++ b/autogpt_platform/backend/poetry.lock @@ -118,6 +118,7 @@ files = [ [package.dependencies] aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" @@ -208,6 +209,7 @@ files = [ ] [package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} @@ -354,6 +356,8 @@ mypy-extensions = ">=0.4.3" packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] @@ -780,11 +784,12 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] +markers = {dev = "python_version < \"3.11\""} [package.extras] test = ["pytest (>=6)"] @@ -990,8 +995,14 @@ files = [ [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" -grpcio = {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""} -grpcio-status = {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == 
\"grpc\""} +grpcio = [ + {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, +] +grpcio-status = [ + {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, +] proto-plus = ">=1.22.3,<2.0.0dev" protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" @@ -1154,7 +1165,10 @@ google-cloud-audit-log = ">=0.2.4,<1.0.0dev" google-cloud-core = ">=2.0.0,<3.0.0dev" grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" opentelemetry-api = ">=1.9.0" -proto-plus = {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""} +proto-plus = [ + {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, + {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, +] protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" [[package]] @@ -1291,14 +1305,14 @@ pydantic = ">=1.10,<3" [[package]] name = "gravitasml" -version = "0.1.2" +version = "0.1.3" description = "" optional = false -python-versions = "<3.13,>=3.11" +python-versions = "<4.0,>=3.10" groups = ["main"] files = [ - {file = "gravitasml-0.1.2-py3-none-any.whl", hash = "sha256:eafa5a65a6f952f0d23c54762f5fcb654b3f000ede389519a7dc3c3179801121"}, - {file = "gravitasml-0.1.2.tar.gz", hash = "sha256:bbb651a9d5be669d47cc5bd7fbf19d55ce277b3dd9966cc37d2ec0ae042aab25"}, + {file = "gravitasml-0.1.3-py3-none-any.whl", hash = "sha256:51ff98b4564b7a61f7796f18d5f2558b919d30b3722579296089645b7bc18b85"}, + {file = "gravitasml-0.1.3.tar.gz", hash = "sha256:04d240b9fa35878252d57a36032130b6516487468847fcdced1022c032a20f57"}, ] [package.dependencies] @@ -2315,6 +2329,9 @@ files = [ {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -2748,6 +2765,7 @@ files = [ [package.dependencies] pastel = ">=0.2.1,<0.3.0" pyyaml = ">=6.0.2,<7.0.0" +tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} [package.extras] poetry-plugin = ["poetry (>=1.0,<3.0)"] @@ -2788,6 +2806,7 @@ files = [ deprecation = ">=2.1.0,<3.0.0" httpx = {version = ">=0.26,<0.29", extras = ["http2"]} pydantic = ">=1.9,<3.0" +strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""} [[package]] name = "posthog" @@ -2878,6 +2897,7 @@ jinja2 = ">=2.11.2" nodeenv = "*" pydantic = ">=1.10.0,<3" python-dotenv = ">=0.12.0" +StrEnum = {version = "*", markers = "python_version < \"3.11\""} tomlkit = "*" typing-extensions = ">=4.5.0" @@ -3454,9 +3474,11 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", 
"pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] @@ -3511,6 +3533,7 @@ files = [ ] [package.dependencies] +tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version < \"3.11\""} watchdog = ">=2.0.0" [[package]] @@ -4322,6 +4345,49 @@ files = [ [package.dependencies] requests = ">=2.32.3,<3.0.0" +[[package]] +name = "tomli" +version = "2.2.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +markers = "python_version < \"3.11\"" +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + 
{file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] + [[package]] name = "tomlkit" version = "0.13.2" @@ -4492,6 +4558,7 @@ h11 = ">=0.8" httptools = {version = ">=0.6.3", optional = true, markers = "extra == \"standard\""} python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} @@ -5016,5 +5083,5 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" -python-versions = ">=3.11,<3.13" -content-hash = "cf3242137be7977795e01a61c058ac01202a1b6e0a03172a22894eb652ff2aab" +python-versions = ">=3.10,<3.13" +content-hash = "a3af2c13c3fce626006c6469263e44cc7f1b8c26e3d0a6beabd1b33caac35128" diff --git a/autogpt_platform/backend/pyproject.toml b/autogpt_platform/backend/pyproject.toml index dab7f75e6d..248c5a1469 100644 --- a/autogpt_platform/backend/pyproject.toml +++ b/autogpt_platform/backend/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "autogpt-platform-backend" -version = "0.3.4" +version = "0.4.9" description = "A platform for building AI-powered agentic workflows" authors = ["AutoGPT "] readme = "README.md" @@ -8,12 +8,13 @@ packages = [{ include = "backend", format = "sdist" }] [tool.poetry.dependencies] -python = ">=3.11,<3.13" +python = ">=3.10,<3.13" aio-pika = "^9.5.4" anthropic = "^0.45.2" apscheduler = "^3.11.0" 
autogpt-libs = { path = "../autogpt_libs", develop = true } click = "^8.1.7" +cryptography = "^43.0" discord-py = "^2.4.0" e2b-code-interpreter = "^1.0.5" fastapi = "^0.115.8" @@ -21,58 +22,59 @@ feedparser = "^6.0.11" flake8 = "^7.0.0" google-api-python-client = "^2.160.0" google-auth-oauthlib = "^1.2.1" +google-cloud-storage = "^3.0.0" +googlemaps = "^4.10.0" +gravitasml = "^0.1.3" groq = "^0.18.0" jinja2 = "^3.1.4" jsonref = "^1.1.0" jsonschema = "^4.22.0" +launchdarkly-server-sdk = "^9.8.0" +mem0ai = "^0.1.48" +moviepy = "^2.1.2" ollama = "^0.4.1" openai = "^1.61.1" +pika = "^1.3.2" +pinecone = "^5.3.1" praw = "~7.8.1" prisma = "^0.15.0" psutil = "^6.1.0" +psycopg2-binary = "^2.9.10" pydantic = "^2.9.2" pydantic-settings = "^2.3.4" pyro5 = "^5.15" pytest = "^8.2.1" pytest-asyncio = "^0.25.3" python-dotenv = "^1.0.1" +python-multipart = "^0.0.20" redis = "^5.2.0" +replicate = "^1.0.4" sentry-sdk = "2.20.0" +sqlalchemy = "^2.0.36" strenum = "^0.4.9" stripe = "^11.5.0" supabase = "2.13.0" tenacity = "^9.0.0" +todoist-api-python = "^2.1.7" tweepy = "^4.14.0" uvicorn = { extras = ["standard"], version = "^0.34.0" } websockets = "^13.1" youtube-transcript-api = "^0.6.2" -googlemaps = "^4.10.0" -replicate = "^1.0.4" -pinecone = "^5.3.1" -cryptography = "^43.0" -python-multipart = "^0.0.20" -sqlalchemy = "^2.0.36" -psycopg2-binary = "^2.9.10" -google-cloud-storage = "^3.0.0" -launchdarkly-server-sdk = "^9.8.0" -mem0ai = "^0.1.48" -todoist-api-python = "^2.1.7" -moviepy = "^2.1.2" -gravitasml = "0.1.2" -pika = "^1.3.2" +# NOTE: please insert new dependencies in their alphabetical location [tool.poetry.group.dev.dependencies] -poethepoet = "^0.32.1" +aiohappyeyeballs = "^2.4.4" +black = "^24.10.0" +faker = "^33.3.1" httpx = "^0.27.0" +isort = "^5.13.2" +poethepoet = "^0.32.1" +pyright = "^1.1.392" +pytest-mock = "^3.14.0" pytest-watcher = "^0.4.2" requests = "^2.32.3" ruff = "^0.9.2" -pyright = "^1.1.392" -isort = "^5.13.2" -black = "^24.10.0" -aiohappyeyeballs = "^2.4.4" -pytest-mock = "^3.14.0" -faker = "^33.3.1" +# NOTE: please insert new dependencies in their alphabetical location [build-system] requires = ["poetry-core"] From c69df5cb799ce77f63761c2045d29745cad0bace Mon Sep 17 00:00:00 2001 From: Nicholas Tindle Date: Tue, 11 Feb 2025 20:30:33 -0600 Subject: [PATCH 10/12] ref(backend): update from pr changes --- autogpt_platform/backend/backend/data/user.py | 45 ++++++++++--------- 1 file changed, 25 insertions(+), 20 deletions(-) diff --git a/autogpt_platform/backend/backend/data/user.py b/autogpt_platform/backend/backend/data/user.py index ce5740df65..c85d272c56 100644 --- a/autogpt_platform/backend/backend/data/user.py +++ b/autogpt_platform/backend/backend/data/user.py @@ -18,29 +18,34 @@ logger = logging.getLogger(__name__) async def get_or_create_user(user_data: dict) -> User: - user_id = user_data.get("sub") - if not user_id: - raise HTTPException(status_code=401, detail="User ID not found in token") + try: + user_id = user_data.get("sub") + if not user_id: + raise HTTPException(status_code=401, detail="User ID not found in token") - user_email = user_data.get("email") - if not user_email: - raise HTTPException(status_code=401, detail="Email not found in token") + user_email = user_data.get("email") + if not user_email: + raise HTTPException(status_code=401, detail="Email not found in token") - user = await prisma.user.find_unique(where={"id": user_id}) - if not user: - user = await prisma.user.create( - data={ - "id": user_id, - "email": user_email, - "name": 
user_data.get("user_metadata", {}).get("name"), - "UserNotificationPreference": {"create": {"userId": user_id}}, - } + user = await prisma.user.find_unique( + where={"id": user_id}, include={"UserNotificationPreference": True} ) - if not user.UserNotificationPreference: - user.UserNotificationPreference = ( - await prisma.usernotificationpreference.create(data={"userId": user_id}) - ) - return User.model_validate(user) + if not user: + user = await prisma.user.create( + data={ + "id": user_id, + "email": user_email, + "name": user_data.get("user_metadata", {}).get("name"), + "UserNotificationPreference": {"create": {"userId": user_id}}, + } + ) + if not user.UserNotificationPreference: + user.UserNotificationPreference = ( + await prisma.usernotificationpreference.create(data={"userId": user_id}) + ) + return User.model_validate(user) + except Exception as e: + raise DatabaseError(f"Failed to get or create user {user_data}: {e}") from e async def get_user_by_id(user_id: str) -> User: From 85f8f4136f75ee242bd05aa97bac3f5278e4338f Mon Sep 17 00:00:00 2001 From: Nicholas Tindle Date: Tue, 11 Feb 2025 20:33:49 -0600 Subject: [PATCH 11/12] fix(backend): relock --- autogpt_platform/backend/poetry.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autogpt_platform/backend/poetry.lock b/autogpt_platform/backend/poetry.lock index 65d1bcf7da..cf1e3c977b 100644 --- a/autogpt_platform/backend/poetry.lock +++ b/autogpt_platform/backend/poetry.lock @@ -5122,4 +5122,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.13" -content-hash = "a3af2c13c3fce626006c6469263e44cc7f1b8c26e3d0a6beabd1b33caac35128" +content-hash = "4052d96f95ad3dbf8bef4d651168f6df1ef21c506f152ddca119ad8f23caf159" From ddd2b9de151db15d7f2134c20fd3901b5d1ac685 Mon Sep 17 00:00:00 2001 From: Nicholas Tindle Date: Tue, 11 Feb 2025 21:09:51 -0600 Subject: [PATCH 12/12] fix(backend): the tests did dumb stuff like reusing ids --- autogpt_platform/backend/backend/data/user.py | 4 ++-- .../migration.sql | 2 +- autogpt_platform/backend/schema.prisma | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) rename autogpt_platform/backend/migrations/{20250211201352_add_notiifcations_tables_and_config => 20250212030731_add_user_notifications}/migration.sql (97%) diff --git a/autogpt_platform/backend/backend/data/user.py b/autogpt_platform/backend/backend/data/user.py index c85d272c56..77c0b70157 100644 --- a/autogpt_platform/backend/backend/data/user.py +++ b/autogpt_platform/backend/backend/data/user.py @@ -28,7 +28,7 @@ async def get_or_create_user(user_data: dict) -> User: raise HTTPException(status_code=401, detail="Email not found in token") user = await prisma.user.find_unique( - where={"id": user_id}, include={"UserNotificationPreference": True} + where={"id": user_id} ) if not user: user = await prisma.user.create( @@ -39,7 +39,7 @@ async def get_or_create_user(user_data: dict) -> User: "UserNotificationPreference": {"create": {"userId": user_id}}, } ) - if not user.UserNotificationPreference: + if not user.userNotificationPreferenceId: user.UserNotificationPreference = ( await prisma.usernotificationpreference.create(data={"userId": user_id}) ) diff --git a/autogpt_platform/backend/migrations/20250211201352_add_notiifcations_tables_and_config/migration.sql b/autogpt_platform/backend/migrations/20250212030731_add_user_notifications/migration.sql similarity index 97% rename from autogpt_platform/backend/migrations/20250211201352_add_notiifcations_tables_and_config/migration.sql rename 
to autogpt_platform/backend/migrations/20250212030731_add_user_notifications/migration.sql index b8592f657d..f40dce3f17 100644 --- a/autogpt_platform/backend/migrations/20250211201352_add_notiifcations_tables_and_config/migration.sql +++ b/autogpt_platform/backend/migrations/20250212030731_add_user_notifications/migration.sql @@ -62,7 +62,7 @@ CREATE UNIQUE INDEX "UserNotificationPreference_userId_key" ON "UserNotification CREATE UNIQUE INDEX "User_userNotificationPreferenceId_key" ON "User"("userNotificationPreferenceId"); -- AddForeignKey -ALTER TABLE "User" ADD CONSTRAINT "User_userNotificationPreferenceId_fkey" FOREIGN KEY ("userNotificationPreferenceId") REFERENCES "UserNotificationPreference"("id") ON DELETE SET NULL ON UPDATE CASCADE; +ALTER TABLE "User" ADD CONSTRAINT "User_userNotificationPreferenceId_fkey" FOREIGN KEY ("userNotificationPreferenceId") REFERENCES "UserNotificationPreference"("id") ON DELETE CASCADE ON UPDATE CASCADE; -- AddForeignKey ALTER TABLE "NotificationEvent" ADD CONSTRAINT "NotificationEvent_userNotificationBatchId_fkey" FOREIGN KEY ("userNotificationBatchId") REFERENCES "UserNotificationBatch"("id") ON DELETE SET NULL ON UPDATE CASCADE; diff --git a/autogpt_platform/backend/schema.prisma b/autogpt_platform/backend/schema.prisma index a82bb20e62..d463390fbc 100644 --- a/autogpt_platform/backend/schema.prisma +++ b/autogpt_platform/backend/schema.prisma @@ -42,7 +42,7 @@ model User { APIKeys APIKey[] IntegrationWebhooks IntegrationWebhook[] UserNotificationBatch UserNotificationBatch[] - UserNotificationPreference UserNotificationPreference? @relation(fields: [userNotificationPreferenceId], references: [id]) + UserNotificationPreference UserNotificationPreference? @relation(fields: [userNotificationPreferenceId], references: [id], onDelete: Cascade) @@index([id]) @@index([email])
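
The recurring data-layer change in this series wraps each async Prisma query in `backend/data/user.py` and `backend/data/notifications.py` in a `try`/`except` block and re-raises unexpected failures as `DatabaseError` with contextual detail. Below is a minimal, self-contained sketch of that pattern; the `DatabaseError` class and the `fetch_user_ids` stub are stand-ins (the real code imports `DatabaseError` from the backend and calls the Prisma client directly), so treat it as an illustration of the approach rather than the platform's implementation.

```python
import asyncio


class DatabaseError(Exception):
    """Stand-in for the backend's DatabaseError raised in these patches."""


async def fetch_user_ids(start_time: str, end_time: str) -> list[str]:
    # Stand-in for a Prisma query such as User.prisma().find_many(...)
    return ["user-1", "user-2"]


async def get_active_user_ids_in_timerange(start_time: str, end_time: str) -> list[str]:
    try:
        return await fetch_user_ids(start_time, end_time)
    except Exception as e:
        # Surface unexpected database failures as a domain-specific error,
        # keeping the original exception chained and the query context in the message.
        raise DatabaseError(
            f"Failed to get active user ids in timerange {start_time} to {end_time}: {e}"
        ) from e


if __name__ == "__main__":
    print(asyncio.run(get_active_user_ids_in_timerange("2025-01-01", "2025-01-31")))
```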