diff --git a/.gitattributes b/.gitattributes
index c025026c54..02c46c69cf 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -6,3 +6,5 @@ docs/_javascript/** linguist-vendored
 
 # Exclude VCR cassettes from stats
 forge/tests/vcr_cassettes/**/**.y*ml linguist-generated
+
+* text=auto
\ No newline at end of file
diff --git a/.github/workflows/autogpt-server-ci.yml b/.github/workflows/autogpt-server-ci.yml
index a996ccc0c4..0551ae6e70 100644
--- a/.github/workflows/autogpt-server-ci.yml
+++ b/.github/workflows/autogpt-server-ci.yml
@@ -39,8 +39,8 @@ jobs:
         if: matrix.db-platform == 'postgres'
         uses: ikalnytskyi/action-setup-postgres@v6
         with:
-          username: ${{ secrets.DB_USER }}
-          password: ${{ secrets.DB_PASS }}
+          username: ${{ secrets.DB_USER || 'postgres' }}
+          password: ${{ secrets.DB_PASS || 'postgres' }}
           database: postgres
           port: 5432
         id: postgres
@@ -145,13 +145,13 @@ jobs:
         CI: true
         PLAIN_OUTPUT: True
         OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
-        DB_USER: ${{ secrets.DB_USER }}
-        DB_PASS: ${{ secrets.DB_PASS }}
+        DB_USER: ${{ secrets.DB_USER || 'postgres' }}
+        DB_PASS: ${{ secrets.DB_PASS || 'postgres' }}
         DB_NAME: postgres
         DB_PORT: 5432
         RUN_ENV: local
         PORT: 8080
-        DATABASE_URL: postgresql://${{ secrets.DB_USER }}:${{ secrets.DB_PASS }}@localhost:5432/${{ secrets.DB_NAME }}
+        DATABASE_URL: postgresql://${{ secrets.DB_USER || 'postgres' }}:${{ secrets.DB_PASS || 'postgres' }}@localhost:5432/${{ secrets.DB_NAME || 'postgres'}}
 
       # - name: Upload coverage reports to Codecov
       #   uses: codecov/codecov-action@v4
diff --git a/docs/content/server/new_blocks.md b/docs/content/server/new_blocks.md
index 55b8d139bb..40c02f028e 100644
--- a/docs/content/server/new_blocks.md
+++ b/docs/content/server/new_blocks.md
@@ -215,4 +215,10 @@ If you would like to implement one of these blocks, open a pull request and we w
 - Read / Get most read books in a given month, year, etc from GoodReads or Amazon Books, etc
 - Get dates for specific shows across all streaming services
 - Suggest/Recommend/Get most watched shows in a given month, year, etc across all streaming platforms
+- Data analysis from xlsx data set
+  - Gather via Excel or Google Sheets data > Sample the data randomly (sample block takes top X, bottom X, randomly, etc) > pass that to LLM Block to generate a script for analysis of the full data > Python block to run the script > making a loop back through LLM Fix Block on error > create chart/visualization (potentially in the code block?) > show the image as output (this may require frontend changes to show)
+- Tiktok video search and download
+### Marketing
+
+- Portfolio site design and enhancements
diff --git a/rnd/autogpt_builder/package.json b/rnd/autogpt_builder/package.json
index f10491256f..60d051f37a 100644
--- a/rnd/autogpt_builder/package.json
+++ b/rnd/autogpt_builder/package.json
@@ -18,6 +18,7 @@
     "@radix-ui/react-icons": "^1.3.0",
     "@radix-ui/react-label": "^2.1.0",
     "@radix-ui/react-popover": "^1.1.1",
+    "@radix-ui/react-select": "^2.1.1",
    "@radix-ui/react-scroll-area": "^1.1.0",
    "@radix-ui/react-separator": "^1.1.0",
    "@radix-ui/react-slot": "^1.1.0",
diff --git a/rnd/autogpt_builder/src/app/layout.tsx b/rnd/autogpt_builder/src/app/layout.tsx
index c9483a8e2b..2dc2c230fd 100644
--- a/rnd/autogpt_builder/src/app/layout.tsx
+++ b/rnd/autogpt_builder/src/app/layout.tsx
@@ -6,6 +6,7 @@ import { NavBar } from "@/components/NavBar";
 import { cn } from "@/lib/utils";
 
 import "./globals.css";
+import TallyPopupSimple from "@/components/TallyPopup";
 
 const inter = Inter({ subsets: ["latin"] });
 
@@ -32,6 +33,7 @@ export default function RootLayout({
         {children}
+        <TallyPopupSimple />
diff --git a/rnd/autogpt_builder/src/app/monitor/page.tsx b/rnd/autogpt_builder/src/app/monitor/page.tsx index 2a7f7e9983..c88b386094 100644 --- a/rnd/autogpt_builder/src/app/monitor/page.tsx +++ b/rnd/autogpt_builder/src/app/monitor/page.tsx @@ -1,66 +1,20 @@ "use client"; import React, { useEffect, useState } from "react"; -import Link from "next/link"; -import moment from "moment"; -import { - ComposedChart, - DefaultLegendContentProps, - Legend, - Line, - ResponsiveContainer, - Scatter, - Tooltip, - XAxis, - YAxis, -} from "recharts"; -import { - DropdownMenu, - DropdownMenuContent, - DropdownMenuItem, - DropdownMenuLabel, - DropdownMenuRadioGroup, - DropdownMenuRadioItem, - DropdownMenuSeparator, - DropdownMenuTrigger, -} from "@/components/ui/dropdown-menu"; + import AutoGPTServerAPI, { - Graph, GraphMeta, NodeExecutionResult, - safeCopyGraph, } from "@/lib/autogpt-server-api"; + +import { Card } from "@/components/ui/card"; +import { FlowRun } from "@/lib/types"; import { - ChevronDownIcon, - ClockIcon, - EnterIcon, - ExitIcon, - Pencil2Icon, -} from "@radix-ui/react-icons"; -import { cn, exportAsJSONFile, hashString } from "@/lib/utils"; -import { Badge } from "@/components/ui/badge"; -import { Button, buttonVariants } from "@/components/ui/button"; -import { Calendar } from "@/components/ui/calendar"; -import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; -import { - Popover, - PopoverContent, - PopoverTrigger, -} from "@/components/ui/popover"; -import { - Dialog, - DialogContent, - DialogHeader, - DialogTrigger, -} from "@/components/ui/dialog"; -import { - Table, - TableBody, - TableCell, - TableHead, - TableHeader, - TableRow, -} from "@/components/ui/table"; -import { AgentImportForm } from "@/components/agent-import-form"; + AgentFlowList, + FlowInfo, + FlowRunInfo, + FlowRunsList, + FlowRunsStats, +} from "@/components/monitor"; const Monitor = () => { const [flows, setFlows] = useState([]); @@ -165,19 +119,6 @@ const Monitor = () => { ); }; -type FlowRun = { - id: string; - graphID: string; - graphVersion: number; - status: "running" | "waiting" | "success" | "failed"; - startTime: number; // unix timestamp (ms) - endTime: number; // unix timestamp (ms) - duration: number; // seconds - totalRunTime: number; // seconds - - nodeExecutionResults: NodeExecutionResult[]; -}; - function flowRunFromNodeExecutionResults( nodeExecutionResults: NodeExecutionResult[], ): FlowRun { @@ -230,664 +171,4 @@ function flowRunFromNodeExecutionResults( }; } -const AgentFlowList = ({ - flows, - flowRuns, - selectedFlow, - onSelectFlow, - className, -}: { - flows: GraphMeta[]; - flowRuns?: FlowRun[]; - selectedFlow: GraphMeta | null; - onSelectFlow: (f: GraphMeta) => void; - className?: string; -}) => { - const [templates, setTemplates] = useState([]); - const api = new AutoGPTServerAPI(); - useEffect(() => { - api.listTemplates().then((templates) => setTemplates(templates)); - }, []); - - return ( - - - Agents - -
- {/* Split "Create" button */} - - - {/* https://ui.shadcn.com/docs/components/dialog#notes */} - - - - - - - - - Import from file - - - {templates.length > 0 && ( - <> - {/* List of templates */} - - Use a template - {templates.map((template) => ( - { - api - .createGraph(template.id, template.version) - .then((newGraph) => { - window.location.href = `/build?flowID=${newGraph.id}`; - }); - }} - > - {template.name} - - ))} - - )} - - - - - - Import an Agent (template) from a file - - - - -
-
- - - - - - Name - {/* Status */} - {/* Last updated */} - {flowRuns && ( - - # of runs - - )} - {flowRuns && Last run} - - - - {flows - .map((flow) => { - let runCount = 0, - lastRun: FlowRun | null = null; - if (flowRuns) { - const _flowRuns = flowRuns.filter( - (r) => r.graphID == flow.id, - ); - runCount = _flowRuns.length; - lastRun = - runCount == 0 - ? null - : _flowRuns.reduce((a, c) => - a.startTime > c.startTime ? a : c, - ); - } - return { flow, runCount, lastRun }; - }) - .sort((a, b) => { - if (!a.lastRun && !b.lastRun) return 0; - if (!a.lastRun) return 1; - if (!b.lastRun) return -1; - return b.lastRun.startTime - a.lastRun.startTime; - }) - .map(({ flow, runCount, lastRun }) => ( - onSelectFlow(flow)} - data-state={selectedFlow?.id == flow.id ? "selected" : null} - > - {flow.name} - {/* */} - {/* - {flow.updatedAt ?? "???"} - */} - {flowRuns && ( - - {runCount} - - )} - {flowRuns && - (!lastRun ? ( - - ) : ( - - {moment(lastRun.startTime).fromNow()} - - ))} - - ))} - -
-
-
- ); -}; - -const FlowStatusBadge = ({ - status, -}: { - status: "active" | "disabled" | "failing"; -}) => ( - - {status} - -); - -const FlowRunsList: React.FC<{ - flows: GraphMeta[]; - runs: FlowRun[]; - className?: string; - selectedRun?: FlowRun | null; - onSelectRun: (r: FlowRun) => void; -}> = ({ flows, runs, selectedRun, onSelectRun, className }) => ( - - - Runs - - - - - - Agent - Started - Status - Duration - - - - {runs.map((run) => ( - onSelectRun(run)} - data-state={selectedRun?.id == run.id ? "selected" : null} - > - - {flows.find((f) => f.id == run.graphID)!.name} - - {moment(run.startTime).format("HH:mm")} - - - - {formatDuration(run.duration)} - - ))} - -
-
-
-); - -const FlowRunStatusBadge: React.FC<{ - status: FlowRun["status"]; - className?: string; -}> = ({ status, className }) => ( - - {status} - -); - -const FlowInfo: React.FC< - React.HTMLAttributes & { - flow: GraphMeta; - flowRuns: FlowRun[]; - flowVersion?: number | "all"; - } -> = ({ flow, flowRuns, flowVersion, ...props }) => { - const api = new AutoGPTServerAPI(); - - const [flowVersions, setFlowVersions] = useState(null); - const [selectedVersion, setSelectedFlowVersion] = useState( - flowVersion ?? "all", - ); - const selectedFlowVersion: Graph | undefined = flowVersions?.find( - (v) => - v.version == (selectedVersion == "all" ? flow.version : selectedVersion), - ); - - useEffect(() => { - api.getGraphAllVersions(flow.id).then((result) => setFlowVersions(result)); - }, [flow.id]); - - return ( - - -
- - {flow.name} v{flow.version} - -

- Agent ID: {flow.id} -

-
-
- {(flowVersions?.length ?? 0) > 1 && ( - - - - - - Choose a version - - - setSelectedFlowVersion( - choice == "all" ? choice : Number(choice), - ) - } - > - - All versions - - {flowVersions?.map((v) => ( - - Version {v.version} - {v.is_active ? " (active)" : ""} - - ))} - - - - )} - - Edit - - -
-
- - - r.graphID == flow.id && - (selectedVersion == "all" || r.graphVersion == selectedVersion), - )} - /> - -
- ); -}; - -const FlowRunInfo: React.FC< - React.HTMLAttributes & { - flow: GraphMeta; - flowRun: FlowRun; - } -> = ({ flow, flowRun, ...props }) => { - if (flowRun.graphID != flow.id) { - throw new Error( - `FlowRunInfo can't be used with non-matching flowRun.flowID and flow.id`, - ); - } - - return ( - - -
- - {flow.name} v{flow.version} - -

- Agent ID: {flow.id} -

-

- Run ID: {flowRun.id} -

-
- - Edit Agent - -
- -

- Status:{" "} - -

-

- Started:{" "} - {moment(flowRun.startTime).format("YYYY-MM-DD HH:mm:ss")} -

-

- Finished:{" "} - {moment(flowRun.endTime).format("YYYY-MM-DD HH:mm:ss")} -

-

- Duration (run time): {flowRun.duration} ( - {flowRun.totalRunTime}) seconds -

- {/*

Total cost: €1,23

*/} -
-
- ); -}; - -const FlowRunsStats: React.FC<{ - flows: GraphMeta[]; - flowRuns: FlowRun[]; - title?: string; - className?: string; -}> = ({ flows, flowRuns, title, className }) => { - /* "dateMin": since the first flow in the dataset - * number > 0: custom date (unix timestamp) - * number < 0: offset relative to Date.now() (in seconds) */ - const [statsSince, setStatsSince] = useState(-24 * 3600); - const statsSinceTimestamp = // unix timestamp or null - typeof statsSince == "string" - ? null - : statsSince < 0 - ? Date.now() + statsSince * 1000 - : statsSince; - const filteredFlowRuns = - statsSinceTimestamp != null - ? flowRuns.filter((fr) => fr.startTime > statsSinceTimestamp) - : flowRuns; - - return ( -
-
- {title || "Stats"} -
- - - - - - - - - - - setStatsSince(selectedDay.getTime()) - } - initialFocus - /> - - - -
-
- -
-
-

- Total runs: {filteredFlowRuns.length} -

-

- Total run time:{" "} - {filteredFlowRuns.reduce((total, run) => total + run.totalRunTime, 0)}{" "} - seconds -

- {/*

Total cost: €1,23

*/} -
-
- ); -}; - -const FlowRunsTimeline = ({ - flows, - flowRuns, - dataMin, - className, -}: { - flows: GraphMeta[]; - flowRuns: FlowRun[]; - dataMin: "dataMin" | number; - className?: string; -}) => ( - /* TODO: make logarithmic? */ - - - { - const now = moment(); - const time = moment(unixTime); - return now.diff(time, "hours") < 24 - ? time.format("HH:mm") - : time.format("YYYY-MM-DD HH:mm"); - }} - name="Time" - scale="time" - /> - (s > 90 ? `${Math.round(s / 60)}m` : `${s}s`)} - /> - { - if (payload && payload.length) { - const data: FlowRun & { time: number; _duration: number } = - payload[0].payload; - const flow = flows.find((f) => f.id === data.graphID); - return ( - -

- Agent: {flow ? flow.name : "Unknown"} -

-

- Status:  - -

-

- Started:{" "} - {moment(data.startTime).format("YYYY-MM-DD HH:mm:ss")} -

-

- Duration / run time:{" "} - {formatDuration(data.duration)} /{" "} - {formatDuration(data.totalRunTime)} -

-
- ); - } - return null; - }} - /> - {flows.map((flow) => ( - fr.graphID == flow.id) - .map((fr) => ({ - ...fr, - time: fr.startTime + fr.totalRunTime * 1000, - _duration: fr.totalRunTime, - }))} - name={flow.name} - fill={`hsl(${(hashString(flow.id) * 137.5) % 360}, 70%, 50%)`} - /> - ))} - {flowRuns.map((run) => ( - - ))} - } - wrapperStyle={{ - bottom: 0, - left: 0, - right: 0, - width: "100%", - display: "flex", - justifyContent: "center", - }} - /> -
-
-); - -const ScrollableLegend: React.FC< - DefaultLegendContentProps & { className?: string } -> = ({ payload, className }) => { - return ( -
- {payload.map((entry, index) => { - if (entry.type == "none") return; - return ( - - - {entry.value} - - ); - })} -
- ); -}; - -function formatDuration(seconds: number): string { - return ( - (seconds < 100 ? seconds.toPrecision(2) : Math.round(seconds)).toString() + - "s" - ); -} - export default Monitor; diff --git a/rnd/autogpt_builder/src/components/CustomEdge.tsx b/rnd/autogpt_builder/src/components/CustomEdge.tsx index 25ac6c329f..137fdea8d4 100644 --- a/rnd/autogpt_builder/src/components/CustomEdge.tsx +++ b/rnd/autogpt_builder/src/components/CustomEdge.tsx @@ -32,9 +32,8 @@ const CustomEdgeFC: FC> = ({ const [isHovered, setIsHovered] = useState(false); const { setEdges } = useReactFlow(); - const onEdgeClick = () => { + const onEdgeRemoveClick = () => { setEdges((edges) => edges.filter((edge) => edge.id !== id)); - data.clearNodesStatusAndOutput(); }; const [path, labelX, labelY] = getBezierPath({ @@ -105,7 +104,7 @@ const CustomEdgeFC: FC> = ({ onMouseEnter={() => setIsHovered(true)} onMouseLeave={() => setIsHovered(false)} className={`edge-label-button ${isHovered ? "visible" : ""}`} - onClick={onEdgeClick} + onClick={onEdgeRemoveClick} > diff --git a/rnd/autogpt_builder/src/components/CustomNode.tsx b/rnd/autogpt_builder/src/components/CustomNode.tsx index 63aeb222a8..d6b93cb14a 100644 --- a/rnd/autogpt_builder/src/components/CustomNode.tsx +++ b/rnd/autogpt_builder/src/components/CustomNode.tsx @@ -15,13 +15,16 @@ import { BlockIORootSchema, NodeExecutionResult, } from "@/lib/autogpt-server-api/types"; -import { BlockSchema } from "@/lib/types"; import { beautifyString, setNestedProperty } from "@/lib/utils"; +import { Button } from "@/components/ui/button"; import { Switch } from "@/components/ui/switch"; -import NodeHandle from "./NodeHandle"; -import NodeInputField from "./NodeInputField"; import { Copy, Trash2 } from "lucide-react"; import { history } from "./history"; +import NodeHandle from "./NodeHandle"; +import { CustomEdgeData } from "./CustomEdge"; +import { NodeGenericInputField } from "./node-input-components"; + +type ParsedKey = { key: string; index?: number }; export type CustomNodeData = { blockType: string; @@ -37,8 +40,8 @@ export type CustomNodeData = { targetHandle: string; }>; isOutputOpen: boolean; - status?: string; - output_data?: any; + status?: NodeExecutionResult["status"]; + output_data?: NodeExecutionResult["output_data"]; block_id: string; backend_id?: string; errors?: { [key: string]: string | null }; @@ -55,7 +58,10 @@ const CustomNode: FC> = ({ data, id }) => { const [isOutputModalOpen, setIsOutputModalOpen] = useState(false); const [isHovered, setIsHovered] = useState(false); - const { getNode, setNodes, getEdges, setEdges } = useReactFlow(); + const { getNode, setNodes, getEdges, setEdges } = useReactFlow< + CustomNodeData, + CustomEdgeData + >(); const outputDataRef = useRef(null); const isInitialSetup = useRef(true); @@ -86,14 +92,11 @@ const CustomNode: FC> = ({ data, id }) => { setIsAdvancedOpen(checked); }; - const hasOptionalFields = () => { - return ( - data.inputSchema && - Object.keys(data.inputSchema.properties).some((key) => { - return !data.inputSchema.required?.includes(key); - }) - ); - }; + const hasOptionalFields = + data.inputSchema && + Object.keys(data.inputSchema.properties).some((key) => { + return !data.inputSchema.required?.includes(key); + }); const generateOutputHandles = (schema: BlockIORootSchema) => { if (!schema?.properties) return null; @@ -110,16 +113,30 @@ const CustomNode: FC> = ({ data, id }) => { )); }; - const handleInputChange = (key: string, value: any) => { - const keys = key.split("."); + const handleInputChange = 
(path: string, value: any) => { + const keys = parseKeys(path); const newValues = JSON.parse(JSON.stringify(data.hardcodedValues)); let current = newValues; for (let i = 0; i < keys.length - 1; i++) { - if (!current[keys[i]]) current[keys[i]] = {}; - current = current[keys[i]]; + const { key: currentKey, index } = keys[i]; + if (index !== undefined) { + if (!current[currentKey]) current[currentKey] = []; + if (!current[currentKey][index]) current[currentKey][index] = {}; + current = current[currentKey][index]; + } else { + if (!current[currentKey]) current[currentKey] = {}; + current = current[currentKey]; + } + } + + const lastKey = keys[keys.length - 1]; + if (lastKey.index !== undefined) { + if (!current[lastKey.key]) current[lastKey.key] = []; + current[lastKey.key][lastKey.index] = value; + } else { + current[lastKey.key] = value; } - current[keys[keys.length - 1]] = value; console.log(`Updating hardcoded values for node ${id}:`, newValues); @@ -135,16 +152,49 @@ const CustomNode: FC> = ({ data, id }) => { data.setHardcodedValues(newValues); const errors = data.errors || {}; // Remove error with the same key - setNestedProperty(errors, key, null); + setNestedProperty(errors, path, null); data.setErrors({ ...errors }); }; + // Helper function to parse keys with array indices + const parseKeys = (key: string): ParsedKey[] => { + const regex = /(\w+)|\[(\d+)\]/g; + const keys: ParsedKey[] = []; + let match; + let currentKey: string | null = null; + + while ((match = regex.exec(key)) !== null) { + if (match[1]) { + if (currentKey !== null) { + keys.push({ key: currentKey }); + } + currentKey = match[1]; + } else if (match[2]) { + if (currentKey !== null) { + keys.push({ key: currentKey, index: parseInt(match[2], 10) }); + currentKey = null; + } else { + throw new Error("Invalid key format: array index without a key"); + } + } + } + + if (currentKey !== null) { + keys.push({ key: currentKey }); + } + + return keys; + }; + const getValue = (key: string) => { - const keys = key.split("."); - return keys.reduce( - (acc, k) => (acc && acc[k] !== undefined ? acc[k] : ""), - data.hardcodedValues, - ); + const keys = parseKeys(key); + return keys.reduce((acc, k) => { + if (acc === undefined) return undefined; + if (k.index !== undefined) { + return Array.isArray(acc[k.key]) ? acc[k.key][k.index] : undefined; + } + return acc[k.key]; + }, data.hardcodedValues as any); }; const isHandleConnected = (key: string) => { @@ -208,12 +258,10 @@ const CustomNode: FC> = ({ data, id }) => { const handleHovered = () => { setIsHovered(true); - console.log("isHovered", isHovered); }; const handleMouseLeave = () => { setIsHovered(false); - console.log("isHovered", isHovered); }; const deleteNode = useCallback(() => { @@ -274,58 +322,67 @@ const CustomNode: FC> = ({ data, id }) => {
{beautifyString(data.blockType?.replace(/Block$/, "") || data.title)}
-
+
{isHovered && ( <> - - + )}
-
+
{data.inputSchema && - Object.entries(data.inputSchema.properties).map(([key, schema]) => { - const isRequired = data.inputSchema.required?.includes(key); - return ( - (isRequired || isAdvancedOpen) && ( -
{}}> - - {!isHandleConnected(key) && ( - { + const isRequired = data.inputSchema.required?.includes(propKey); + const isConnected = isHandleConnected(propKey); + return ( + (isRequired || isAdvancedOpen || isConnected) && ( +
{}}> + - )} -
- ) - ); - })} + {!isConnected && ( + + )} +
+ ) + ); + }, + )}
-
+
{data.outputSchema && generateOutputHandles(data.outputSchema)}
@@ -355,14 +412,11 @@ const CustomNode: FC> = ({ data, id }) => {
)}
- + Output - {hasOptionalFields() && ( + {hasOptionalFields && ( <> - + Advanced )} diff --git a/rnd/autogpt_builder/src/components/Flow.tsx b/rnd/autogpt_builder/src/components/Flow.tsx index 9441960c7a..bc4bff837d 100644 --- a/rnd/autogpt_builder/src/components/Flow.tsx +++ b/rnd/autogpt_builder/src/components/Flow.tsx @@ -17,17 +17,16 @@ import ReactFlow, { Connection, EdgeTypes, MarkerType, + Controls, } from "reactflow"; import "reactflow/dist/style.css"; import CustomNode, { CustomNodeData } from "./CustomNode"; import "./flow.css"; import AutoGPTServerAPI, { Block, - BlockIOSchema, Graph, NodeExecutionResult, } from "@/lib/autogpt-server-api"; -import { Play, Undo2, Redo2 } from "lucide-react"; import { deepEquals, getTypeColor, @@ -41,6 +40,7 @@ import Ajv from "ajv"; import { Control, ControlPanel } from "@/components/edit/control/ControlPanel"; import { SaveControl } from "@/components/edit/control/SaveControl"; import { BlocksControl } from "@/components/edit/control/BlocksControl"; +import { IconPlay, IconRedo2, IconUndo2 } from "@/components/ui/icons"; // This is for the history, this is the minimum distance a block must move before it is logged // It helps to prevent spamming the history with small movements especially when pressing on a input in a block @@ -458,7 +458,6 @@ const FlowEditor: React.FC<{ targetHandle: link.sink_name, })), isOutputOpen: false, - setIsAnyModalOpen: setIsAnyModalOpen, // Pass setIsAnyModalOpen function setErrors: (errors: { [key: string]: string | null }) => { setNodes((nds) => nds.map((node) => @@ -502,11 +501,7 @@ const FlowEditor: React.FC<{ ); } - const prepareNodeInputData = ( - node: Node, - allNodes: Node[], - allEdges: Edge[], - ) => { + const prepareNodeInputData = (node: Node) => { console.log("Preparing input data for node:", node.id, node.data.blockType); const blockSchema = availableNodes.find( @@ -519,7 +514,7 @@ const FlowEditor: React.FC<{ } const getNestedData = ( - schema: BlockIOSchema, + schema: BlockIOSubSchema, values: { [key: string]: any }, ): { [key: string]: any } => { let inputData: { [key: string]: any } = {}; @@ -580,7 +575,7 @@ const FlowEditor: React.FC<{ const key = `${node.data.block_id}_${node.position.x}_${node.position.y}`; blockIdToNodeIdMap[key] = node.id; }); - const inputDefault = prepareNodeInputData(node, nodes, edges); + const inputDefault = prepareNodeInputData(node); const inputNodes = edges .filter((edge) => edge.target === node.id) .map((edge) => ({ @@ -685,7 +680,10 @@ const FlowEditor: React.FC<{ // Populate errors if validation fails validate.errors?.forEach((error) => { // Skip error if there's an edge connected - const path = error.instancePath || error.schemaPath; + const path = + "dataPath" in error + ? (error.dataPath as string) + : error.instancePath; const handle = path.split(/[\/.]/)[0]; if ( node.data.connections.some( @@ -845,17 +843,17 @@ const FlowEditor: React.FC<{ const editorControls: Control[] = [ { label: "Undo", - icon: , + icon: , onClick: handleUndo, }, { label: "Redo", - icon: , + icon: , onClick: handleRedo, }, { label: "Run", - icon: , + icon: , onClick: runAgent, }, ]; @@ -883,17 +881,16 @@ const FlowEditor: React.FC<{ onNodeDragStart={onNodesChangeStart} onNodeDragStop={onNodesChangeEnd} > -
- - - - -
+ + + + +
); diff --git a/rnd/autogpt_builder/src/components/NavBar.tsx b/rnd/autogpt_builder/src/components/NavBar.tsx index 580ac446c3..52c344041c 100644 --- a/rnd/autogpt_builder/src/components/NavBar.tsx +++ b/rnd/autogpt_builder/src/components/NavBar.tsx @@ -1,19 +1,17 @@ -import { - DropdownMenu, - DropdownMenuContent, - DropdownMenuItem, - DropdownMenuTrigger, -} from "@/components/ui/dropdown-menu"; import Link from "next/link"; -import { CircleUser, Menu, SquareActivity, Workflow } from "lucide-react"; -import { Button, buttonVariants } from "@/components/ui/button"; +import { Button } from "@/components/ui/button"; import React from "react"; import { Sheet, SheetContent, SheetTrigger } from "@/components/ui/sheet"; -import { Pencil1Icon, TimerIcon, ArchiveIcon } from "@radix-ui/react-icons"; -import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar"; import Image from "next/image"; import getServerUser from "@/hooks/getServerUser"; import ProfileDropdown from "./ProfileDropdown"; +import { + IconCircleUser, + IconMenu, + IconPackage2, + IconSquareActivity, + IconWorkFlow, +} from "@/components/ui/icons"; export async function NavBar() { const isAvailable = Boolean( @@ -32,7 +30,7 @@ export async function NavBar() { size="icon" className="shrink-0 md:hidden" > - + Toggle navigation menu @@ -42,19 +40,19 @@ export async function NavBar() { href="/monitor" className="text-muted-foreground hover:text-foreground flex flex-row gap-2 " > - Monitor + Monitor - Build + Build - Marketplace + Marketplace @@ -64,19 +62,19 @@ export async function NavBar() { href="/monitor" className="text-muted-foreground hover:text-foreground flex flex-row gap-2 items-center" > - Monitor + Monitor - Build + Build - Marketplace + Marketplace
@@ -104,7 +102,7 @@ export async function NavBar() { className="text-muted-foreground hover:text-foreground flex flex-row gap-2 items-center" > Log In - + )} {isAvailable && user && } diff --git a/rnd/autogpt_builder/src/components/NodeHandle.tsx b/rnd/autogpt_builder/src/components/NodeHandle.tsx index a61ac48777..b300929f88 100644 --- a/rnd/autogpt_builder/src/components/NodeHandle.tsx +++ b/rnd/autogpt_builder/src/components/NodeHandle.tsx @@ -1,4 +1,4 @@ -import { BlockIOSchema } from "@/lib/autogpt-server-api/types"; +import { BlockIOSubSchema } from "@/lib/autogpt-server-api/types"; import { beautifyString, getTypeBgColor, getTypeTextColor } from "@/lib/utils"; import { FC } from "react"; import { Handle, Position } from "reactflow"; @@ -6,7 +6,7 @@ import SchemaTooltip from "./SchemaTooltip"; type HandleProps = { keyName: string; - schema: BlockIOSchema; + schema: BlockIOSubSchema; isConnected: boolean; isRequired?: boolean; side: "left" | "right"; @@ -28,7 +28,7 @@ const NodeHandle: FC = ({ null: "null", }; - const typeClass = `text-sm ${getTypeTextColor(schema.type)} ${side === "left" ? "text-left" : "text-right"}`; + const typeClass = `text-sm ${getTypeTextColor(schema.type || "any")} ${side === "left" ? "text-left" : "text-right"}`; const label = (
@@ -36,13 +36,13 @@ const NodeHandle: FC = ({ {schema.title || beautifyString(keyName)} {isRequired ? "*" : ""} - {typeName[schema.type]} + {typeName[schema.type] || "any"}
); const dot = (
); @@ -53,7 +53,7 @@ const NodeHandle: FC = ({ type="target" position={Position.Left} id={keyName} - className="group -ml-[29px]" + className="group -ml-[26px]" >
{dot} @@ -70,7 +70,7 @@ const NodeHandle: FC = ({ type="source" position={Position.Right} id={keyName} - className="group -mr-[29px]" + className="group -mr-[26px]" >
{label} diff --git a/rnd/autogpt_builder/src/components/NodeInputField.tsx b/rnd/autogpt_builder/src/components/NodeInputField.tsx deleted file mode 100644 index 566ccecc94..0000000000 --- a/rnd/autogpt_builder/src/components/NodeInputField.tsx +++ /dev/null @@ -1,357 +0,0 @@ -import { Cross2Icon, PlusIcon } from "@radix-ui/react-icons"; -import { beautifyString } from "@/lib/utils"; -import { BlockIOSchema } from "@/lib/autogpt-server-api/types"; -import { FC, useState } from "react"; -import { Button } from "./ui/button"; -import { Input } from "./ui/input"; - -type BlockInputFieldProps = { - keyName: string; - schema: BlockIOSchema; - parentKey?: string; - value: string | Array | { [key: string]: string }; - handleInputClick: (key: string) => void; - handleInputChange: (key: string, value: any) => void; - errors?: { [key: string]: string } | string | null; -}; - -const NodeInputField: FC = ({ - keyName: key, - schema, - parentKey = "", - value, - handleInputClick, - handleInputChange, - errors, -}) => { - const fullKey = parentKey ? `${parentKey}.${key}` : key; - const error = typeof errors === "string" ? errors : (errors?.[key] ?? ""); - const displayKey = schema.title || beautifyString(key); - - const [keyValuePairs, _setKeyValuePairs] = useState< - { key: string; value: string }[] - >( - "additionalProperties" in schema && value - ? Object.entries(value).map(([key, value]) => ({ - key: key, - value: value, - })) - : [], - ); - - function setKeyValuePairs(newKVPairs: typeof keyValuePairs): void { - _setKeyValuePairs(newKVPairs); - handleInputChange( - fullKey, - newKVPairs.reduce( - (obj, { key, value }) => ({ ...obj, [key]: value }), - {}, - ), - ); - } - - const renderClickableInput = ( - value: string | null = null, - placeholder: string = "", - secret: boolean = false, - ) => { - const className = `clickable-input ${error ? "border-error" : ""}`; - - return secret ? ( -
handleInputClick(fullKey)}> - {value ? ( - ******** - ) : ( - {placeholder} - )} -
- ) : ( -
handleInputClick(fullKey)}> - {value || {placeholder}} -
- ); - }; - - if ("properties" in schema) { - return ( -
- {displayKey}: - {Object.entries(schema.properties).map(([propKey, propSchema]) => ( -
- -
- ))} -
- ); - } - - if (schema.type === "object" && schema.additionalProperties) { - return ( -
-
- {keyValuePairs.map(({ key, value }, index) => ( -
- - setKeyValuePairs( - keyValuePairs.toSpliced(index, 1, { - key: e.target.value, - value: value, - }), - ) - } - /> - - setKeyValuePairs( - keyValuePairs.toSpliced(index, 1, { - key: key, - value: e.target.value, - }), - ) - } - /> - -
- ))} - -
- {error && {error}} -
- ); - } - - if ("anyOf" in schema) { - const types = schema.anyOf.map((s) => ("type" in s ? s.type : undefined)); - if (types.includes("string") && types.includes("null")) { - return ( -
- {renderClickableInput( - value as string, - schema.placeholder || `Enter ${displayKey} (optional)`, - )} - {error && {error}} -
- ); - } - } - - if ("allOf" in schema) { - return ( -
- {displayKey}: - {"properties" in schema.allOf[0] && - Object.entries(schema.allOf[0].properties).map( - ([propKey, propSchema]) => ( -
- -
- ), - )} -
- ); - } - - if ("oneOf" in schema) { - return ( -
- {displayKey}: - {"properties" in schema.oneOf[0] && - Object.entries(schema.oneOf[0].properties).map( - ([propKey, propSchema]) => ( -
- -
- ), - )} -
- ); - } - - if (!("type" in schema)) { - console.warn(`Schema for input ${key} does not specify a type:`, schema); - return ( -
- {renderClickableInput( - value as string, - schema.placeholder || `Enter ${beautifyString(displayKey)} (Complex)`, - )} - {error && {error}} -
- ); - } - - switch (schema.type) { - case "string": - if (schema.enum) { - return ( -
- - {error && {error}} -
- ); - } - - if (schema.secret) { - return ( -
- {renderClickableInput( - value as string, - schema.placeholder || `Enter ${displayKey}`, - true, - )} - {error && {error}} -
- ); - } - - return ( -
- {renderClickableInput( - value as string, - schema.placeholder || `Enter ${displayKey}`, - )} - {error && {error}} -
- ); - case "boolean": - return ( -
- - {error && {error}} -
- ); - case "number": - case "integer": - return ( -
- - handleInputChange(fullKey, parseFloat(e.target.value)) - } - className={`number-input ${error ? "border-error" : ""}`} - /> - {error && {error}} -
- ); - case "array": - if (schema.items && schema.items.type === "string") { - const arrayValues = (value as Array) || []; - return ( -
- {arrayValues.map((item: string, index: number) => ( -
- - handleInputChange(`${fullKey}.${index}`, e.target.value) - } - className="array-item-input" - /> - -
- ))} - - {error && {error}} -
- ); - } - return null; - default: - console.warn(`Schema for input ${key} specifies unknown type:`, schema); - return ( -
- {renderClickableInput( - value as string, - schema.placeholder || - `Enter ${beautifyString(displayKey)} (Complex)`, - )} - {error && {error}} -
- ); - } -}; - -export default NodeInputField; diff --git a/rnd/autogpt_builder/src/components/SchemaTooltip.tsx b/rnd/autogpt_builder/src/components/SchemaTooltip.tsx index aa2131a6a3..e16db7508a 100644 --- a/rnd/autogpt_builder/src/components/SchemaTooltip.tsx +++ b/rnd/autogpt_builder/src/components/SchemaTooltip.tsx @@ -4,11 +4,11 @@ import { TooltipProvider, TooltipTrigger, } from "@/components/ui/tooltip"; -import { BlockIOSchema } from "@/lib/autogpt-server-api/types"; +import { BlockIOSubSchema } from "@/lib/autogpt-server-api/types"; import { Info } from "lucide-react"; import ReactMarkdown from "react-markdown"; -const SchemaTooltip: React.FC<{ schema: BlockIOSchema }> = ({ schema }) => { +const SchemaTooltip: React.FC<{ schema: BlockIOSubSchema }> = ({ schema }) => { if (!schema.description) return null; return ( diff --git a/rnd/autogpt_builder/src/components/TallyPopup.tsx b/rnd/autogpt_builder/src/components/TallyPopup.tsx new file mode 100644 index 0000000000..0c9c179362 --- /dev/null +++ b/rnd/autogpt_builder/src/components/TallyPopup.tsx @@ -0,0 +1,59 @@ +"use client"; +import React, { useEffect, useState } from "react"; +import { Button } from "./ui/button"; +import { IconMegaphone } from "@/components/ui/icons"; + +const TallyPopupSimple = () => { + const [isFormVisible, setIsFormVisible] = useState(false); + + useEffect(() => { + // Load Tally script + const script = document.createElement("script"); + script.src = "https://tally.so/widgets/embed.js"; + script.async = true; + document.head.appendChild(script); + + // Setup event listeners for Tally events + const handleTallyMessage = (event: MessageEvent) => { + if (typeof event.data === "string") { + try { + const data = JSON.parse(event.data); + if (data.event === "Tally.FormLoaded") { + setIsFormVisible(true); + } else if (data.event === "Tally.PopupClosed") { + setIsFormVisible(false); + } + } catch (error) { + console.error("Error parsing Tally message:", error); + } + } + }; + + window.addEventListener("message", handleTallyMessage); + + return () => { + document.head.removeChild(script); + window.removeEventListener("message", handleTallyMessage); + }; + }, []); + + if (isFormVisible) { + return null; // Hide the button when the form is visible + } + + return ( +
+ +
+ ); +}; + +export default TallyPopupSimple; diff --git a/rnd/autogpt_builder/src/components/customnode.css b/rnd/autogpt_builder/src/components/customnode.css index c6968fed73..1dcf75b789 100644 --- a/rnd/autogpt_builder/src/components/customnode.css +++ b/rnd/autogpt_builder/src/components/customnode.css @@ -1,5 +1,5 @@ .custom-node { - padding: 15px; + @apply p-3; border: 3px solid #4b5563; border-radius: 12px; background: #ffffff; @@ -9,13 +9,6 @@ transition: border-color 0.3s ease-in-out; } -.node-content { - display: flex; - justify-content: space-between; - align-items: flex-start; - gap: 1px; -} - .custom-node .mb-2 { display: flex; justify-content: space-between; @@ -30,45 +23,6 @@ margin-right: 10px; } -.node-actions { - display: flex; - gap: 5px; -} - -.node-action-button { - width: 32px; - /* Increased size */ - height: 32px; - /* Increased size */ - display: flex; - align-items: center; - justify-content: center; - background-color: #f3f4f6; - /* Light gray background */ - border: 1px solid #d1d5db; - /* Light border */ - border-radius: 6px; - color: #4b5563; - transition: all 0.2s ease-in-out; - cursor: pointer; -} - -.node-action-button:hover { - background-color: #e5e7eb; - color: #1f2937; -} - -.node-action-button:focus { - outline: none; - box-shadow: 0 0 0 2px rgba(59, 130, 246, 0.5); -} - -.node-action-button svg { - width: 18px; - /* Increased icon size */ - height: 18px; - /* Increased icon size */ -} /* Existing styles */ .handle-container { display: flex; @@ -89,38 +43,10 @@ transform: none; } -.input-container { - margin-bottom: 5px; -} - -.clickable-input { - padding: 5px; - width: 325px; - border-radius: 4px; - background: #ffffff; - border: 1px solid #d1d1d1; - color: #000000; - cursor: pointer; - word-break: break-all; - white-space: nowrap; - overflow: hidden; - text-overflow: ellipsis; - position: relative; -} - .border-error { border: 1px solid #d9534f; } -.clickable-input span { - display: inline-block; - white-space: nowrap; - overflow: hidden; - text-overflow: ellipsis; - max-width: calc(100% - 100px); - vertical-align: middle; -} - .select-input { width: 100%; padding: 5px; @@ -191,29 +117,9 @@ .error-message { color: #d9534f; - font-size: 12px; + font-size: 13px; margin-top: 5px; -} - -.object-input { - margin-left: 10px; - border-left: 1px solid #000; /* Border for nested inputs */ - padding-left: 10px; -} - -.nested-input { - margin-top: 5px; -} - -.key-value-input { - display: flex; - gap: 5px; - align-items: center; - margin-bottom: 5px; -} - -.key-value-input input { - flex-grow: 1; + margin-left: 5px; } /* Styles for node states */ @@ -240,3 +146,13 @@ .custom-switch { padding-left: 2px; } + +input[type="number"]::-webkit-outer-spin-button, +input[type="number"]::-webkit-inner-spin-button { + -webkit-appearance: none; + margin: 0; +} + +input[type="number"] { + -moz-appearance: textfield; +} diff --git a/rnd/autogpt_builder/src/components/edit/ControlPanel.tsx b/rnd/autogpt_builder/src/components/edit/ControlPanel.tsx deleted file mode 100644 index 93d6d9acba..0000000000 --- a/rnd/autogpt_builder/src/components/edit/ControlPanel.tsx +++ /dev/null @@ -1,65 +0,0 @@ -import { Card, CardContent } from "@/components/ui/card"; -import { - Tooltip, - TooltipContent, - TooltipTrigger, -} from "@/components/ui/tooltip"; -import { Button } from "@/components/ui/button"; -import { Separator } from "@/components/ui/separator"; -import React from "react"; - -/** - * Represents a control element for the ControlPanel Component. 
- * @type {Object} Control - * @property {React.ReactNode} icon - The icon of the control from lucide-react https://lucide.dev/icons/ - * @property {string} label - The label of the control, to be leveraged by ToolTip. - * @property {onclick} onClick - The function to be executed when the control is clicked. - */ -export type Control = { - icon: React.ReactNode; - label: string; - onClick: () => void; -}; - -interface ControlPanelProps { - controls: Control[]; - children?: React.ReactNode; -} - -/** - * ControlPanel component displays a panel with controls as icons with the ability to take in children. - * @param {Object} ControlPanelProps - The properties of the control panel component. - * @param {Array} ControlPanelProps.controls - An array of control objects representing actions to be preformed. - * @param {Array} ControlPanelProps.children - The child components of the control panel. - * @returns The rendered control panel component. - */ -export const ControlPanel = ({ controls, children }: ControlPanelProps) => { - return ( - - ); -}; -export default ControlPanel; diff --git a/rnd/autogpt_builder/src/components/edit/control/BlocksControl.tsx b/rnd/autogpt_builder/src/components/edit/control/BlocksControl.tsx index 4a834a13c8..3817799aeb 100644 --- a/rnd/autogpt_builder/src/components/edit/control/BlocksControl.tsx +++ b/rnd/autogpt_builder/src/components/edit/control/BlocksControl.tsx @@ -13,6 +13,7 @@ import { } from "@/components/ui/popover"; import { Block } from "@/lib/autogpt-server-api"; import { PlusIcon } from "@radix-ui/react-icons"; +import { IconToyBrick } from "@/components/ui/icons"; interface BlocksControlProps { blocks: Block[]; @@ -40,12 +41,14 @@ export const BlocksControl: React.FC = ({ return ( - - + + diff --git a/rnd/autogpt_builder/src/components/edit/control/ControlPanel.tsx b/rnd/autogpt_builder/src/components/edit/control/ControlPanel.tsx index c129579385..e4600eae25 100644 --- a/rnd/autogpt_builder/src/components/edit/control/ControlPanel.tsx +++ b/rnd/autogpt_builder/src/components/edit/control/ControlPanel.tsx @@ -6,6 +6,7 @@ import { } from "@/components/ui/tooltip"; import { Button } from "@/components/ui/button"; import { Separator } from "@/components/ui/separator"; +import { cn } from "@/lib/utils"; import React from "react"; /** @@ -24,42 +25,46 @@ export type Control = { interface ControlPanelProps { controls: Control[]; children?: React.ReactNode; + className?: string; } /** - * ControlPanel component displays a panel with controls as icons with the ability to take in children. + * ControlPanel component displays a panel with controls as icons.tsx with the ability to take in children. * @param {Object} ControlPanelProps - The properties of the control panel component. * @param {Array} ControlPanelProps.controls - An array of control objects representing actions to be preformed. * @param {Array} ControlPanelProps.children - The child components of the control panel. + * @param {string} ControlPanelProps.className - Additional CSS class names for the control panel. * @returns The rendered control panel component. */ -export const ControlPanel = ({ controls, children }: ControlPanelProps) => { +export const ControlPanel = ({ + controls, + children, + className, +}: ControlPanelProps) => { return ( - + + +
+ {children} + + {controls.map((control, index) => ( + + + + + {control.label} + + ))} +
+
+
); }; export default ControlPanel; diff --git a/rnd/autogpt_builder/src/components/edit/control/SaveControl.tsx b/rnd/autogpt_builder/src/components/edit/control/SaveControl.tsx index 6b7789fb6e..2cdd10f96d 100644 --- a/rnd/autogpt_builder/src/components/edit/control/SaveControl.tsx +++ b/rnd/autogpt_builder/src/components/edit/control/SaveControl.tsx @@ -9,7 +9,7 @@ import { Input } from "@/components/ui/input"; import { Button } from "@/components/ui/button"; import { GraphMeta } from "@/lib/autogpt-server-api"; import { Label } from "@/components/ui/label"; -import { Save } from "lucide-react"; +import { IconSave } from "@/components/ui/icons"; interface SaveControlProps { agentMeta: GraphMeta | null; @@ -51,8 +51,10 @@ export const SaveControl = ({ return ( - - + + diff --git a/rnd/autogpt_builder/src/components/flow.css b/rnd/autogpt_builder/src/components/flow.css index d74b8ddeea..830099e750 100644 --- a/rnd/autogpt_builder/src/components/flow.css +++ b/rnd/autogpt_builder/src/components/flow.css @@ -11,20 +11,6 @@ code { monospace; } -button { - background-color: #ffffff; - color: #000000; - padding: 10px; - border: none; - border-radius: 4px; - cursor: pointer; - transition: background-color 0.3s ease; -} - -button:hover { - background-color: #666; -} - input, textarea { background-color: #ffffff; @@ -128,24 +114,3 @@ textarea::placeholder { width: 100%; height: 600px; /* Adjust this height as needed */ } - -.flow-wrapper { - height: 100%; - width: 100%; - display: flex; - justify-content: center; - align-items: center; -} - -.flow-controls { - position: absolute; - left: -80px; - z-index: 1001; - display: flex; - gap: 10px; - transition: transform 0.3s ease; -} - -.flow-controls.open { - transform: translateX(350px); -} diff --git a/rnd/autogpt_builder/src/components/monitor/AgentFlowList.tsx b/rnd/autogpt_builder/src/components/monitor/AgentFlowList.tsx new file mode 100644 index 0000000000..0acf6ad5e3 --- /dev/null +++ b/rnd/autogpt_builder/src/components/monitor/AgentFlowList.tsx @@ -0,0 +1,187 @@ +import AutoGPTServerAPI, { GraphMeta } from "@/lib/autogpt-server-api"; +import React, { useEffect, useState } from "react"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Button } from "@/components/ui/button"; +import Link from "next/link"; +import { + Dialog, + DialogContent, + DialogHeader, + DialogTrigger, +} from "@/components/ui/dialog"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuLabel, + DropdownMenuSeparator, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; +import { ChevronDownIcon, EnterIcon } from "@radix-ui/react-icons"; +import { AgentImportForm } from "@/components/agent-import-form"; +import { + Table, + TableBody, + TableCell, + TableHead, + TableHeader, + TableRow, +} from "@/components/ui/table"; +import moment from "moment/moment"; +import { FlowRun } from "@/lib/types"; + +export const AgentFlowList = ({ + flows, + flowRuns, + selectedFlow, + onSelectFlow, + className, +}: { + flows: GraphMeta[]; + flowRuns?: FlowRun[]; + selectedFlow: GraphMeta | null; + onSelectFlow: (f: GraphMeta) => void; + className?: string; +}) => { + const [templates, setTemplates] = useState([]); + const api = new AutoGPTServerAPI(); + useEffect(() => { + api.listTemplates().then((templates) => setTemplates(templates)); + }, []); + + return ( + + + Agents + +
+ {/* Split "Create" button */} + + + {/* https://ui.shadcn.com/docs/components/dialog#notes */} + + + + + + + + + Import from file + + + {templates.length > 0 && ( + <> + {/* List of templates */} + + Use a template + {templates.map((template) => ( + { + api + .createGraph(template.id, template.version) + .then((newGraph) => { + window.location.href = `/build?flowID=${newGraph.id}`; + }); + }} + > + {template.name} + + ))} + + )} + + + + + + Import an Agent (template) from a file + + + + +
+
+ + + + + + Name + {/* Status */} + {/* Last updated */} + {flowRuns && ( + + # of runs + + )} + {flowRuns && Last run} + + + + {flows + .map((flow) => { + let runCount = 0, + lastRun: FlowRun | null = null; + if (flowRuns) { + const _flowRuns = flowRuns.filter( + (r) => r.graphID == flow.id, + ); + runCount = _flowRuns.length; + lastRun = + runCount == 0 + ? null + : _flowRuns.reduce((a, c) => + a.startTime > c.startTime ? a : c, + ); + } + return { flow, runCount, lastRun }; + }) + .sort((a, b) => { + if (!a.lastRun && !b.lastRun) return 0; + if (!a.lastRun) return 1; + if (!b.lastRun) return -1; + return b.lastRun.startTime - a.lastRun.startTime; + }) + .map(({ flow, runCount, lastRun }) => ( + onSelectFlow(flow)} + data-state={selectedFlow?.id == flow.id ? "selected" : null} + > + {flow.name} + {/* */} + {/* + {flow.updatedAt ?? "???"} + */} + {flowRuns && ( + + {runCount} + + )} + {flowRuns && + (!lastRun ? ( + + ) : ( + + {moment(lastRun.startTime).fromNow()} + + ))} + + ))} + +
+
+
+ ); +}; +export default AgentFlowList; diff --git a/rnd/autogpt_builder/src/components/monitor/FlowInfo.tsx b/rnd/autogpt_builder/src/components/monitor/FlowInfo.tsx new file mode 100644 index 0000000000..0ec6d2187b --- /dev/null +++ b/rnd/autogpt_builder/src/components/monitor/FlowInfo.tsx @@ -0,0 +1,134 @@ +import React, { useEffect, useState } from "react"; +import AutoGPTServerAPI, { + Graph, + GraphMeta, + safeCopyGraph, +} from "@/lib/autogpt-server-api"; +import { FlowRun } from "@/lib/types"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuLabel, + DropdownMenuRadioGroup, + DropdownMenuRadioItem, + DropdownMenuSeparator, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; +import { Button, buttonVariants } from "@/components/ui/button"; +import { ClockIcon, ExitIcon, Pencil2Icon } from "@radix-ui/react-icons"; +import Link from "next/link"; +import { exportAsJSONFile } from "@/lib/utils"; +import { FlowRunsStats } from "@/components/monitor/index"; + +export const FlowInfo: React.FC< + React.HTMLAttributes & { + flow: GraphMeta; + flowRuns: FlowRun[]; + flowVersion?: number | "all"; + } +> = ({ flow, flowRuns, flowVersion, ...props }) => { + const api = new AutoGPTServerAPI(); + + const [flowVersions, setFlowVersions] = useState(null); + const [selectedVersion, setSelectedFlowVersion] = useState( + flowVersion ?? "all", + ); + const selectedFlowVersion: Graph | undefined = flowVersions?.find( + (v) => + v.version == (selectedVersion == "all" ? flow.version : selectedVersion), + ); + + useEffect(() => { + api.getGraphAllVersions(flow.id).then((result) => setFlowVersions(result)); + }, [flow.id]); + + return ( + + +
+ + {flow.name} v{flow.version} + +

+ Agent ID: {flow.id} +

+
+
+ {(flowVersions?.length ?? 0) > 1 && ( + + + + + + Choose a version + + + setSelectedFlowVersion( + choice == "all" ? choice : Number(choice), + ) + } + > + + All versions + + {flowVersions?.map((v) => ( + + Version {v.version} + {v.is_active ? " (active)" : ""} + + ))} + + + + )} + + Edit + + +
+
+ + + r.graphID == flow.id && + (selectedVersion == "all" || r.graphVersion == selectedVersion), + )} + /> + +
+ ); +}; +export default FlowInfo; diff --git a/rnd/autogpt_builder/src/components/monitor/FlowRunInfo.tsx b/rnd/autogpt_builder/src/components/monitor/FlowRunInfo.tsx new file mode 100644 index 0000000000..1ace5207ce --- /dev/null +++ b/rnd/autogpt_builder/src/components/monitor/FlowRunInfo.tsx @@ -0,0 +1,66 @@ +import React from "react"; +import { GraphMeta } from "@/lib/autogpt-server-api"; +import { FlowRun } from "@/lib/types"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import Link from "next/link"; +import { buttonVariants } from "@/components/ui/button"; +import { Pencil2Icon } from "@radix-ui/react-icons"; +import moment from "moment/moment"; +import { FlowRunStatusBadge } from "@/components/monitor/FlowRunStatusBadge"; + +export const FlowRunInfo: React.FC< + React.HTMLAttributes & { + flow: GraphMeta; + flowRun: FlowRun; + } +> = ({ flow, flowRun, ...props }) => { + if (flowRun.graphID != flow.id) { + throw new Error( + `FlowRunInfo can't be used with non-matching flowRun.flowID and flow.id`, + ); + } + + return ( + + +
+ + {flow.name} v{flow.version} + +

+ Agent ID: {flow.id} +

+

+ Run ID: {flowRun.id} +

+
+ + Edit Agent + +
+ +

+ Status:{" "} + +

+

+ Started:{" "} + {moment(flowRun.startTime).format("YYYY-MM-DD HH:mm:ss")} +

+

+ Finished:{" "} + {moment(flowRun.endTime).format("YYYY-MM-DD HH:mm:ss")} +

+

+ Duration (run time): {flowRun.duration} ( + {flowRun.totalRunTime}) seconds +

+ {/*

Total cost: €1,23

*/} +
+
+ ); +}; +export default FlowRunInfo; diff --git a/rnd/autogpt_builder/src/components/monitor/FlowRunStatusBadge.tsx b/rnd/autogpt_builder/src/components/monitor/FlowRunStatusBadge.tsx new file mode 100644 index 0000000000..f054782223 --- /dev/null +++ b/rnd/autogpt_builder/src/components/monitor/FlowRunStatusBadge.tsx @@ -0,0 +1,25 @@ +import React from "react"; +import { FlowRun } from "@/lib/types"; +import { Badge } from "@/components/ui/badge"; +import { cn } from "@/lib/utils"; + +export const FlowRunStatusBadge: React.FC<{ + status: FlowRun["status"]; + className?: string; +}> = ({ status, className }) => ( + + {status} + +); diff --git a/rnd/autogpt_builder/src/components/monitor/FlowRunsList.tsx b/rnd/autogpt_builder/src/components/monitor/FlowRunsList.tsx new file mode 100644 index 0000000000..ed29355560 --- /dev/null +++ b/rnd/autogpt_builder/src/components/monitor/FlowRunsList.tsx @@ -0,0 +1,68 @@ +import React from "react"; +import { GraphMeta } from "@/lib/autogpt-server-api"; +import { FlowRun } from "@/lib/types"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { + Table, + TableBody, + TableCell, + TableHead, + TableHeader, + TableRow, +} from "@/components/ui/table"; +import moment from "moment/moment"; +import { FlowRunStatusBadge } from "@/components/monitor/FlowRunStatusBadge"; + +export const FlowRunsList: React.FC<{ + flows: GraphMeta[]; + runs: FlowRun[]; + className?: string; + selectedRun?: FlowRun | null; + onSelectRun: (r: FlowRun) => void; +}> = ({ flows, runs, selectedRun, onSelectRun, className }) => ( + + + Runs + + + + + + Agent + Started + Status + Duration + + + + {runs.map((run) => ( + onSelectRun(run)} + data-state={selectedRun?.id == run.id ? "selected" : null} + > + + {flows.find((f) => f.id == run.graphID)!.name} + + {moment(run.startTime).format("HH:mm")} + + + + {formatDuration(run.duration)} + + ))} + +
+
+
+); + +function formatDuration(seconds: number): string { + return ( + (seconds < 100 ? seconds.toPrecision(2) : Math.round(seconds)).toString() + + "s" + ); +} + +export default FlowRunsList; diff --git a/rnd/autogpt_builder/src/components/monitor/FlowRunsStatus.tsx b/rnd/autogpt_builder/src/components/monitor/FlowRunsStatus.tsx new file mode 100644 index 0000000000..cf46134942 --- /dev/null +++ b/rnd/autogpt_builder/src/components/monitor/FlowRunsStatus.tsx @@ -0,0 +1,114 @@ +import React, { useState } from "react"; +import { GraphMeta } from "@/lib/autogpt-server-api"; +import { FlowRun } from "@/lib/types"; +import { CardTitle } from "@/components/ui/card"; +import { Button } from "@/components/ui/button"; +import { + Popover, + PopoverContent, + PopoverTrigger, +} from "@/components/ui/popover"; +import { Calendar } from "@/components/ui/calendar"; +import { FlowRunsTimeline } from "@/components/monitor/FlowRunsTimeline"; + +export const FlowRunsStatus: React.FC<{ + flows: GraphMeta[]; + flowRuns: FlowRun[]; + title?: string; + className?: string; +}> = ({ flows, flowRuns, title, className }) => { + /* "dateMin": since the first flow in the dataset + * number > 0: custom date (unix timestamp) + * number < 0: offset relative to Date.now() (in seconds) */ + const [statsSince, setStatsSince] = useState(-24 * 3600); + const statsSinceTimestamp = // unix timestamp or null + typeof statsSince == "string" + ? null + : statsSince < 0 + ? Date.now() + statsSince * 1000 + : statsSince; + const filteredFlowRuns = + statsSinceTimestamp != null + ? flowRuns.filter((fr) => fr.startTime > statsSinceTimestamp) + : flowRuns; + + return ( +
+
+ {title || "Stats"} +
+ + + + + + + + + + + setStatsSince(selectedDay.getTime()) + } + initialFocus + /> + + + +
+
+ +
+
+

+ Total runs: {filteredFlowRuns.length} +

+

+ Total run time:{" "} + {filteredFlowRuns.reduce((total, run) => total + run.totalRunTime, 0)}{" "} + seconds +

+ {/*

Total cost: €1,23

*/} +
+
+ ); +}; +export default FlowRunsStatus; diff --git a/rnd/autogpt_builder/src/components/monitor/FlowRunsTimeline.tsx b/rnd/autogpt_builder/src/components/monitor/FlowRunsTimeline.tsx new file mode 100644 index 0000000000..c094216f49 --- /dev/null +++ b/rnd/autogpt_builder/src/components/monitor/FlowRunsTimeline.tsx @@ -0,0 +1,172 @@ +import { GraphMeta } from "@/lib/autogpt-server-api"; +import { + ComposedChart, + DefaultLegendContentProps, + Legend, + Line, + ResponsiveContainer, + Scatter, + Tooltip, + XAxis, + YAxis, +} from "recharts"; +import moment from "moment/moment"; +import { Card } from "@/components/ui/card"; +import { cn, hashString } from "@/lib/utils"; +import React from "react"; +import { FlowRun } from "@/lib/types"; +import { FlowRunStatusBadge } from "@/components/monitor/FlowRunStatusBadge"; + +export const FlowRunsTimeline = ({ + flows, + flowRuns, + dataMin, + className, +}: { + flows: GraphMeta[]; + flowRuns: FlowRun[]; + dataMin: "dataMin" | number; + className?: string; +}) => ( + /* TODO: make logarithmic? */ + + + { + const now = moment(); + const time = moment(unixTime); + return now.diff(time, "hours") < 24 + ? time.format("HH:mm") + : time.format("YYYY-MM-DD HH:mm"); + }} + name="Time" + scale="time" + /> + (s > 90 ? `${Math.round(s / 60)}m` : `${s}s`)} + /> + { + if (payload && payload.length) { + const data: FlowRun & { time: number; _duration: number } = + payload[0].payload; + const flow = flows.find((f) => f.id === data.graphID); + return ( + +

+ Agent: {flow ? flow.name : "Unknown"} +

+

+ Status:  + +

+

+ Started:{" "} + {moment(data.startTime).format("YYYY-MM-DD HH:mm:ss")} +

+

+ Duration / run time:{" "} + {formatDuration(data.duration)} /{" "} + {formatDuration(data.totalRunTime)} +

+
+ ); + } + return null; + }} + /> + {flows.map((flow) => ( + fr.graphID == flow.id) + .map((fr) => ({ + ...fr, + time: fr.startTime + fr.totalRunTime * 1000, + _duration: fr.totalRunTime, + }))} + name={flow.name} + fill={`hsl(${(hashString(flow.id) * 137.5) % 360}, 70%, 50%)`} + /> + ))} + {flowRuns.map((run) => ( + + ))} + } + wrapperStyle={{ + bottom: 0, + left: 0, + right: 0, + width: "100%", + display: "flex", + justifyContent: "center", + }} + /> +
+
+); + +export default FlowRunsTimeline; + +const ScrollableLegend: React.FC< + DefaultLegendContentProps & { className?: string } +> = ({ payload, className }) => { + return ( +
+ {payload?.map((entry, index) => { + if (entry.type == "none") return; + return ( + + + {entry.value} + + ); + })} +
+ ); +}; + +function formatDuration(seconds: number): string { + return ( + (seconds < 100 ? seconds.toPrecision(2) : Math.round(seconds)).toString() + + "s" + ); +} diff --git a/rnd/autogpt_builder/src/components/monitor/index.ts b/rnd/autogpt_builder/src/components/monitor/index.ts new file mode 100644 index 0000000000..0f8f80287c --- /dev/null +++ b/rnd/autogpt_builder/src/components/monitor/index.ts @@ -0,0 +1,6 @@ +export { default as AgentFlowList } from "./AgentFlowList"; +export { default as FlowRunsList } from "./FlowRunsList"; +export { default as FlowInfo } from "./FlowInfo"; +export { default as FlowRunInfo } from "./FlowRunInfo"; +export { default as FlowRunsStats } from "./FlowRunsStatus"; +export { default as FlowRunsTimeline } from "./FlowRunsTimeline"; diff --git a/rnd/autogpt_builder/src/components/node-input-components.tsx b/rnd/autogpt_builder/src/components/node-input-components.tsx new file mode 100644 index 0000000000..6229962c3f --- /dev/null +++ b/rnd/autogpt_builder/src/components/node-input-components.tsx @@ -0,0 +1,616 @@ +import { Cross2Icon, Pencil2Icon, PlusIcon } from "@radix-ui/react-icons"; +import { beautifyString, cn } from "@/lib/utils"; +import { + BlockIORootSchema, + BlockIOSubSchema, + BlockIOObjectSubSchema, + BlockIOKVSubSchema, + BlockIOArraySubSchema, + BlockIOStringSubSchema, + BlockIONumberSubSchema, + BlockIOBooleanSubSchema, +} from "@/lib/autogpt-server-api/types"; +import { FC, useState } from "react"; +import { Button } from "./ui/button"; +import { Switch } from "./ui/switch"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "./ui/select"; +import { Input } from "./ui/input"; + +type NodeObjectInputTreeProps = { + selfKey?: string; + schema: BlockIORootSchema | BlockIOObjectSubSchema; + object?: { [key: string]: any }; + handleInputClick: (key: string) => void; + handleInputChange: (key: string, value: any) => void; + errors: { [key: string]: string | undefined }; + className?: string; + displayName?: string; +}; + +const NodeObjectInputTree: FC = ({ + selfKey = "", + schema, + object, + handleInputClick, + handleInputChange, + errors, + className, + displayName, +}) => { + object ??= ("default" in schema ? schema.default : null) ?? {}; + return ( +
+ {displayName && {displayName}} + {Object.entries(schema.properties).map(([propKey, propSchema]) => { + const childKey = selfKey ? `${selfKey}.${propKey}` : propKey; + + return ( +
+ + {propSchema.title || beautifyString(propKey)} + + +
+ ); + })} +
+ ); +}; + +export default NodeObjectInputTree; + +export const NodeGenericInputField: FC<{ + propKey: string; + propSchema: BlockIOSubSchema; + currentValue?: any; + errors: NodeObjectInputTreeProps["errors"]; + handleInputChange: NodeObjectInputTreeProps["handleInputChange"]; + handleInputClick: NodeObjectInputTreeProps["handleInputClick"]; + className?: string; + displayName?: string; +}> = ({ + propKey, + propSchema, + currentValue, + errors, + handleInputChange, + handleInputClick, + className, + displayName, +}) => { + displayName ??= propSchema.title || beautifyString(propKey); + + if ("allOf" in propSchema) { + // If this happens, that is because Pydantic wraps $refs in an allOf if the + // $ref has sibling schema properties (which isn't technically allowed), + // so there will only be one item in allOf[]. + // AFAIK this should NEVER happen though, as $refs are resolved server-side. + propSchema = propSchema.allOf[0]; + console.warn(`Unsupported 'allOf' in schema for '${propKey}'!`, propSchema); + } + + if ("properties" in propSchema) { + return ( + + ); + } + + if ("additionalProperties" in propSchema) { + return ( + + ); + } + + if ("anyOf" in propSchema) { + // optional items + const types = propSchema.anyOf.map((s) => + "type" in s ? s.type : undefined, + ); + if (types.includes("string") && types.includes("null")) { + // optional string + return ( + + ); + } + } + + if ("oneOf" in propSchema) { + // At the time of writing, this isn't used in the backend -> no impl. needed + console.error( + `Unsupported 'oneOf' in schema for '${propKey}'!`, + propSchema, + ); + return null; + } + + if (!("type" in propSchema)) { + return ( + + ); + } + + switch (propSchema.type) { + case "string": + return ( + + ); + case "boolean": + return ( + + ); + case "number": + case "integer": + return ( + + ); + case "array": + return ( + + ); + default: + console.warn( + `Schema for '${propKey}' specifies unknown type:`, + propSchema, + ); + return ( + + ); + } +}; + +const NodeKeyValueInput: FC<{ + selfKey: string; + schema: BlockIOKVSubSchema; + entries?: { [key: string]: string } | { [key: string]: number }; + errors: { [key: string]: string | undefined }; + handleInputChange: NodeObjectInputTreeProps["handleInputChange"]; + className?: string; + displayName?: string; +}> = ({ + selfKey, + entries, + schema, + handleInputChange, + errors, + className, + displayName, +}) => { + const [keyValuePairs, setKeyValuePairs] = useState< + { + key: string; + value: string | number | null; + }[] + >( + Object.entries(entries ?? schema.default ?? {}).map(([key, value]) => ({ + key, + value: value, + })), + ); + + function updateKeyValuePairs(newPairs: typeof keyValuePairs) { + setKeyValuePairs(newPairs); + handleInputChange( + selfKey, + newPairs.reduce((obj, { key, value }) => ({ ...obj, [key]: value }), {}), + ); + } + + function convertValueType(value: string): string | number | null { + if (schema.additionalProperties.type == "string") return value; + if (!value) return null; + return Number(value); + } + + return ( +
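The key/value editor defined above keeps its working state as an array of { key, value } pairs and only flattens it into a plain object when reporting the change upstream via handleInputChange; convertValueType coerces the raw input string to a number when the schema's additionalProperties type is numeric. A minimal sketch of that flattening step, using illustrative sample data:

  const pairs = [
    { key: "retries", value: "3" },
    { key: "timeout", value: "30" },
  ];
  // Coerced with Number() because additionalProperties.type is not "string":
  const flattened = pairs.reduce(
    (obj, { key, value }) => ({ ...obj, [key]: Number(value) }),
    {} as Record<string, number>,
  ); // => { retries: 3, timeout: 30 }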
+ {displayName && {displayName}} +
+ {keyValuePairs.map(({ key, value }, index) => ( +
+
+ + updateKeyValuePairs( + keyValuePairs.toSpliced(index, 1, { + key: e.target.value, + value: value, + }), + ) + } + /> + + updateKeyValuePairs( + keyValuePairs.toSpliced(index, 1, { + key: key, + value: convertValueType(e.target.value), + }), + ) + } + /> + +
+ {errors[`${selfKey}.${key}`] && ( + + {errors[`${selfKey}.${key}`]} + + )} +
+ ))} + +
+ {errors[selfKey] && ( + {errors[selfKey]} + )} +
+ ); +}; + +const NodeArrayInput: FC<{ + selfKey: string; + schema: BlockIOArraySubSchema; + entries?: string[]; + errors: { [key: string]: string | undefined }; + handleInputChange: NodeObjectInputTreeProps["handleInputChange"]; + handleInputClick: NodeObjectInputTreeProps["handleInputClick"]; + className?: string; + displayName?: string; +}> = ({ + selfKey, + schema, + entries, + errors, + handleInputChange, + handleInputClick, + className, + displayName, +}) => { + entries ??= schema.default ?? []; + const isItemObject = "items" in schema && "properties" in schema.items!; + const error = + typeof errors[selfKey] === "string" ? errors[selfKey] : undefined; + return ( +
+ {displayName && {displayName}} + {entries.map((entry: any, index: number) => { + const entryKey = `${selfKey}[${index}]`; + return ( +
+
+ {schema.items ? ( + + ) : ( + + )} + +
+ {errors[entryKey] && typeof errors[entryKey] === "string" && ( + {errors[entryKey]} + )} +
+ ); + })} + + {error && {error}} +
+ ); +}; + +const NodeStringInput: FC<{ + selfKey: string; + schema: BlockIOStringSubSchema; + value?: string; + error?: string; + handleInputChange: NodeObjectInputTreeProps["handleInputChange"]; + handleInputClick: NodeObjectInputTreeProps["handleInputClick"]; + className?: string; + displayName: string; +}> = ({ + selfKey, + schema, + value, + error, + handleInputChange, + handleInputClick, + className, + displayName, +}) => { + return ( +
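The array input that closes above reports validation errors per element by composing bracketed keys from its own key and the element index, and nested object entries extend that path with a dot-separated property key. A small sketch of the key composition (the field names are hypothetical):

  const entryKey = (selfKey: string, index: number): string => `${selfKey}[${index}]`;
  entryKey("headers", 0); // "headers[0]"  -> looked up as errors["headers[0]"]
  `${entryKey("headers", 0)}.name`; // "headers[0].name" for a nested object property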
+ {schema.enum ? ( + + ) : ( +
handleInputClick(selfKey) : undefined} + > + handleInputChange(selfKey, e.target.value)} + className="pr-8 read-only:cursor-pointer read-only:text-gray-500" + /> + +
+ )} + {error && {error}} +
+ ); +}; + +const NodeNumberInput: FC<{ + selfKey: string; + schema: BlockIONumberSubSchema; + value?: number; + error?: string; + handleInputChange: NodeObjectInputTreeProps["handleInputChange"]; + className?: string; + displayName?: string; +}> = ({ + selfKey, + schema, + value, + error, + handleInputChange, + className, + displayName, +}) => { + value ??= schema.default; + displayName ??= schema.title || beautifyString(selfKey); + return ( +
+
+ + handleInputChange(selfKey, parseFloat(e.target.value)) + } + placeholder={ + schema.placeholder || `Enter ${beautifyString(displayName)}` + } + /> +
+ {error && {error}} +
+ ); +}; + +const NodeBooleanInput: FC<{ + selfKey: string; + schema: BlockIOBooleanSubSchema; + value?: boolean; + error?: string; + handleInputChange: NodeObjectInputTreeProps["handleInputChange"]; + className?: string; + displayName: string; +}> = ({ + selfKey, + schema, + value, + error, + handleInputChange, + className, + displayName, +}) => { + value ??= schema.default ?? false; + return ( +
+
+ handleInputChange(selfKey, v)} + /> + {displayName} +
+ {error && {error}} +
+ ); +}; + +const NodeFallbackInput: FC<{ + selfKey: string; + schema?: BlockIOSubSchema; + value: any; + error?: string; + handleInputChange: NodeObjectInputTreeProps["handleInputChange"]; + handleInputClick: NodeObjectInputTreeProps["handleInputClick"]; + className?: string; + displayName: string; +}> = ({ + selfKey, + schema, + value, + error, + handleInputChange, + handleInputClick, + className, + displayName, +}) => { + return ( + + ); +}; diff --git a/rnd/autogpt_builder/src/components/ui/icons.tsx b/rnd/autogpt_builder/src/components/ui/icons.tsx new file mode 100644 index 0000000000..28cc978a6e --- /dev/null +++ b/rnd/autogpt_builder/src/components/ui/icons.tsx @@ -0,0 +1,479 @@ +"use client"; + +import * as React from "react"; +import { cn } from "@/lib/utils"; + +/** + * Represents different variants of an icon, based on its size. + */ +const iconVariants = { + size: { + default: "size-4", + sm: "size-2", + lg: "size-6", + }, +} as const; + +/** + * Props for the Icon component. + */ +export interface IconProps extends React.SVGProps { + size?: keyof typeof iconVariants.size; +} + +/** + * Creates an icon component that wraps a given SVG icon component. + * This function applies consistent styling and size variants to the icon. + * + * @template P - The props type for the icon component + * @param {React.FC

} IconComponent - The SVG icon component to be wrapped + * @returns {React.ForwardRefExoticComponent>} + * + */ +const createIcon =

>( + IconComponent: React.FC

, +): React.ForwardRefExoticComponent< + IconProps & React.RefAttributes +> => { + const Icon = React.forwardRef( + ({ className, size = "default", ...props }, ref) => { + return ( + + ); + }, + ); + Icon.displayName = IconComponent.name || "Icon"; + return Icon; +}; + +/** + * Save icon component. + * + * @component IconSave + * @param {IconProps} props - The props object containing additional attributes and event handlers for the icon. + * @returns {JSX.Element} - The save icon. + * + * @example + * // Default usage this is the standard usage + * + * + * @example + * // With custom color and size these should be used sparingly and only when necessary + * + * + * @example + * // With custom size and onClick handler + * + */ +export const IconSave = createIcon((props) => ( + + + + + +)); + +/** + * Undo icon component. + * + * @component IconUndo2 + * @param {IconProps} props - The props object containing additional attributes and event handlers for the icon. + * @returns {JSX.Element} - The undo icon. + * + * @example + * // Default usage this is the standard usage + * + * + * @example + * // With custom color and size these should be used sparingly and only when necessary + * + * + * @example + * // With custom size and onClick handler + * + */ +export const IconUndo2 = createIcon((props) => ( + + + + +)); + +/** + * Redo icon component. + * + * @component IconRedo2 + * @param {IconProps} props - The props object containing additional attributes and event handlers for the icon. + * @returns {JSX.Element} - The redo icon. + * + * @example + * // Default usage this is the standard usage + * + * + * @example + * // With custom color and size these should be used sparingly and only when necessary + * + * + * @example + * // With custom size and onClick handler + * + */ +export const IconRedo2 = createIcon((props) => ( + + + + +)); + +/** + * Toy brick icon component. + * + * @component IconToyBrick + * @param {IconProps} props - The props object containing additional attributes and event handlers for the icon. + * @returns {JSX.Element} - The toy brick icon. + * + * @example + * // Default usage this is the standard usage + * + * + * @example + * // With custom color and size these should be used sparingly and only when necessary + * + * + * @example + * // With custom size and onClick handler + * + */ +export const IconToyBrick = createIcon((props) => ( + + + + + +)); + +/** + * A circle alert icon component. + * + * @component IconCircleAlert + * @param {IconProps} props - The props object containing additional attributes and event handlers for the icon. + * @returns {JSX.Element} - The circle alert icon. + * + * @example + * // Default usage this is the standard usage + * + * + * @example + * // With custom color and size these should be used sparingly and only when necessary + * + * + * @example + * // With custom size and onClick handler + * + */ +export const IconCircleAlert = createIcon((props) => ( + + + + + +)); + +/** + * Circle User icon component. + * + * @component IconCircleUser + * @param {IconProps} props - The props object containing additional attributes and event handlers for the icon. + * @returns {JSX.Element} - The circle user icon. 
+ * + * @example + * // Default usage this is the standard usage + * + * + * @example + * // With custom color and size these should be used sparingly and only when necessary + * + * + * @example + * // With custom size and onClick handler + * + */ +export const IconCircleUser = createIcon((props) => ( + + + + + +)); + +/** + * Menu icon component. + * + * @component IconMenu + * @param {IconProps} props - The props object containing additional attributes and event handlers for the icon. + * @returns {JSX.Element} - The menu icon. + * + * @example + * // Default usage this is the standard usage + * + * + * @example + * // With custom color and size these should be used sparingly and only when necessary + * + * + * @example + * // With custom size and onClick handler + * + */ +export const IconMenu = createIcon((props) => ( + + + + + +)); + +/** + * Square Activity icon component. + * + * @component IconSquareActivity + * @param {IconProps} props - The props object containing additional attributes and event handlers for the icon. + * @returns {JSX.Element} - The square activity icon. + * + * @example + * // Default usage this is the standard usage + * + * + * @example + * // With custom color and size these should be used sparingly and only when necessary + * + * + * @example + * // With custom size and onClick handler + * + */ +export const IconSquareActivity = createIcon((props) => ( + + + + +)); + +/** + * Workflow icon component. + * + * @component IconWorkFlow + * @param {IconProps} props - The props object containing additional attributes and event handlers for the icon. + * @returns {JSX.Element} - The workflow icon. + * + * @example + * // Default usage this is the standard usage + * + * + * @example + * // With custom color and size these should be used sparingly and only when necessary + * + * + * @example + * // With custom size and onClick handler + * + */ +export const IconWorkFlow = createIcon((props) => ( + + + + + +)); + +/** + * Play icon component. + * + * @component IconPlay + * @param {IconProps} props - The props object containing additional attributes and event handlers for the icon. + * @returns {JSX.Element} - The play icon. + * + * @example + * // Default usage this is the standard usage + * + * + * @example + * // With custom color and size these should be used sparingly and only when necessary + * + * + * @example + * // With custom size and onClick handler + * + */ +export const IconPlay = createIcon((props) => ( + + + +)); + +/** + * Package2 icon component. + * + * @component IconPackage2 + * @param {IconProps} props - The props object containing additional attributes and event handlers for the icon. + * @returns {JSX.Element} - The package2 icon. + * + * @example + * // Default usage this is the standard usage + * + * + * @example + * // With custom color and size these should be used sparingly and only when necessary + * + * + * @example + * // With custom size and onClick handler + * + */ +export const IconPackage2 = createIcon((props) => ( + + + + + +)); + +/** + * Megaphone icon component. + * + * @component IconMegaphone + * @param {IconProps} props - The props object containing additional attributes and event handlers for the icon. + * @returns {JSX.Element} - The megaphone icon. 
+ * + * @example + * // Default usage this is the standard usage + * + * + * @example + * // With custom color and size these should be used sparingly and only when necessary + * + * + * @example + * // With custom size and onClick handler + * + */ +export const IconMegaphone = createIcon((props) => ( + + + + +)); + +export { iconVariants }; diff --git a/rnd/autogpt_builder/src/components/ui/select.tsx b/rnd/autogpt_builder/src/components/ui/select.tsx new file mode 100644 index 0000000000..d002f0eb1c --- /dev/null +++ b/rnd/autogpt_builder/src/components/ui/select.tsx @@ -0,0 +1,167 @@ +"use client"; + +import * as React from "react"; +import { + CaretSortIcon, + CheckIcon, + ChevronDownIcon, + ChevronUpIcon, +} from "@radix-ui/react-icons"; +import * as SelectPrimitive from "@radix-ui/react-select"; + +import { cn } from "@/lib/utils"; + +const Select = SelectPrimitive.Root; + +const SelectGroup = SelectPrimitive.Group; + +const SelectValue = SelectPrimitive.Value; + +const SelectTrigger = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + span]:line-clamp-1 dark:border-neutral-800 dark:ring-offset-neutral-950 dark:placeholder:text-neutral-400 dark:focus:ring-neutral-300", + className, + )} + {...props} + > + {children} + + + + +)); +SelectTrigger.displayName = SelectPrimitive.Trigger.displayName; + +const SelectScrollUpButton = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + + + +)); +SelectScrollUpButton.displayName = SelectPrimitive.ScrollUpButton.displayName; + +const SelectScrollDownButton = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + + + +)); +SelectScrollDownButton.displayName = + SelectPrimitive.ScrollDownButton.displayName; + +const SelectContent = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, position = "popper", ...props }, ref) => ( + + + + + {children} + + + + +)); +SelectContent.displayName = SelectPrimitive.Content.displayName; + +const SelectLabel = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)); +SelectLabel.displayName = SelectPrimitive.Label.displayName; + +const SelectItem = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + + + + + + + {children} + +)); +SelectItem.displayName = SelectPrimitive.Item.displayName; + +const SelectSeparator = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)); +SelectSeparator.displayName = SelectPrimitive.Separator.displayName; + +export { + Select, + SelectGroup, + SelectValue, + SelectTrigger, + SelectContent, + SelectLabel, + SelectItem, + SelectSeparator, + SelectScrollUpButton, + SelectScrollDownButton, +}; diff --git a/rnd/autogpt_builder/src/lib/autogpt-server-api/client.ts b/rnd/autogpt_builder/src/lib/autogpt-server-api/client.ts index b2f4192806..157a4a3e3b 100644 --- a/rnd/autogpt_builder/src/lib/autogpt-server-api/client.ts +++ b/rnd/autogpt_builder/src/lib/autogpt-server-api/client.ts @@ -1,3 +1,4 @@ +import { createClient } from "../supabase/client"; import { Block, Graph, @@ -11,8 +12,10 @@ import { export default class AutoGPTServerAPI { private baseUrl: string; private wsUrl: string; - private socket: WebSocket | null = null; - private messageHandlers: { [key: string]: 
(data: any) => void } = {}; + private webSocket: WebSocket | null = null; + private wsConnecting: Promise | null = null; + private wsMessageHandlers: { [key: string]: (data: any) => void } = {}; + private supabaseClient = createClient(); constructor( baseUrl: string = process.env.NEXT_PUBLIC_AGPT_SERVER_URL || @@ -141,18 +144,23 @@ export default class AutoGPTServerAPI { console.debug(`${method} ${path} payload:`, payload); } - const response = await fetch( - this.baseUrl + path, - method != "GET" - ? { - method, - headers: { + const token = + (await this.supabaseClient?.auth.getSession())?.data.session + ?.access_token || ""; + + const response = await fetch(this.baseUrl + path, { + method, + headers: + method != "GET" + ? { "Content-Type": "application/json", + Authorization: token ? `Bearer ${token}` : "", + } + : { + Authorization: token ? `Bearer ${token}` : "", }, - body: JSON.stringify(payload), - } - : undefined, - ); + body: JSON.stringify(payload), + }); const response_data = await response.json(); if (!response.ok) { @@ -166,37 +174,48 @@ export default class AutoGPTServerAPI { return response_data; } - connectWebSocket(): Promise { - return new Promise((resolve, reject) => { - this.socket = new WebSocket(this.wsUrl); + async connectWebSocket(): Promise { + this.wsConnecting ??= new Promise(async (resolve, reject) => { + try { + const token = + (await this.supabaseClient?.auth.getSession())?.data.session + ?.access_token || ""; - this.socket.onopen = () => { - console.log("WebSocket connection established"); - resolve(); - }; + const wsUrlWithToken = `${this.wsUrl}?token=${token}`; + this.webSocket = new WebSocket(wsUrlWithToken); - this.socket.onclose = (event) => { - console.log("WebSocket connection closed", event); - this.socket = null; - }; + this.webSocket.onopen = () => { + console.log("WebSocket connection established"); + resolve(); + }; - this.socket.onerror = (error) => { - console.error("WebSocket error:", error); + this.webSocket.onclose = (event) => { + console.log("WebSocket connection closed", event); + this.webSocket = null; + }; + + this.webSocket.onerror = (error) => { + console.error("WebSocket error:", error); + reject(error); + }; + + this.webSocket.onmessage = (event) => { + const message = JSON.parse(event.data); + if (this.wsMessageHandlers[message.method]) { + this.wsMessageHandlers[message.method](message.data); + } + }; + } catch (error) { + console.error("Error connecting to WebSocket:", error); reject(error); - }; - - this.socket.onmessage = (event) => { - const message = JSON.parse(event.data); - if (this.messageHandlers[message.method]) { - this.messageHandlers[message.method](message.data); - } - }; + } }); + return this.wsConnecting; } disconnectWebSocket() { - if (this.socket && this.socket.readyState === WebSocket.OPEN) { - this.socket.close(); + if (this.webSocket && this.webSocket.readyState === WebSocket.OPEN) { + this.webSocket.close(); } } @@ -204,10 +223,12 @@ export default class AutoGPTServerAPI { method: M, data: WebsocketMessageTypeMap[M], ) { - if (this.socket && this.socket.readyState === WebSocket.OPEN) { - this.socket.send(JSON.stringify({ method, data })); + if (this.webSocket && this.webSocket.readyState === WebSocket.OPEN) { + this.webSocket.send(JSON.stringify({ method, data })); } else { - console.error("WebSocket is not connected"); + this.connectWebSocket().then(() => + this.sendWebSocketMessage(method, data), + ); } } @@ -215,7 +236,7 @@ export default class AutoGPTServerAPI { method: M, handler: (data: 
WebsocketMessageTypeMap[M]) => void, ) { - this.messageHandlers[method] = handler; + this.wsMessageHandlers[method] = handler; } subscribeToExecution(graphId: string) { diff --git a/rnd/autogpt_builder/src/lib/autogpt-server-api/types.ts b/rnd/autogpt_builder/src/lib/autogpt-server-api/types.ts index f4ad2dc39d..c79de13648 100644 --- a/rnd/autogpt_builder/src/lib/autogpt-server-api/types.ts +++ b/rnd/autogpt_builder/src/lib/autogpt-server-api/types.ts @@ -9,60 +9,86 @@ export type Block = { export type BlockIORootSchema = { type: "object"; - properties: { [key: string]: BlockIOSchema }; + properties: { [key: string]: BlockIOSubSchema }; required?: string[]; additionalProperties?: { type: string }; }; -export type BlockIOSchema = { +export type BlockIOSubSchema = + | BlockIOSimpleTypeSubSchema + | BlockIOCombinedTypeSubSchema; + +type BlockIOSimpleTypeSubSchema = + | BlockIOObjectSubSchema + | BlockIOKVSubSchema + | BlockIOArraySubSchema + | BlockIOStringSubSchema + | BlockIONumberSubSchema + | BlockIOBooleanSubSchema + | BlockIONullSubSchema; + +type BlockIOSubSchemaMeta = { title?: string; description?: string; placeholder?: string; -} & (BlockIOSimpleTypeSchema | BlockIOCombinedTypeSchema); +}; -type BlockIOSimpleTypeSchema = - | { - type: "object"; - properties: { [key: string]: BlockIOSchema }; - required?: string[]; - additionalProperties?: { type: string }; - } - | { - type: "array"; - items?: BlockIOSimpleTypeSchema; - } - | { - type: "string"; - enum?: string[]; - secret?: true; - default?: string; - } - | { - type: "integer" | "number"; - default?: number; - } - | { - type: "boolean"; - default?: boolean; - } - | { - type: "null"; - }; +export type BlockIOObjectSubSchema = BlockIOSubSchemaMeta & { + type: "object"; + properties: { [key: string]: BlockIOSubSchema }; + default?: { [key: keyof BlockIOObjectSubSchema["properties"]]: any }; + required?: keyof BlockIOObjectSubSchema["properties"][]; +}; + +export type BlockIOKVSubSchema = BlockIOSubSchemaMeta & { + type: "object"; + additionalProperties: { type: "string" | "number" | "integer" }; + default?: { [key: string]: string | number }; +}; + +export type BlockIOArraySubSchema = BlockIOSubSchemaMeta & { + type: "array"; + items?: BlockIOSimpleTypeSubSchema; + default?: Array; +}; + +export type BlockIOStringSubSchema = BlockIOSubSchemaMeta & { + type: "string"; + enum?: string[]; + secret?: true; + default?: string; +}; + +export type BlockIONumberSubSchema = BlockIOSubSchemaMeta & { + type: "integer" | "number"; + default?: number; +}; + +export type BlockIOBooleanSubSchema = BlockIOSubSchemaMeta & { + type: "boolean"; + default?: boolean; +}; + +export type BlockIONullSubSchema = BlockIOSubSchemaMeta & { + type: "null"; +}; // At the time of writing, combined schemas only occur on the first nested level in a // block schema. It is typed this way to make the use of these objects less tedious. 
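Because every simple sub-schema now carries a literal type field, and combined schemas are distinguished by their allOf / anyOf / oneOf keys, consumers can narrow a BlockIOSubSchema structurally instead of casting, much like the dispatch in NodeGenericInputField. A rough sketch of such narrowing; the describe helper is illustrative only:

  function describe(schema: BlockIOSubSchema): string {
    if ("allOf" in schema || "anyOf" in schema || "oneOf" in schema) return "combined";
    if (schema.type === "object") return "properties" in schema ? "object" : "key-value";
    return schema.type; // "string" | "number" | "integer" | "boolean" | "array" | "null"
  }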
-type BlockIOCombinedTypeSchema = - | { - allOf: [BlockIOSimpleTypeSchema]; - } - | { - anyOf: BlockIOSimpleTypeSchema[]; - default?: string | number | boolean | null; - } - | { - oneOf: BlockIOSimpleTypeSchema[]; - default?: string | number | boolean | null; - }; +type BlockIOCombinedTypeSubSchema = BlockIOSubSchemaMeta & + ( + | { + allOf: [BlockIOSimpleTypeSubSchema]; + } + | { + anyOf: BlockIOSimpleTypeSubSchema[]; + default?: string | number | boolean | null; + } + | { + oneOf: BlockIOSimpleTypeSubSchema[]; + default?: string | number | boolean | null; + } + ); /* Mirror of autogpt_server/data/graph.py:Node */ export type Node = { diff --git a/rnd/autogpt_builder/src/lib/marketplace-api/client.ts b/rnd/autogpt_builder/src/lib/marketplace-api/client.ts index cc5240d593..7db6f32bd6 100644 --- a/rnd/autogpt_builder/src/lib/marketplace-api/client.ts +++ b/rnd/autogpt_builder/src/lib/marketplace-api/client.ts @@ -1,3 +1,4 @@ +import { createClient } from "../supabase/client"; import { AddAgentRequest, AgentResponse, @@ -9,6 +10,7 @@ import { export default class MarketplaceAPI { private baseUrl: string; + private supabaseClient = createClient(); constructor( baseUrl: string = process.env.NEXT_PUBLIC_AGPT_MARKETPLACE_URL || @@ -140,18 +142,24 @@ export default class MarketplaceAPI { console.debug(`${method} ${path} payload:`, payload); } - const response = await fetch( - this.baseUrl + path, - method != "GET" - ? { - method, - headers: { + const token = + (await this.supabaseClient?.auth.getSession())?.data.session + ?.access_token || ""; + + const response = await fetch(this.baseUrl + path, { + method, + headers: + method != "GET" + ? { "Content-Type": "application/json", + Authorization: token ? `Bearer ${token}` : "", + } + : { + Authorization: token ? 
`Bearer ${token}` : "", }, - body: JSON.stringify(payload), - } - : undefined, - ); + body: JSON.stringify(payload), + }); + const response_data = await response.json(); if (!response.ok) { diff --git a/rnd/autogpt_builder/src/lib/types.ts b/rnd/autogpt_builder/src/lib/types.ts new file mode 100644 index 0000000000..04750a5973 --- /dev/null +++ b/rnd/autogpt_builder/src/lib/types.ts @@ -0,0 +1,13 @@ +import { NodeExecutionResult } from "@/lib/autogpt-server-api"; + +export type FlowRun = { + id: string; + graphID: string; + graphVersion: number; + status: "running" | "waiting" | "success" | "failed"; + startTime: number; // unix timestamp (ms) + endTime: number; // unix timestamp (ms) + duration: number; // seconds + totalRunTime: number; // seconds + nodeExecutionResults: NodeExecutionResult[]; +}; diff --git a/rnd/autogpt_builder/src/lib/utils.ts b/rnd/autogpt_builder/src/lib/utils.ts index 6bf9c9c46c..7d7639c5b7 100644 --- a/rnd/autogpt_builder/src/lib/utils.ts +++ b/rnd/autogpt_builder/src/lib/utils.ts @@ -45,6 +45,7 @@ export function getTypeTextColor(type: string | null): string { object: "text-purple-500", array: "text-indigo-500", null: "text-gray-500", + any: "text-gray-500", "": "text-gray-500", }[type] || "text-gray-500" ); @@ -61,6 +62,7 @@ export function getTypeBgColor(type: string | null): string { object: "bg-purple-500", array: "bg-indigo-500", null: "bg-gray-500", + any: "bg-gray-500", "": "bg-gray-500", }[type] || "bg-gray-500" ); @@ -76,6 +78,7 @@ export function getTypeColor(type: string | null): string { object: "#a855f7", array: "#6366f1", null: "#6b7280", + any: "#6b7280", "": "#6b7280", }[type] || "#6b7280" ); diff --git a/rnd/autogpt_builder/yarn.lock b/rnd/autogpt_builder/yarn.lock index 78e24778e2..3aad91e49d 100644 --- a/rnd/autogpt_builder/yarn.lock +++ b/rnd/autogpt_builder/yarn.lock @@ -466,6 +466,33 @@ "@radix-ui/react-use-callback-ref" "1.1.0" "@radix-ui/react-use-controllable-state" "1.1.0" +"@radix-ui/react-select@^2.1.1": + version "2.1.1" + resolved "https://registry.yarnpkg.com/@radix-ui/react-select/-/react-select-2.1.1.tgz#df05cb0b29d3deaef83b505917c4042e0e418a9f" + integrity sha512-8iRDfyLtzxlprOo9IicnzvpsO1wNCkuwzzCM+Z5Rb5tNOpCdMvcc2AkzX0Fz+Tz9v6NJ5B/7EEgyZveo4FBRfQ== + dependencies: + "@radix-ui/number" "1.1.0" + "@radix-ui/primitive" "1.1.0" + "@radix-ui/react-collection" "1.1.0" + "@radix-ui/react-compose-refs" "1.1.0" + "@radix-ui/react-context" "1.1.0" + "@radix-ui/react-direction" "1.1.0" + "@radix-ui/react-dismissable-layer" "1.1.0" + "@radix-ui/react-focus-guards" "1.1.0" + "@radix-ui/react-focus-scope" "1.1.0" + "@radix-ui/react-id" "1.1.0" + "@radix-ui/react-popper" "1.2.0" + "@radix-ui/react-portal" "1.1.1" + "@radix-ui/react-primitive" "2.0.0" + "@radix-ui/react-slot" "1.1.0" + "@radix-ui/react-use-callback-ref" "1.1.0" + "@radix-ui/react-use-controllable-state" "1.1.0" + "@radix-ui/react-use-layout-effect" "1.1.0" + "@radix-ui/react-use-previous" "1.1.0" + "@radix-ui/react-visually-hidden" "1.1.0" + aria-hidden "^1.1.1" + react-remove-scroll "2.5.7" + "@radix-ui/react-scroll-area@^1.1.0": version "1.1.0" resolved "https://registry.yarnpkg.com/@radix-ui/react-scroll-area/-/react-scroll-area-1.1.0.tgz#50b24b0fc9ada151d176395bcf47b2ec68feada5" diff --git a/rnd/autogpt_libs/autogpt_libs/auth/__init__.py b/rnd/autogpt_libs/autogpt_libs/auth/__init__.py index e69de29bb2..5090cb4f03 100644 --- a/rnd/autogpt_libs/autogpt_libs/auth/__init__.py +++ b/rnd/autogpt_libs/autogpt_libs/auth/__init__.py @@ -0,0 +1,14 @@ +from .config import Settings 
+from .depends import requires_admin_user, requires_user +from .jwt_utils import parse_jwt_token +from .middleware import auth_middleware +from .models import User + +__all__ = [ + "Settings", + "parse_jwt_token", + "requires_user", + "requires_admin_user", + "auth_middleware", + "User", +] diff --git a/rnd/autogpt_libs/autogpt_libs/auth/config.py b/rnd/autogpt_libs/autogpt_libs/auth/config.py index 71e298451f..1c7bc18290 100644 --- a/rnd/autogpt_libs/autogpt_libs/auth/config.py +++ b/rnd/autogpt_libs/autogpt_libs/auth/config.py @@ -1,8 +1,10 @@ import os + from dotenv import load_dotenv load_dotenv() + class Settings: JWT_SECRET_KEY: str = os.getenv("SUPABASE_JWT_SECRET", "") ENABLE_AUTH: bool = os.getenv("ENABLE_AUTH", "false").lower() == "true" diff --git a/rnd/autogpt_libs/autogpt_libs/auth/depends.py b/rnd/autogpt_libs/autogpt_libs/auth/depends.py new file mode 100644 index 0000000000..cd5cb09615 --- /dev/null +++ b/rnd/autogpt_libs/autogpt_libs/auth/depends.py @@ -0,0 +1,32 @@ +import fastapi + +from .middleware import auth_middleware +from .models import User + + +def requires_user(payload: dict = fastapi.Depends(auth_middleware)) -> User: + return verify_user(payload, admin_only=False) + + +def requires_admin_user( + payload: dict = fastapi.Depends(auth_middleware), +) -> User: + return verify_user(payload, admin_only=True) + + +def verify_user(payload: dict | None, admin_only: bool) -> User: + if not payload: + # This handles the case when authentication is disabled + payload = {"sub": "3e53486c-cf57-477e-ba2a-cb02dc828e1a", "role": "admin"} + + user_id = payload.get("sub") + + if not user_id: + raise fastapi.HTTPException( + status_code=401, detail="User ID not found in token" + ) + + if admin_only and payload["role"] != "admin": + raise fastapi.HTTPException(status_code=403, detail="Admin access required") + + return User.from_payload(payload) diff --git a/rnd/autogpt_libs/autogpt_libs/auth/depends_tests.py b/rnd/autogpt_libs/autogpt_libs/auth/depends_tests.py new file mode 100644 index 0000000000..8e2c10d127 --- /dev/null +++ b/rnd/autogpt_libs/autogpt_libs/auth/depends_tests.py @@ -0,0 +1,68 @@ +import pytest + +from .depends import verify_user, requires_admin_user, requires_user + + +def test_verify_user_no_payload(): + user = verify_user(None, admin_only=False) + assert user.user_id == "3e53486c-cf57-477e-ba2a-cb02dc828e1a" + assert user.role == "admin" + + +def test_verify_user_no_user_id(): + with pytest.raises(Exception): + verify_user({"role": "admin"}, admin_only=False) + + +def test_verify_user_not_admin(): + with pytest.raises(Exception): + verify_user( + {"sub": "3e53486c-cf57-477e-ba2a-cb02dc828e1a", "role": "user"}, + admin_only=True, + ) + + +def test_verify_user_with_admin_role(): + user = verify_user( + {"sub": "3e53486c-cf57-477e-ba2a-cb02dc828e1a", "role": "admin"}, + admin_only=True, + ) + assert user.user_id == "3e53486c-cf57-477e-ba2a-cb02dc828e1a" + assert user.role == "admin" + + +def test_verify_user_with_user_role(): + user = verify_user( + {"sub": "3e53486c-cf57-477e-ba2a-cb02dc828e1a", "role": "user"}, + admin_only=False, + ) + assert user.user_id == "3e53486c-cf57-477e-ba2a-cb02dc828e1a" + assert user.role == "user" + + +def test_requires_user(): + user = requires_user( + {"sub": "3e53486c-cf57-477e-ba2a-cb02dc828e1a", "role": "user"} + ) + assert user.user_id == "3e53486c-cf57-477e-ba2a-cb02dc828e1a" + assert user.role == "user" + + +def test_requires_user_no_user_id(): + with pytest.raises(Exception): + requires_user({"role": "user"}) + + +def 
test_requires_admin_user(): + user = requires_admin_user( + {"sub": "3e53486c-cf57-477e-ba2a-cb02dc828e1a", "role": "admin"} + ) + assert user.user_id == "3e53486c-cf57-477e-ba2a-cb02dc828e1a" + assert user.role == "admin" + + +def test_requires_admin_user_not_admin(): + with pytest.raises(Exception): + requires_admin_user( + {"sub": "3e53486c-cf57-477e-ba2a-cb02dc828e1a", "role": "user"} + ) diff --git a/rnd/autogpt_libs/autogpt_libs/auth/jwt_utils.py b/rnd/autogpt_libs/autogpt_libs/auth/jwt_utils.py index ae414ae802..900275f0bb 100644 --- a/rnd/autogpt_libs/autogpt_libs/auth/jwt_utils.py +++ b/rnd/autogpt_libs/autogpt_libs/auth/jwt_utils.py @@ -1,5 +1,7 @@ +from typing import Any, Dict + import jwt -from typing import Dict, Any + from .config import settings @@ -12,7 +14,12 @@ def parse_jwt_token(token: str) -> Dict[str, Any]: :raises ValueError: If the token is invalid or expired """ try: - payload = jwt.decode(token, settings.JWT_SECRET_KEY, algorithms=[settings.JWT_ALGORITHM]) + payload = jwt.decode( + token, + settings.JWT_SECRET_KEY, + algorithms=[settings.JWT_ALGORITHM], + audience="authenticated", + ) return payload except jwt.ExpiredSignatureError: raise ValueError("Token has expired") diff --git a/rnd/autogpt_libs/autogpt_libs/auth/middleware.py b/rnd/autogpt_libs/autogpt_libs/auth/middleware.py index f474d55271..84ea52f0b7 100644 --- a/rnd/autogpt_libs/autogpt_libs/auth/middleware.py +++ b/rnd/autogpt_libs/autogpt_libs/auth/middleware.py @@ -1,11 +1,14 @@ import logging -from fastapi import Request, HTTPException, Depends -from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials -from .jwt_utils import parse_jwt_token +from fastapi import HTTPException, Request +from fastapi.security import HTTPBearer + from .config import settings +from .jwt_utils import parse_jwt_token security = HTTPBearer() + + async def auth_middleware(request: Request): if not settings.ENABLE_AUTH: # If authentication is disabled, allow the request to proceed diff --git a/rnd/autogpt_libs/autogpt_libs/auth/models.py b/rnd/autogpt_libs/autogpt_libs/auth/models.py new file mode 100644 index 0000000000..88bed4a198 --- /dev/null +++ b/rnd/autogpt_libs/autogpt_libs/auth/models.py @@ -0,0 +1,19 @@ +from dataclasses import dataclass + + +# Using dataclass here to avoid adding dependency on pydantic +@dataclass(frozen=True) +class User: + user_id: str + email: str + phone_number: str + role: str + + @classmethod + def from_payload(cls, payload): + return cls( + user_id=payload["sub"], + email=payload.get("email", ""), + phone_number=payload.get("phone", ""), + role=payload["role"], + ) diff --git a/rnd/autogpt_server/.env.template b/rnd/autogpt_server/.env.template index 8b1ce0ee8f..9a5ed2fc0f 100644 --- a/rnd/autogpt_server/.env.template +++ b/rnd/autogpt_server/.env.template @@ -10,4 +10,10 @@ REDDIT_USERNAME= REDDIT_PASSWORD= # Discord -DISCORD_BOT_TOKEN= \ No newline at end of file +DISCORD_BOT_TOKEN= + +# SMTP/Email +SMTP_SERVER= +SMTP_PORT= +SMTP_USERNAME= +SMTP_PASSWORD= \ No newline at end of file diff --git a/rnd/autogpt_server/autogpt_server/blocks/__init__.py b/rnd/autogpt_server/autogpt_server/blocks/__init__.py index f177b60bd0..e2ac27bd1a 100644 --- a/rnd/autogpt_server/autogpt_server/blocks/__init__.py +++ b/rnd/autogpt_server/autogpt_server/blocks/__init__.py @@ -1,6 +1,7 @@ import glob import importlib import os +import re from pathlib import Path from autogpt_server.data.block import Block @@ -15,6 +16,11 @@ modules = [ if os.path.isfile(f) and f.endswith(".py") and not 
f.endswith("__init__.py") ] for module in modules: + if not re.match("^[a-z_]+$", module): + raise ValueError( + f"Block module {module} error: module name must be lowercase, separated by underscores, and contain only alphabet characters" + ) + importlib.import_module(f".{module}", package=__name__) AVAILABLE_MODULES.append(module) @@ -30,9 +36,16 @@ def all_subclasses(clz): for cls in all_subclasses(Block): - if not cls.__name__.endswith("Block"): + name = cls.__name__ + + if cls.__name__.endswith("Base"): continue + if not cls.__name__.endswith("Block"): + raise ValueError( + f"Block class {cls.__name__} does not end with 'Block', If you are creating an abstract class, please name the class with 'Base' at the end" + ) + block = cls() if not isinstance(block.id, str) or len(block.id) != 36: diff --git a/rnd/autogpt_server/autogpt_server/blocks/if_block.py b/rnd/autogpt_server/autogpt_server/blocks/branching.py similarity index 100% rename from rnd/autogpt_server/autogpt_server/blocks/if_block.py rename to rnd/autogpt_server/autogpt_server/blocks/branching.py diff --git a/rnd/autogpt_server/autogpt_server/blocks/read_csv.py b/rnd/autogpt_server/autogpt_server/blocks/csv.py similarity index 100% rename from rnd/autogpt_server/autogpt_server/blocks/read_csv.py rename to rnd/autogpt_server/autogpt_server/blocks/csv.py diff --git a/rnd/autogpt_server/autogpt_server/blocks/discordblock.py b/rnd/autogpt_server/autogpt_server/blocks/discord.py similarity index 100% rename from rnd/autogpt_server/autogpt_server/blocks/discordblock.py rename to rnd/autogpt_server/autogpt_server/blocks/discord.py diff --git a/rnd/autogpt_server/autogpt_server/blocks/email_block.py b/rnd/autogpt_server/autogpt_server/blocks/email_block.py new file mode 100644 index 0000000000..d73708887a --- /dev/null +++ b/rnd/autogpt_server/autogpt_server/blocks/email_block.py @@ -0,0 +1,101 @@ +import smtplib +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText + +from pydantic import BaseModel, ConfigDict, Field + +from autogpt_server.data.block import Block, BlockCategory, BlockOutput, BlockSchema +from autogpt_server.data.model import BlockSecret, SchemaField, SecretField + + +class EmailCredentials(BaseModel): + smtp_server: str = Field( + default="smtp.gmail.com", description="SMTP server address" + ) + smtp_port: int = Field(default=25, description="SMTP port number") + smtp_username: BlockSecret = SecretField(key="smtp_username") + smtp_password: BlockSecret = SecretField(key="smtp_password") + + model_config = ConfigDict(title="Email Credentials") + + +class SendEmailBlock(Block): + class Input(BlockSchema): + to_email: str = SchemaField( + description="Recipient email address", placeholder="recipient@example.com" + ) + subject: str = SchemaField( + description="Subject of the email", placeholder="Enter the email subject" + ) + body: str = SchemaField( + description="Body of the email", placeholder="Enter the email body" + ) + creds: EmailCredentials = Field( + description="SMTP credentials", + default=EmailCredentials(), + ) + + class Output(BlockSchema): + status: str = SchemaField(description="Status of the email sending operation") + error: str = SchemaField( + description="Error message if the email sending failed" + ) + + def __init__(self): + super().__init__( + id="a1234567-89ab-cdef-0123-456789abcdef", + description="This block sends an email using the provided SMTP credentials.", + categories={BlockCategory.TEXT}, + input_schema=SendEmailBlock.Input, + 
output_schema=SendEmailBlock.Output, + test_input={ + "to_email": "recipient@example.com", + "subject": "Test Email", + "body": "This is a test email.", + "creds": { + "smtp_server": "smtp.gmail.com", + "smtp_port": 25, + "smtp_username": "your-email@gmail.com", + "smtp_password": "your-gmail-password", + }, + }, + test_output=[("status", "Email sent successfully")], + test_mock={"send_email": lambda *args, **kwargs: "Email sent successfully"}, + ) + + @staticmethod + def send_email( + creds: EmailCredentials, to_email: str, subject: str, body: str + ) -> str: + try: + smtp_server = creds.smtp_server + smtp_port = creds.smtp_port + smtp_username = creds.smtp_username.get_secret_value() + smtp_password = creds.smtp_password.get_secret_value() + + msg = MIMEMultipart() + msg["From"] = smtp_username + msg["To"] = to_email + msg["Subject"] = subject + msg.attach(MIMEText(body, "plain")) + + with smtplib.SMTP(smtp_server, smtp_port) as server: + server.starttls() + server.login(smtp_username, smtp_password) + server.sendmail(smtp_username, to_email, msg.as_string()) + + return "Email sent successfully" + except Exception as e: + return f"Failed to send email: {str(e)}" + + def run(self, input_data: Input) -> BlockOutput: + status = self.send_email( + input_data.creds, + input_data.to_email, + input_data.subject, + input_data.body, + ) + if "successfully" in status: + yield "status", status + else: + yield "error", status diff --git a/rnd/autogpt_server/autogpt_server/blocks/foreach_block.py b/rnd/autogpt_server/autogpt_server/blocks/iteration.py similarity index 100% rename from rnd/autogpt_server/autogpt_server/blocks/foreach_block.py rename to rnd/autogpt_server/autogpt_server/blocks/iteration.py diff --git a/rnd/autogpt_server/autogpt_server/blocks/create_medium_post.py b/rnd/autogpt_server/autogpt_server/blocks/medium.py similarity index 100% rename from rnd/autogpt_server/autogpt_server/blocks/create_medium_post.py rename to rnd/autogpt_server/autogpt_server/blocks/medium.py diff --git a/rnd/autogpt_server/autogpt_server/blocks/rss-reader-block.py b/rnd/autogpt_server/autogpt_server/blocks/rss.py similarity index 100% rename from rnd/autogpt_server/autogpt_server/blocks/rss-reader-block.py rename to rnd/autogpt_server/autogpt_server/blocks/rss.py diff --git a/rnd/autogpt_server/autogpt_server/blocks/search.py b/rnd/autogpt_server/autogpt_server/blocks/search.py index 46cd3b4867..21c457323b 100644 --- a/rnd/autogpt_server/autogpt_server/blocks/search.py +++ b/rnd/autogpt_server/autogpt_server/blocks/search.py @@ -131,7 +131,7 @@ class WebScraperBlock(Block, GetRequest): yield "error", f"Request to Jina-ai Reader failed: {e}" -class GetOpenWeatherMapWeather(Block, GetRequest): +class GetOpenWeatherMapBlock(Block, GetRequest): class Input(BlockSchema): location: str api_key: BlockSecret = SecretField(key="openweathermap_api_key") @@ -146,8 +146,8 @@ class GetOpenWeatherMapWeather(Block, GetRequest): def __init__(self): super().__init__( id="f7a8b2c3-6d4e-5f8b-9e7f-6d4e5f8b9e7f", - input_schema=GetOpenWeatherMapWeather.Input, - output_schema=GetOpenWeatherMapWeather.Output, + input_schema=GetOpenWeatherMapBlock.Input, + output_schema=GetOpenWeatherMapBlock.Output, test_input={ "location": "New York", "api_key": "YOUR_API_KEY", diff --git a/rnd/autogpt_server/autogpt_server/blocks/time_blocks.py b/rnd/autogpt_server/autogpt_server/blocks/time_blocks.py new file mode 100644 index 0000000000..dcce127d8e --- /dev/null +++ b/rnd/autogpt_server/autogpt_server/blocks/time_blocks.py @@ -0,0 +1,139 @@ 
+import time +from datetime import datetime, timedelta +from typing import Union + +from autogpt_server.data.block import Block, BlockCategory, BlockOutput, BlockSchema + + +class CurrentTimeBlock(Block): + class Input(BlockSchema): + trigger: str + + class Output(BlockSchema): + time: str + + def __init__(self): + super().__init__( + id="a892b8d9-3e4e-4e9c-9c1e-75f8efcf1bfa", + description="This block outputs the current time.", + categories={BlockCategory.TEXT}, + input_schema=CurrentTimeBlock.Input, + output_schema=CurrentTimeBlock.Output, + test_input=[ + {"trigger": "Hello", "format": "{time}"}, + ], + test_output=[ + ("time", time.strftime("%H:%M:%S")), + ], + ) + + def run(self, input_data: Input) -> BlockOutput: + current_time = time.strftime("%H:%M:%S") + yield "time", current_time + + +class CurrentDateBlock(Block): + class Input(BlockSchema): + trigger: str + offset: Union[int, str] + + class Output(BlockSchema): + date: str + + def __init__(self): + super().__init__( + id="b29c1b50-5d0e-4d9f-8f9d-1b0e6fcbf0b1", + description="This block outputs the current date with an optional offset.", + categories={BlockCategory.TEXT}, + input_schema=CurrentDateBlock.Input, + output_schema=CurrentDateBlock.Output, + test_input=[ + {"trigger": "Hello", "format": "{date}", "offset": "7"}, + ], + test_output=[ + ( + "date", + lambda t: abs(datetime.now() - datetime.strptime(t, "%Y-%m-%d")) + < timedelta(days=8), # 7 days difference + 1 day error margin. + ), + ], + ) + + def run(self, input_data: Input) -> BlockOutput: + try: + offset = int(input_data.offset) + except ValueError: + offset = 0 + current_date = datetime.now() - timedelta(days=offset) + yield "date", current_date.strftime("%Y-%m-%d") + + +class CurrentDateAndTimeBlock(Block): + class Input(BlockSchema): + trigger: str + + class Output(BlockSchema): + date_time: str + + def __init__(self): + super().__init__( + id="b29c1b50-5d0e-4d9f-8f9d-1b0e6fcbf0h2", + description="This block outputs the current date and time.", + categories={BlockCategory.TEXT}, + input_schema=CurrentDateAndTimeBlock.Input, + output_schema=CurrentDateAndTimeBlock.Output, + test_input=[ + {"trigger": "Hello", "format": "{date_time}"}, + ], + test_output=[ + ( + "date_time", + lambda t: abs( + datetime.now() - datetime.strptime(t, "%Y-%m-%d %H:%M:%S") + ) + < timedelta(seconds=10), # 10 seconds error margin. 
+ ), + ], + ) + + def run(self, input_data: Input) -> BlockOutput: + current_date_time = time.strftime("%Y-%m-%d %H:%M:%S") + yield "date_time", current_date_time + + +class TimerBlock(Block): + class Input(BlockSchema): + seconds: Union[int, str] = 0 + minutes: Union[int, str] = 0 + hours: Union[int, str] = 0 + days: Union[int, str] = 0 + + class Output(BlockSchema): + message: str + + def __init__(self): + super().__init__( + id="d67a9c52-5e4e-11e2-bcfd-0800200c9a71", + description="This block triggers after a specified duration.", + categories={BlockCategory.TEXT}, + input_schema=TimerBlock.Input, + output_schema=TimerBlock.Output, + test_input=[ + {"seconds": 1}, + ], + test_output=[ + ("message", "timer finished"), + ], + ) + + def run(self, input_data: Input) -> BlockOutput: + + seconds = int(input_data.seconds) + minutes = int(input_data.minutes) + hours = int(input_data.hours) + days = int(input_data.days) + + total_seconds = seconds + minutes * 60 + hours * 3600 + days * 86400 + + time.sleep(total_seconds) + yield "message", "timer finished" diff --git a/rnd/autogpt_server/autogpt_server/blocks/youtube_transcriber.py b/rnd/autogpt_server/autogpt_server/blocks/youtube.py similarity index 94% rename from rnd/autogpt_server/autogpt_server/blocks/youtube_transcriber.py rename to rnd/autogpt_server/autogpt_server/blocks/youtube.py index 3085085514..91248152c0 100644 --- a/rnd/autogpt_server/autogpt_server/blocks/youtube_transcriber.py +++ b/rnd/autogpt_server/autogpt_server/blocks/youtube.py @@ -7,7 +7,7 @@ from autogpt_server.data.block import Block, BlockOutput, BlockSchema from autogpt_server.data.model import SchemaField -class YouTubeTranscriber(Block): +class YouTubeTranscriberBlock(Block): class Input(BlockSchema): youtube_url: str = SchemaField( description="The URL of the YouTube video to transcribe", @@ -24,8 +24,8 @@ class YouTubeTranscriber(Block): def __init__(self): super().__init__( id="f3a8f7e1-4b1d-4e5f-9f2a-7c3d5a2e6b4c", - input_schema=YouTubeTranscriber.Input, - output_schema=YouTubeTranscriber.Output, + input_schema=YouTubeTranscriberBlock.Input, + output_schema=YouTubeTranscriberBlock.Output, test_input={"youtube_url": "https://www.youtube.com/watch?v=dQw4w9WgXcQ"}, test_output=[ ("video_id", "dQw4w9WgXcQ"), diff --git a/rnd/autogpt_server/autogpt_server/data/graph.py b/rnd/autogpt_server/autogpt_server/data/graph.py index 7109d6b1bb..75ce276969 100644 --- a/rnd/autogpt_server/autogpt_server/data/graph.py +++ b/rnd/autogpt_server/autogpt_server/data/graph.py @@ -272,7 +272,8 @@ async def get_node(node_id: str) -> Node | None: async def get_graphs_meta( - filter_by: Literal["active", "template"] | None = "active" + filter_by: Literal["active", "template"] | None = "active", + user_id: str | None = None, ) -> list[GraphMeta]: """ Retrieves graph metadata objects. @@ -291,6 +292,9 @@ async def get_graphs_meta( elif filter_by == "template": where_clause["isTemplate"] = True + if user_id and filter_by != "template": + where_clause["userId"] = user_id + graphs = await AgentGraph.prisma().find_many( where=where_clause, distinct=["id"], @@ -304,7 +308,10 @@ async def get_graphs_meta( async def get_graph( - graph_id: str, version: int | None = None, template: bool = False + graph_id: str, + version: int | None = None, + template: bool = False, + user_id: str | None = None, ) -> Graph | None: """ Retrieves a graph from the DB. 
@@ -322,6 +329,9 @@ async def get_graph( elif not template: where_clause["isActive"] = True + if user_id and not template: + where_clause["userId"] = user_id + graph = await AgentGraph.prisma().find_first( where=where_clause, include=AGENT_GRAPH_INCLUDE, @@ -330,10 +340,23 @@ async def get_graph( return Graph.from_db(graph) if graph else None -async def set_graph_active_version(graph_id: str, version: int) -> None: +async def set_graph_active_version(graph_id: str, version: int, user_id: str) -> None: + # Check if the graph belongs to the user + graph = await AgentGraph.prisma().find_first( + where={ + "id": graph_id, + "version": version, + "userId": user_id, + } + ) + if not graph: + raise Exception(f"Graph #{graph_id} v{version} not found or not owned by user") + updated_graph = await AgentGraph.prisma().update( data={"isActive": True}, - where={"graphVersionId": {"id": graph_id, "version": version}}, + where={ + "graphVersionId": {"id": graph_id, "version": version}, + }, ) if not updated_graph: raise Exception(f"Graph #{graph_id} v{version} not found") @@ -341,13 +364,15 @@ async def set_graph_active_version(graph_id: str, version: int) -> None: # Deactivate all other versions await AgentGraph.prisma().update_many( data={"isActive": False}, - where={"id": graph_id, "version": {"not": version}}, + where={"id": graph_id, "version": {"not": version}, "userId": user_id}, ) -async def get_graph_all_versions(graph_id: str) -> list[Graph]: +async def get_graph_all_versions( + graph_id: str, user_id: str | None = None +) -> list[Graph]: graph_versions = await AgentGraph.prisma().find_many( - where={"id": graph_id}, + where={"id": graph_id, "userId": user_id}, order={"version": "desc"}, include=AGENT_GRAPH_INCLUDE, ) @@ -358,17 +383,19 @@ async def get_graph_all_versions(graph_id: str) -> list[Graph]: return [Graph.from_db(graph) for graph in graph_versions] -async def create_graph(graph: Graph) -> Graph: +async def create_graph(graph: Graph, user_id: str | None) -> Graph: async with transaction() as tx: - await __create_graph(tx, graph) + await __create_graph(tx, graph, user_id) - if created_graph := await get_graph(graph.id, graph.version, graph.is_template): + if created_graph := await get_graph( + graph.id, graph.version, graph.is_template, user_id=user_id + ): return created_graph raise ValueError(f"Created graph {graph.id} v{graph.version} is not in DB") -async def __create_graph(tx, graph: Graph): +async def __create_graph(tx, graph: Graph, user_id: str | None): await AgentGraph.prisma(tx).create( data={ "id": graph.id, @@ -377,6 +404,7 @@ async def __create_graph(tx, graph: Graph): "description": graph.description, "isTemplate": graph.is_template, "isActive": graph.is_active, + "userId": user_id, } ) @@ -391,6 +419,7 @@ async def __create_graph(tx, graph: Graph): "description": f"Sub-Graph of {graph.id}", "isTemplate": graph.is_template, "isActive": graph.is_active, + "userId": user_id, } ) for subgraph_id in graph.subgraphs @@ -453,5 +482,5 @@ async def import_packaged_templates() -> None: exists := next((t for t in templates_in_db if t.id == template.id), None) ) and exists.version >= template.version: continue - await create_graph(template) + await create_graph(template, None) print(f"Loaded template '{template.name}' ({template.id})") diff --git a/rnd/autogpt_server/autogpt_server/data/schedule.py b/rnd/autogpt_server/autogpt_server/data/schedule.py index bbec740dca..5491dbeaaa 100644 --- a/rnd/autogpt_server/autogpt_server/data/schedule.py +++ 
b/rnd/autogpt_server/autogpt_server/data/schedule.py @@ -10,6 +10,7 @@ from autogpt_server.util import json class ExecutionSchedule(BaseDbModel): graph_id: str + user_id: str graph_version: int schedule: str is_enabled: bool @@ -25,6 +26,7 @@ class ExecutionSchedule(BaseDbModel): return ExecutionSchedule( id=schedule.id, graph_id=schedule.agentGraphId, + user_id=schedule.userId, graph_version=schedule.agentGraphVersion, schedule=schedule.schedule, is_enabled=schedule.isEnabled, @@ -47,11 +49,12 @@ async def disable_schedule(schedule_id: str): ) -async def get_schedules(graph_id: str) -> list[ExecutionSchedule]: +async def get_schedules(graph_id: str, user_id: str) -> list[ExecutionSchedule]: query = AgentGraphExecutionSchedule.prisma().find_many( where={ "isEnabled": True, "agentGraphId": graph_id, + "userId": user_id, }, ) return [ExecutionSchedule.from_db(schedule) for schedule in await query] @@ -61,6 +64,7 @@ async def add_schedule(schedule: ExecutionSchedule) -> ExecutionSchedule: obj = await AgentGraphExecutionSchedule.prisma().create( data={ "id": schedule.id, + "userId": schedule.user_id, "agentGraphId": schedule.graph_id, "agentGraphVersion": schedule.graph_version, "schedule": schedule.schedule, @@ -71,7 +75,7 @@ async def add_schedule(schedule: ExecutionSchedule) -> ExecutionSchedule: return ExecutionSchedule.from_db(obj) -async def update_schedule(schedule_id: str, is_enabled: bool): +async def update_schedule(schedule_id: str, is_enabled: bool, user_id: str): await AgentGraphExecutionSchedule.prisma().update( where={"id": schedule_id}, data={"isEnabled": is_enabled} ) diff --git a/rnd/autogpt_server/autogpt_server/data/user.py b/rnd/autogpt_server/autogpt_server/data/user.py new file mode 100644 index 0000000000..93b2366c3c --- /dev/null +++ b/rnd/autogpt_server/autogpt_server/data/user.py @@ -0,0 +1,40 @@ +from typing import Optional + +from prisma.models import User + +from autogpt_server.data.db import prisma + + +async def get_or_create_user(user_data: dict) -> User: + user = await prisma.user.find_unique(where={"id": user_data["sub"]}) + if not user: + user = await prisma.user.create( + data={ + "id": user_data["sub"], + "email": user_data["email"], + "name": user_data.get("user_metadata", {}).get("name"), + } + ) + return User.model_validate(user) + + +async def get_user_by_id(user_id: str) -> Optional[User]: + user = await prisma.user.find_unique(where={"id": user_id}) + return User.model_validate(user) if user else None + + +async def create_default_user(enable_auth: str) -> Optional[User]: + if not enable_auth.lower() == "true": + user = await prisma.user.find_unique( + where={"id": "3e53486c-cf57-477e-ba2a-cb02dc828e1a"} + ) + if not user: + user = await prisma.user.create( + data={ + "id": "3e53486c-cf57-477e-ba2a-cb02dc828e1a", + "email": "default@example.com", + "name": "Default User", + } + ) + return User.model_validate(user) + return None diff --git a/rnd/autogpt_server/autogpt_server/executor/manager.py b/rnd/autogpt_server/autogpt_server/executor/manager.py index ea7ccea1b8..6fbd1a6955 100644 --- a/rnd/autogpt_server/autogpt_server/executor/manager.py +++ b/rnd/autogpt_server/autogpt_server/executor/manager.py @@ -416,8 +416,10 @@ class ExecutionManager(AppService): return get_agent_server_client() @expose - def add_execution(self, graph_id: str, data: BlockInput) -> dict[Any, Any]: - graph: Graph | None = self.run_and_wait(get_graph(graph_id)) + def add_execution( + self, graph_id: str, data: BlockInput, user_id: str + ) -> dict[Any, Any]: + graph: Graph | 
None = self.run_and_wait(get_graph(graph_id, user_id=user_id)) if not graph: raise Exception(f"Graph #{graph_id} not found.") graph.validate_graph(for_run=True) diff --git a/rnd/autogpt_server/autogpt_server/executor/scheduler.py b/rnd/autogpt_server/autogpt_server/executor/scheduler.py index 360f756d14..e55aebf052 100644 --- a/rnd/autogpt_server/autogpt_server/executor/scheduler.py +++ b/rnd/autogpt_server/autogpt_server/executor/scheduler.py @@ -62,16 +62,22 @@ class ExecutionScheduler(AppService): logger.exception(f"Error executing graph {graph_id}: {e}") @expose - def update_schedule(self, schedule_id: str, is_enabled: bool) -> str: - self.run_and_wait(model.update_schedule(schedule_id, is_enabled)) + def update_schedule(self, schedule_id: str, is_enabled: bool, user_id: str) -> str: + self.run_and_wait(model.update_schedule(schedule_id, is_enabled, user_id)) return schedule_id @expose def add_execution_schedule( - self, graph_id: str, graph_version: int, cron: str, input_data: BlockInput + self, + graph_id: str, + graph_version: int, + cron: str, + input_data: BlockInput, + user_id: str, ) -> str: schedule = model.ExecutionSchedule( graph_id=graph_id, + user_id=user_id, graph_version=graph_version, schedule=cron, input_data=input_data, @@ -79,7 +85,7 @@ class ExecutionScheduler(AppService): return self.run_and_wait(model.add_schedule(schedule)).id @expose - def get_execution_schedules(self, graph_id: str) -> dict[str, str]: - query = model.get_schedules(graph_id) + def get_execution_schedules(self, graph_id: str, user_id: str) -> dict[str, str]: + query = model.get_schedules(graph_id, user_id=user_id) schedules: list[model.ExecutionSchedule] = self.run_and_wait(query) return {v.id: v.schedule for v in schedules} diff --git a/rnd/autogpt_server/autogpt_server/server/server.py b/rnd/autogpt_server/autogpt_server/server/server.py index 2fee638322..547df1ef04 100644 --- a/rnd/autogpt_server/autogpt_server/server/server.py +++ b/rnd/autogpt_server/autogpt_server/server/server.py @@ -1,10 +1,12 @@ import asyncio -import uuid +import inspect from collections import defaultdict from contextlib import asynccontextmanager +from functools import wraps from typing import Annotated, Any, Dict import uvicorn +from autogpt_libs.auth.jwt_utils import parse_jwt_token from autogpt_libs.auth.middleware import auth_middleware from fastapi import ( APIRouter, @@ -21,12 +23,14 @@ from fastapi.responses import JSONResponse import autogpt_server.server.ws_api from autogpt_server.data import block, db from autogpt_server.data import graph as graph_db +from autogpt_server.data import user as user_db from autogpt_server.data.block import BlockInput, CompletedBlockOutput from autogpt_server.data.execution import ( ExecutionResult, get_execution_results, list_executions, ) +from autogpt_server.data.user import get_or_create_user from autogpt_server.executor import ExecutionManager, ExecutionScheduler from autogpt_server.server.conn_manager import ConnectionManager from autogpt_server.server.model import ( @@ -39,12 +43,26 @@ from autogpt_server.util.lock import KeyedMutex from autogpt_server.util.service import AppService, expose, get_service_client from autogpt_server.util.settings import Settings +settings = Settings() + + +def get_user_id(payload: dict = Depends(auth_middleware)) -> str: + if not payload: + # This handles the case when authentication is disabled + return "3e53486c-cf57-477e-ba2a-cb02dc828e1a" + + user_id = payload.get("sub") + if not user_id: + raise HTTPException(status_code=401, detail="User 
ID not found in token") + return user_id + class AgentServer(AppService): event_queue: asyncio.Queue[ExecutionResult] = asyncio.Queue() manager = ConnectionManager() mutex = KeyedMutex() use_db = False + _test_dependency_overrides = {} async def event_broadcaster(self): while True: @@ -56,6 +74,7 @@ class AgentServer(AppService): await db.connect() await block.initialize_blocks() await graph_db.import_packaged_templates() + await user_db.create_default_user(settings.config.enable_auth) asyncio.create_task(self.event_broadcaster()) yield await db.disconnect() @@ -72,6 +91,9 @@ class AgentServer(AppService): lifespan=self.lifespan, ) + if self._test_dependency_overrides: + app.dependency_overrides.update(self._test_dependency_overrides) + app.add_middleware( CORSMiddleware, allow_origins=["*"], # Allows all origins @@ -84,6 +106,12 @@ class AgentServer(AppService): router = APIRouter(prefix="/api") router.dependencies.append(Depends(auth_middleware)) + router.add_api_route( + path="/auth/user", + endpoint=self.get_or_create_user_route, + methods=["POST"], + ) + router.add_api_route( path="/blocks", endpoint=self.get_graph_blocks, # type: ignore @@ -201,6 +229,35 @@ class AgentServer(AppService): uvicorn.run(app, host="0.0.0.0", port=8000) + def set_test_dependency_overrides(self, overrides: dict): + self._test_dependency_overrides = overrides + + def _apply_overrides_to_methods(self): + for attr_name in dir(self): + attr = getattr(self, attr_name) + if callable(attr) and hasattr(attr, "__annotations__"): + setattr(self, attr_name, self._override_method(attr)) + + # TODO: fix this with some proper refactoring of the server + def _override_method(self, method): + @wraps(method) + async def wrapper(*args, **kwargs): + sig = inspect.signature(method) + for param_name, param in sig.parameters.items(): + if param.annotation is inspect.Parameter.empty: + continue + if isinstance(param.annotation, Depends) or ( # type: ignore + isinstance(param.annotation, type) and issubclass(param.annotation, Depends) # type: ignore + ): + dependency = param.annotation.dependency if isinstance(param.annotation, Depends) else param.annotation # type: ignore + if dependency in self._test_dependency_overrides: + kwargs[param_name] = self._test_dependency_overrides[ + dependency + ]() + return await method(*args, **kwargs) + + return wrapper + @property def execution_manager_client(self) -> ExecutionManager: return get_service_client(ExecutionManager) @@ -219,7 +276,30 @@ class AgentServer(AppService): status_code=500, ) + async def authenticate_websocket(self, websocket: WebSocket) -> str: + if settings.config.enable_auth.lower() == "true": + token = websocket.query_params.get("token") + if not token: + await websocket.close(code=4001, reason="Missing authentication token") + return "" + + try: + payload = parse_jwt_token(token) + user_id = payload.get("sub") + if not user_id: + await websocket.close(code=4002, reason="Invalid token") + return "" + return user_id + except ValueError: + await websocket.close(code=4003, reason="Invalid token") + return "" + else: + return "3e53486c-cf57-477e-ba2a-cb02dc828e1a" + async def websocket_router(self, websocket: WebSocket): + user_id = await self.authenticate_websocket(websocket) + if not user_id: + return await self.manager.connect(websocket) try: while True: @@ -258,7 +338,7 @@ class AgentServer(AppService): ).model_dump_json() ) elif message.method == Methods.GET_GRAPHS: - data = await self.get_graphs() + data = await self.get_graphs(user_id=user_id) await 
websocket.send_text( WsMessage( method=Methods.GET_GRAPHS, @@ -269,7 +349,9 @@ class AgentServer(AppService): print("Get graphs request received") elif message.method == Methods.GET_GRAPH: assert isinstance(message.data, dict), "Data must be a dictionary" - data = await self.get_graph(message.data["graph_id"]) + data = await self.get_graph( + message.data["graph_id"], user_id=user_id + ) await websocket.send_text( WsMessage( method=Methods.GET_GRAPH, @@ -281,7 +363,7 @@ class AgentServer(AppService): elif message.method == Methods.CREATE_GRAPH: assert isinstance(message.data, dict), "Data must be a dictionary" create_graph = CreateGraph.model_validate(message.data) - data = await self.create_new_graph(create_graph) + data = await self.create_new_graph(create_graph, user_id=user_id) await websocket.send_text( WsMessage( method=Methods.CREATE_GRAPH, @@ -294,7 +376,7 @@ class AgentServer(AppService): elif message.method == Methods.RUN_GRAPH: assert isinstance(message.data, dict), "Data must be a dictionary" data = await self.execute_graph( - message.data["graph_id"], message.data["data"] + message.data["graph_id"], message.data["data"], user_id=user_id ) await websocket.send_text( WsMessage( @@ -307,7 +389,9 @@ class AgentServer(AppService): print("Run graph request received") elif message.method == Methods.GET_GRAPH_RUNS: assert isinstance(message.data, dict), "Data must be a dictionary" - data = await self.list_graph_runs(message.data["graph_id"]) + data = await self.list_graph_runs( + message.data["graph_id"], user_id=user_id + ) await websocket.send_text( WsMessage( method=Methods.GET_GRAPH_RUNS, @@ -323,6 +407,7 @@ class AgentServer(AppService): message.data["graph_id"], message.data["cron"], message.data["data"], + user_id=user_id, ) await websocket.send_text( WsMessage( @@ -335,7 +420,9 @@ class AgentServer(AppService): print("Create scheduled run request received") elif message.method == Methods.GET_SCHEDULED_RUNS: assert isinstance(message.data, dict), "Data must be a dictionary" - data = self.get_execution_schedules(message.data["graph_id"]) + data = self.get_execution_schedules( + message.data["graph_id"], user_id=user_id + ) await websocket.send_text( WsMessage( method=Methods.GET_SCHEDULED_RUNS, @@ -347,7 +434,7 @@ class AgentServer(AppService): elif message.method == Methods.UPDATE_SCHEDULED_RUN: assert isinstance(message.data, dict), "Data must be a dictionary" data = self.update_schedule( - message.data["schedule_id"], message.data + message.data["schedule_id"], message.data, user_id=user_id ) await websocket.send_text( WsMessage( @@ -386,6 +473,11 @@ class AgentServer(AppService): self.manager.disconnect(websocket) print("Client Disconnected") + @classmethod + async def get_or_create_user_route(cls, user_data: dict = Depends(auth_middleware)): + user = await get_or_create_user(user_data) + return user.model_dump() + @classmethod def get_graph_blocks(cls) -> list[dict[Any, Any]]: return [v.to_dict() for v in block.get_blocks().values()] # type: ignore @@ -404,8 +496,10 @@ class AgentServer(AppService): return output @classmethod - async def get_graphs(cls) -> list[graph_db.GraphMeta]: - return await graph_db.get_graphs_meta(filter_by="active") + async def get_graphs( + cls, user_id: Annotated[str, Depends(get_user_id)] + ) -> list[graph_db.GraphMeta]: + return await graph_db.get_graphs_meta(filter_by="active", user_id=user_id) @classmethod async def get_templates(cls) -> list[graph_db.GraphMeta]: @@ -413,9 +507,12 @@ class AgentServer(AppService): @classmethod async def 
get_graph( - cls, graph_id: str, version: int | None = None + cls, + graph_id: str, + user_id: Annotated[str, Depends(get_user_id)], + version: int | None = None, ) -> graph_db.Graph: - graph = await graph_db.get_graph(graph_id, version) + graph = await graph_db.get_graph(graph_id, version, user_id=user_id) if not graph: raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.") return graph @@ -432,30 +529,39 @@ class AgentServer(AppService): return graph @classmethod - async def get_graph_all_versions(cls, graph_id: str) -> list[graph_db.Graph]: - graphs = await graph_db.get_graph_all_versions(graph_id) + async def get_graph_all_versions( + cls, graph_id: str, user_id: Annotated[str, Depends(get_user_id)] + ) -> list[graph_db.Graph]: + graphs = await graph_db.get_graph_all_versions(graph_id, user_id=user_id) if not graphs: raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.") return graphs @classmethod - async def create_new_graph(cls, create_graph: CreateGraph) -> graph_db.Graph: - return await cls.create_graph(create_graph, is_template=False) + async def create_new_graph( + cls, create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)] + ) -> graph_db.Graph: + return await cls.create_graph(create_graph, is_template=False, user_id=user_id) @classmethod - async def create_new_template(cls, create_graph: CreateGraph) -> graph_db.Graph: - return await cls.create_graph(create_graph, is_template=True) + async def create_new_template( + cls, create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)] + ) -> graph_db.Graph: + return await cls.create_graph(create_graph, is_template=True, user_id=user_id) @classmethod async def create_graph( - cls, create_graph: CreateGraph, is_template: bool + cls, create_graph: CreateGraph, is_template: bool, user_id: str ) -> graph_db.Graph: if create_graph.graph: graph = create_graph.graph elif create_graph.template_id: # Create a new graph from a template graph = await graph_db.get_graph( - create_graph.template_id, create_graph.template_version, template=True + create_graph.template_id, + create_graph.template_version, + template=True, + user_id=user_id, ) if not graph: raise HTTPException( @@ -471,16 +577,23 @@ class AgentServer(AppService): graph.is_active = not is_template graph.reassign_ids(reassign_graph_id=True) - return await graph_db.create_graph(graph) + return await graph_db.create_graph(graph, user_id=user_id) @classmethod - async def update_graph(cls, graph_id: str, graph: graph_db.Graph) -> graph_db.Graph: + async def update_graph( + cls, + graph_id: str, + graph: graph_db.Graph, + user_id: Annotated[str, Depends(get_user_id)], + ) -> graph_db.Graph: # Sanity check if graph.id and graph.id != graph_id: raise HTTPException(400, detail="Graph ID does not match ID in URI") # Determine new version - existing_versions = await graph_db.get_graph_all_versions(graph_id) + existing_versions = await graph_db.get_graph_all_versions( + graph_id, user_id=user_id + ) if not existing_versions: raise HTTPException(404, detail=f"Graph #{graph_id} not found") latest_version_number = max(g.version for g in existing_versions) @@ -496,43 +609,56 @@ class AgentServer(AppService): graph.is_active = not graph.is_template graph.reassign_ids() - new_graph_version = await graph_db.create_graph(graph) + new_graph_version = await graph_db.create_graph(graph, user_id=user_id) if new_graph_version.is_active: # Ensure new version is the only active version await graph_db.set_graph_active_version( - 
graph_id=graph_id, version=new_graph_version.version + graph_id=graph_id, version=new_graph_version.version, user_id=user_id ) return new_graph_version @classmethod async def set_graph_active_version( - cls, graph_id: str, request_body: SetGraphActiveVersion + cls, + graph_id: str, + request_body: SetGraphActiveVersion, + user_id: Annotated[str, Depends(get_user_id)], ): new_active_version = request_body.active_graph_version - if not await graph_db.get_graph(graph_id, new_active_version): + if not await graph_db.get_graph(graph_id, new_active_version, user_id=user_id): raise HTTPException( 404, f"Graph #{graph_id} v{new_active_version} not found" ) await graph_db.set_graph_active_version( - graph_id=graph_id, version=request_body.active_graph_version + graph_id=graph_id, + version=request_body.active_graph_version, + user_id=user_id, ) async def execute_graph( - self, graph_id: str, node_input: dict[Any, Any] + self, + graph_id: str, + node_input: dict[Any, Any], + user_id: Annotated[str, Depends(get_user_id)], ) -> dict[Any, Any]: try: - return self.execution_manager_client.add_execution(graph_id, node_input) + return self.execution_manager_client.add_execution( + graph_id, node_input, user_id=user_id + ) except Exception as e: msg = e.__str__().encode().decode("unicode_escape") raise HTTPException(status_code=400, detail=msg) @classmethod async def list_graph_runs( - cls, graph_id: str, graph_version: int | None = None + cls, + graph_id: str, + user_id: Annotated[str, Depends(get_user_id)], + graph_version: int | None = None, ) -> list[str]: - graph = await graph_db.get_graph(graph_id, graph_version) + graph = await graph_db.get_graph(graph_id, graph_version, user_id=user_id) if not graph: rev = "" if graph_version is None else f" v{graph_version}" raise HTTPException( @@ -543,38 +669,47 @@ class AgentServer(AppService): @classmethod async def get_run_execution_results( - cls, graph_id: str, run_id: str + cls, graph_id: str, run_id: str, user_id: Annotated[str, Depends(get_user_id)] ) -> list[ExecutionResult]: - graph = await graph_db.get_graph(graph_id) + graph = await graph_db.get_graph(graph_id, user_id=user_id) if not graph: raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.") return await get_execution_results(run_id) async def create_schedule( - self, graph_id: str, cron: str, input_data: dict[Any, Any] + self, + graph_id: str, + cron: str, + input_data: dict[Any, Any], + user_id: Annotated[str, Depends(get_user_id)], ) -> dict[Any, Any]: - graph = await graph_db.get_graph(graph_id) + graph = await graph_db.get_graph(graph_id, user_id=user_id) if not graph: raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.") execution_scheduler = self.execution_scheduler_client return { "id": execution_scheduler.add_execution_schedule( - graph_id, graph.version, cron, input_data + graph_id, graph.version, cron, input_data, user_id=user_id ) } def update_schedule( - self, schedule_id: str, input_data: dict[Any, Any] + self, + schedule_id: str, + input_data: dict[Any, Any], + user_id: Annotated[str, Depends(get_user_id)], ) -> dict[Any, Any]: execution_scheduler = self.execution_scheduler_client is_enabled = input_data.get("is_enabled", False) - execution_scheduler.update_schedule(schedule_id, is_enabled) # type: ignore + execution_scheduler.update_schedule(schedule_id, is_enabled, user_id=user_id) # type: ignore return {"id": schedule_id} - def get_execution_schedules(self, graph_id: str) -> dict[str, str]: + def get_execution_schedules( + self, 
graph_id: str, user_id: Annotated[str, Depends(get_user_id)] + ) -> dict[str, str]: execution_scheduler = self.execution_scheduler_client - return execution_scheduler.get_execution_schedules(graph_id) # type: ignore + return execution_scheduler.get_execution_schedules(graph_id, user_id) # type: ignore @expose def send_execution_update(self, execution_result_dict: dict[Any, Any]): diff --git a/rnd/autogpt_server/autogpt_server/usecases/block_autogen.py b/rnd/autogpt_server/autogpt_server/usecases/block_autogen.py index 1ded46b33f..79f7cc3951 100644 --- a/rnd/autogpt_server/autogpt_server/usecases/block_autogen.py +++ b/rnd/autogpt_server/autogpt_server/usecases/block_autogen.py @@ -1,11 +1,14 @@ from pathlib import Path +from prisma.models import User + from autogpt_server.blocks.basic import ValueBlock from autogpt_server.blocks.block import BlockInstallationBlock from autogpt_server.blocks.http import HttpRequestBlock from autogpt_server.blocks.llm import TextLlmCallBlock from autogpt_server.blocks.text import TextFormatterBlock, TextParserBlock from autogpt_server.data.graph import Graph, Link, Node, create_graph +from autogpt_server.data.user import get_or_create_user from autogpt_server.util.test import SpinTestServer, wait_execution sample_block_modules = { @@ -23,6 +26,16 @@ for module, description in sample_block_modules.items(): sample_block_codes[module] = f"[Example: {description}]\n{code}" +async def create_test_user() -> User: + test_user_data = { + "sub": "ef3b97d7-1161-4eb4-92b2-10c24fb154c1", + "email": "testuser@example.com", + "name": "Test User", + } + user = await get_or_create_user(test_user_data) + return user + + def create_test_graph() -> Graph: """ ValueBlock (input) @@ -237,9 +250,12 @@ Here are a couple of sample of the Block class implementation: async def block_autogen_agent(): async with SpinTestServer() as server: test_manager = server.exec_manager - test_graph = await create_graph(create_test_graph()) + test_user = await create_test_user() + test_graph = await create_graph(create_test_graph(), user_id=test_user.id) input_data = {"input": "Write me a block that writes a string into a file."} - response = await server.agent_server.execute_graph(test_graph.id, input_data) + response = await server.agent_server.execute_graph( + test_graph.id, input_data, test_user.id + ) print(response) result = await wait_execution( exec_manager=test_manager, @@ -247,6 +263,7 @@ async def block_autogen_agent(): graph_exec_id=response["id"], num_execs=10, timeout=1200, + user_id=test_user.id, ) print(result) diff --git a/rnd/autogpt_server/autogpt_server/usecases/reddit_marketing.py b/rnd/autogpt_server/autogpt_server/usecases/reddit_marketing.py index 2505acd9eb..d9236bf1a0 100644 --- a/rnd/autogpt_server/autogpt_server/usecases/reddit_marketing.py +++ b/rnd/autogpt_server/autogpt_server/usecases/reddit_marketing.py @@ -1,7 +1,10 @@ +from prisma.models import User + from autogpt_server.blocks.llm import ObjectLlmCallBlock from autogpt_server.blocks.reddit import RedditGetPostsBlock, RedditPostCommentBlock from autogpt_server.blocks.text import TextFormatterBlock, TextMatcherBlock from autogpt_server.data.graph import Graph, Link, Node, create_graph +from autogpt_server.data.user import get_or_create_user from autogpt_server.util.test import SpinTestServer, wait_execution @@ -136,14 +139,29 @@ Make sure to only comment on a relevant post. 
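The server and usecase hunks above thread ownership through the whole stack: get_user_id resolves the JWT's "sub" claim (or falls back to a fixed default-user id when auth is disabled), every handler receives it via Depends, and the data layer filters on it. A minimal standalone sketch of that dependency pattern; fake_auth_middleware and the /graphs route here are stand-ins for illustration, not the real AutoGPT server API:

```python
from typing import Annotated

from fastapi import Depends, FastAPI, HTTPException

DEFAULT_USER_ID = "3e53486c-cf57-477e-ba2a-cb02dc828e1a"  # used when auth is disabled


def fake_auth_middleware() -> dict:
    # Stand-in for autogpt_libs.auth.middleware.auth_middleware; an empty
    # payload models the "authentication disabled" case.
    return {}


def get_user_id(payload: dict = Depends(fake_auth_middleware)) -> str:
    if not payload:
        return DEFAULT_USER_ID
    user_id = payload.get("sub")
    if not user_id:
        raise HTTPException(status_code=401, detail="User ID not found in token")
    return user_id


app = FastAPI()


@app.get("/graphs")
async def list_graphs(user_id: Annotated[str, Depends(get_user_id)]) -> dict:
    # The real server forwards this to graph_db.get_graphs_meta(..., user_id=user_id).
    return {"user_id": user_id, "graphs": []}
```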
return test_graph +async def create_test_user() -> User: + test_user_data = { + "sub": "ef3b97d7-1161-4eb4-92b2-10c24fb154c1", + "email": "testuser@example.com", + "name": "Test User", + } + user = await get_or_create_user(test_user_data) + return user + + async def reddit_marketing_agent(): async with SpinTestServer() as server: exec_man = server.exec_manager - test_graph = await create_graph(create_test_graph()) + test_user = await create_test_user() + test_graph = await create_graph(create_test_graph(), user_id=test_user.id) input_data = {"subreddit": "AutoGPT"} - response = await server.agent_server.execute_graph(test_graph.id, input_data) + response = await server.agent_server.execute_graph( + test_graph.id, input_data, test_user.id + ) print(response) - result = await wait_execution(exec_man, test_graph.id, response["id"], 13, 120) + result = await wait_execution( + exec_man, test_user.id, test_graph.id, response["id"], 13, 120 + ) print(result) diff --git a/rnd/autogpt_server/autogpt_server/usecases/sample.py b/rnd/autogpt_server/autogpt_server/usecases/sample.py index 9e7cb89bac..2ac9f43993 100644 --- a/rnd/autogpt_server/autogpt_server/usecases/sample.py +++ b/rnd/autogpt_server/autogpt_server/usecases/sample.py @@ -1,10 +1,23 @@ +from prisma.models import User + from autogpt_server.blocks.basic import InputBlock, PrintingBlock from autogpt_server.blocks.text import TextFormatterBlock from autogpt_server.data import graph from autogpt_server.data.graph import create_graph +from autogpt_server.data.user import get_or_create_user from autogpt_server.util.test import SpinTestServer, wait_execution +async def create_test_user() -> User: + test_user_data = { + "sub": "ef3b97d7-1161-4eb4-92b2-10c24fb154c1", + "email": "testuser@example.com", + "name": "Test User", + } + user = await get_or_create_user(test_user_data) + return user + + def create_test_graph() -> graph.Graph: """ ValueBlock @@ -63,11 +76,16 @@ def create_test_graph() -> graph.Graph: async def sample_agent(): async with SpinTestServer() as server: exec_man = server.exec_manager - test_graph = await create_graph(create_test_graph()) + test_user = await create_test_user() + test_graph = await create_graph(create_test_graph(), test_user.id) input_data = {"input_1": "Hello", "input_2": "World"} - response = await server.agent_server.execute_graph(test_graph.id, input_data) + response = await server.agent_server.execute_graph( + test_graph.id, input_data, test_user.id + ) print(response) - result = await wait_execution(exec_man, test_graph.id, response["id"], 4, 10) + result = await wait_execution( + exec_man, test_user.id, test_graph.id, response["id"], 4, 10 + ) print(result) diff --git a/rnd/autogpt_server/autogpt_server/util/settings.py b/rnd/autogpt_server/autogpt_server/util/settings.py index d9515bbf7b..db20f88cb9 100644 --- a/rnd/autogpt_server/autogpt_server/util/settings.py +++ b/rnd/autogpt_server/autogpt_server/util/settings.py @@ -57,6 +57,10 @@ class Config(UpdateTrackingModel["Config"], BaseSettings): default="localhost", description="The default hostname of the Pyro server.", ) + enable_auth: str = Field( + default="false", + description="If authentication is enabled or not", + ) # Add more configuration fields as needed model_config = SettingsConfigDict( @@ -107,6 +111,11 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings): discord_bot_token: str = Field(default="", description="Discord bot token") + smtp_server: str = Field(default="", description="SMTP server IP") + smtp_port: str = 
Field(default="", description="SMTP server port") + smtp_username: str = Field(default="", description="SMTP username") + smtp_password: str = Field(default="", description="SMTP password") + # Add more secret fields as needed model_config = SettingsConfigDict( diff --git a/rnd/autogpt_server/autogpt_server/util/test.py b/rnd/autogpt_server/autogpt_server/util/test.py index 8242e776dd..47095c9a33 100644 --- a/rnd/autogpt_server/autogpt_server/util/test.py +++ b/rnd/autogpt_server/autogpt_server/util/test.py @@ -5,6 +5,7 @@ from autogpt_server.data.block import Block, initialize_blocks from autogpt_server.data.execution import ExecutionStatus from autogpt_server.executor import ExecutionManager, ExecutionScheduler from autogpt_server.server import AgentServer +from autogpt_server.server.server import get_user_id from autogpt_server.util.service import PyroNameServer log = print @@ -17,14 +18,21 @@ class SpinTestServer: self.agent_server = AgentServer() self.scheduler = ExecutionScheduler() + @staticmethod + def test_get_user_id(): + return "3e53486c-cf57-477e-ba2a-cb02dc828e1a" + async def __aenter__(self): + self.name_server.__enter__() + self.setup_dependency_overrides() self.agent_server.__enter__() self.exec_manager.__enter__() self.scheduler.__enter__() await db.connect() await initialize_blocks() + return self async def __aexit__(self, exc_type, exc_val, exc_tb): @@ -35,16 +43,25 @@ class SpinTestServer: self.exec_manager.__exit__(exc_type, exc_val, exc_tb) self.scheduler.__exit__(exc_type, exc_val, exc_tb) + def setup_dependency_overrides(self): + # Override get_user_id for testing + self.agent_server.set_test_dependency_overrides( + {get_user_id: self.test_get_user_id} + ) + async def wait_execution( exec_manager: ExecutionManager, + user_id: str, graph_id: str, graph_exec_id: str, num_execs: int, timeout: int = 20, ) -> list: async def is_execution_completed(): - execs = await AgentServer().get_run_execution_results(graph_id, graph_exec_id) + execs = await AgentServer().get_run_execution_results( + graph_id, graph_exec_id, user_id + ) return ( exec_manager.queue.empty() and len(execs) == num_execs @@ -58,7 +75,7 @@ async def wait_execution( for i in range(timeout): if await is_execution_completed(): return await AgentServer().get_run_execution_results( - graph_id, graph_exec_id + graph_id, graph_exec_id, user_id ) time.sleep(1) @@ -96,10 +113,14 @@ def execute_block_test(block: Block): ex_output_name, ex_output_data = block.test_output[output_index] def compare(data, expected_data): - if isinstance(expected_data, type): + if data == expected_data: + is_matching = True + elif isinstance(expected_data, type): is_matching = isinstance(data, expected_data) + elif callable(expected_data): + is_matching = expected_data(data) else: - is_matching = data == expected_data + is_matching = False mark = "✅" if is_matching else "❌" log(f"{prefix} {mark} comparing `{data}` vs `{expected_data}`") diff --git a/rnd/autogpt_server/graph_templates/Discord Bot Chat To LLM_v5.json b/rnd/autogpt_server/graph_templates/Discord Bot Chat To LLM_v5.json new file mode 100644 index 0000000000..b6ea9e6286 --- /dev/null +++ b/rnd/autogpt_server/graph_templates/Discord Bot Chat To LLM_v5.json @@ -0,0 +1,199 @@ +{ + "id": "381164dd-3c91-43fd-ba93-c12a13ce8499", + "version": 5, + "is_active": false, + "is_template": true, + "name": "Discord Bot Chat To LLM", + "description": "Simply send the bot the message \"!chat \" and it will reply.", + "nodes": [ + { + "id": "b8138bca-7892-42c2-9594-a845d3483413", + 
"block_id": "d3f4g5h6-1i2j-3k4l-5m6n-7o8p9q0r1s2t", + "input_default": {}, + "metadata": { + "position": { + "x": -98.31744952152862, + "y": 291.1279542656707 + } + } + }, + { + "id": "b667bcc4-4e17-4343-bd31-14e48d99d21d", + "block_id": "e30a4d42-7b7d-4e6a-b36e-1f9b8e3b7d85", + "input_default": { + "input2": " Said: " + }, + "metadata": { + "position": { + "x": 642.0641136440832, + "y": -318.9010839696226 + } + } + }, + { + "id": "42eda7a9-fe29-45c8-9571-55222830142d", + "block_id": "3146e4fe-2cdd-4f29-bd12-0c9d5bb4deb0", + "input_default": { + "pattern": "(?<=!chat ).*" + }, + "metadata": { + "position": { + "x": 651.4338270731059, + "y": 120.68871252027822 + } + } + }, + { + "id": "9049f063-5b07-4984-b211-068bc93e653a", + "block_id": "1f292d4a-41a4-4977-9684-7c8d560b9f91", + "input_default": { + "model": "gpt-4o", + "sys_prompt": "You are a nice friendly AI" + }, + "metadata": { + "position": { + "x": 2099.785393180648, + "y": -325.6642266305269 + } + } + }, + { + "id": "dda2d061-2ef9-4dc5-9433-918c8395a4ac", + "block_id": "h1i2j3k4-5l6m-7n8o-9p0q-r1s2t3u4v5w6", + "input_default": {}, + "metadata": { + "position": { + "x": 2697.355782645, + "y": 225.29000586164966 + } + } + }, + { + "id": "3209c5e1-2da9-4cd1-bf4b-2f9488577815", + "block_id": "1ff065e9-88e8-4358-9d82-8dc91f622ba9", + "input_default": { + "data": "DISCORD BOT API KEY HERE" + }, + "metadata": { + "position": { + "x": -772.5858672155341, + "y": 26.390737439792503 + } + } + }, + { + "id": "b6411821-bd48-4543-b526-0f7138e8ffe9", + "block_id": "1ff065e9-88e8-4358-9d82-8dc91f622ba9", + "input_default": { + "input": "DISCORD BOT API KEY HERE" + }, + "metadata": { + "position": { + "x": -778.4138607648867, + "y": 422.0409097488691 + } + } + }, + { + "id": "d693cda1-973d-4d62-b549-d696b73d51d9", + "block_id": "e30a4d42-7b7d-4e6a-b36e-1f9b8e3b7d85", + "input_default": {}, + "metadata": { + "position": { + "x": 1325.5852307018679, + "y": -328.95888935525124 + } + } + } + ], + "links": [ + { + "id": "80683364-c3e8-467b-a734-d5629f97cd30", + "source_id": "b8138bca-7892-42c2-9594-a845d3483413", + "sink_id": "42eda7a9-fe29-45c8-9571-55222830142d", + "source_name": "message_content", + "sink_name": "text", + "is_static": false + }, + { + "id": "8510bd83-1444-4a70-99e3-26c3ae28d7bf", + "source_id": "42eda7a9-fe29-45c8-9571-55222830142d", + "sink_id": "3209c5e1-2da9-4cd1-bf4b-2f9488577815", + "source_name": "negative", + "sink_name": "input", + "is_static": false + }, + { + "id": "ff48a673-1f18-4b05-b5e7-e6dcc3e65add", + "source_id": "b8138bca-7892-42c2-9594-a845d3483413", + "sink_id": "dda2d061-2ef9-4dc5-9433-918c8395a4ac", + "source_name": "channel_name", + "sink_name": "channel_name", + "is_static": false + }, + { + "id": "aebf9b2b-ee01-41bf-9c05-6444b6e5aa44", + "source_id": "3209c5e1-2da9-4cd1-bf4b-2f9488577815", + "sink_id": "b8138bca-7892-42c2-9594-a845d3483413", + "source_name": "output", + "sink_name": "discord_bot_token", + "is_static": false + }, + { + "id": "cdbf9290-1b63-463d-a869-a16734ebd03c", + "source_id": "9049f063-5b07-4984-b211-068bc93e653a", + "sink_id": "dda2d061-2ef9-4dc5-9433-918c8395a4ac", + "source_name": "response", + "sink_name": "message_content", + "is_static": false + }, + { + "id": "d9a51e17-c8de-4835-bee1-c1abba457c35", + "source_id": "dda2d061-2ef9-4dc5-9433-918c8395a4ac", + "sink_id": "3209c5e1-2da9-4cd1-bf4b-2f9488577815", + "source_name": "status", + "sink_name": "input", + "is_static": false + }, + { + "id": "7bea8f77-45d7-4884-974f-b8f5ad10a988", + "source_id": "b6411821-bd48-4543-b526-0f7138e8ffe9", 
+ "sink_id": "b8138bca-7892-42c2-9594-a845d3483413", + "source_name": "output", + "sink_name": "discord_bot_token", + "is_static": false + }, + { + "id": "f2427ca7-3adf-450f-8be4-b8042eb0b9a6", + "source_id": "b8138bca-7892-42c2-9594-a845d3483413", + "sink_id": "b667bcc4-4e17-4343-bd31-14e48d99d21d", + "source_name": "username", + "sink_name": "input1", + "is_static": false + }, + { + "id": "117244bf-8c32-4096-baff-38cd0fa9cf9d", + "source_id": "b667bcc4-4e17-4343-bd31-14e48d99d21d", + "sink_id": "d693cda1-973d-4d62-b549-d696b73d51d9", + "source_name": "output", + "sink_name": "input1", + "is_static": false + }, + { + "id": "9ee4a0a5-de27-4bf8-81a9-140db1b5e475", + "source_id": "d693cda1-973d-4d62-b549-d696b73d51d9", + "sink_id": "9049f063-5b07-4984-b211-068bc93e653a", + "source_name": "output", + "sink_name": "prompt", + "is_static": false + }, + { + "id": "49da866a-8c13-469c-95ea-fe4685e95c75", + "source_id": "42eda7a9-fe29-45c8-9571-55222830142d", + "sink_id": "d693cda1-973d-4d62-b549-d696b73d51d9", + "source_name": "positive", + "sink_name": "input2", + "is_static": false + } + ] +} \ No newline at end of file diff --git a/rnd/autogpt_server/graph_templates/Discord Search Bot_v17.json b/rnd/autogpt_server/graph_templates/Discord Search Bot_v17.json new file mode 100644 index 0000000000..7de268a970 --- /dev/null +++ b/rnd/autogpt_server/graph_templates/Discord Search Bot_v17.json @@ -0,0 +1,266 @@ +{ + "id": "696b4b9c-f28f-4dda-a44c-e748ac22438f", + "version": 17, + "is_active": false, + "is_template": true, + "name": "Discord Search Bot", + "description": "This is a Discord search bot, send it the command \"!search \" and it will do a web search and answer your question!", + "nodes": [ + { + "id": "60ba4aac-1751-4be7-8745-1bd32191d4a2", + "block_id": "d3f4g5h6-1i2j-3k4l-5m6n-7o8p9q0r1s2t", + "input_default": {}, + "metadata": { + "position": { + "x": -961.2660758713816, + "y": 333.47185665649613 + } + } + }, + { + "id": "b09e201a-cd71-42d4-a197-22e7eebc54c9", + "block_id": "e30a4d42-7b7d-4e6a-b36e-1f9b8e3b7d85", + "input_default": { + "input2": ", Here is the latest web info to answer the question : \n" + }, + "metadata": { + "position": { + "x": 881.3259434267115, + "y": -564.3287840347994 + } + } + }, + { + "id": "3169d1a8-b541-43f7-97ce-ddc6aecb2080", + "block_id": "3146e4fe-2cdd-4f29-bd12-0c9d5bb4deb0", + "input_default": { + "pattern": "(?<=!search ).*" + }, + "metadata": { + "position": { + "x": -284.1111358361005, + "y": -43.71794261767991 + } + } + }, + { + "id": "5658c4f7-8e67-4d30-93f2-157bdbd3ef87", + "block_id": "b2c3d4e5-6f7g-8h9i-0j1k-l2m3n4o5p6q7", + "input_default": {}, + "metadata": { + "position": { + "x": 319.9343851243159, + "y": -48.49947115893917 + } + } + }, + { + "id": "b29e3831-3fb7-41bd-88d8-ce3a5dde3d69", + "block_id": "1f292d4a-41a4-4977-9684-7c8d560b9f91", + "input_default": { + "model": "gpt-4o", + "sys_prompt": "You are a question answerer and info summariser, answer the questions with the info you are provided, be sure to @ the user who asked the question in your reply like @username" + }, + "metadata": { + "position": { + "x": 2085.06017081387, + "y": -387.5334342999411 + } + } + }, + { + "id": "164bc3ea-e812-4391-a62d-bdddcf86f3cd", + "block_id": "e30a4d42-7b7d-4e6a-b36e-1f9b8e3b7d85", + "input_default": {}, + "metadata": { + "position": { + "x": 1469.6744442484253, + "y": -435.0392111332514 + } + } + }, + { + "id": "10759047-6387-4ff1-9117-bbef47d24ee8", + "block_id": "e30a4d42-7b7d-4e6a-b36e-1f9b8e3b7d85", + "input_default": {}, + "metadata": { + 
"position": { + "x": 326.8949613725521, + "y": -579.6877803706152 + } + } + }, + { + "id": "af7c5160-7bf0-4ad0-9806-04222009091f", + "block_id": "e30a4d42-7b7d-4e6a-b36e-1f9b8e3b7d85", + "input_default": { + "input2": " Asked the question: " + }, + "metadata": { + "position": { + "x": -265.6965655001714, + "y": -628.1379507780849 + } + } + }, + { + "id": "4d74513d-42f7-4fd0-808a-0f4844513966", + "block_id": "1ff065e9-88e8-4358-9d82-8dc91f622ba9", + "input_default": { + "input": "DISCORD BOT API KEY HERE" + }, + "metadata": { + "position": { + "x": -1532.6418163253616, + "y": 587.6533051108552 + } + } + }, + { + "id": "f3d62f22-d193-4f04-85d2-164200fca4c0", + "block_id": "h1i2j3k4-5l6m-7n8o-9p0q-r1s2t3u4v5w6", + "input_default": {}, + "metadata": { + "position": { + "x": 2814.192971071703, + "y": 310.74654561036294 + } + } + }, + { + "id": "3b2bb6a5-9c42-4189-a9a0-0e499ccb766a", + "block_id": "1ff065e9-88e8-4358-9d82-8dc91f622ba9", + "input_default": { + "data": "DISCORD BOT API KEY HERE" + }, + "metadata": { + "position": { + "x": -1528.6418163253616, + "y": 119.65330511085517 + } + } + } + ], + "links": [ + { + "id": "346a8259-1093-4374-8271-904742aa6d89", + "source_id": "b29e3831-3fb7-41bd-88d8-ce3a5dde3d69", + "sink_id": "f3d62f22-d193-4f04-85d2-164200fca4c0", + "source_name": "response", + "sink_name": "message_content", + "is_static": false + }, + { + "id": "53a8ecc6-60b6-4f4a-90c4-cb11dd1874e0", + "source_id": "5658c4f7-8e67-4d30-93f2-157bdbd3ef87", + "sink_id": "164bc3ea-e812-4391-a62d-bdddcf86f3cd", + "source_name": "results", + "sink_name": "input2", + "is_static": false + }, + { + "id": "2b3fd279-5816-48da-b2ab-484497fe67d5", + "source_id": "f3d62f22-d193-4f04-85d2-164200fca4c0", + "sink_id": "3b2bb6a5-9c42-4189-a9a0-0e499ccb766a", + "source_name": "status", + "sink_name": "input", + "is_static": false + }, + { + "id": "bb036c88-4031-4c6c-a70b-a82f5e50a013", + "source_id": "4d74513d-42f7-4fd0-808a-0f4844513966", + "sink_id": "60ba4aac-1751-4be7-8745-1bd32191d4a2", + "source_name": "output", + "sink_name": "discord_bot_token", + "is_static": false + }, + { + "id": "e67befdc-59b5-47bf-9663-8baeeef026f7", + "source_id": "3169d1a8-b541-43f7-97ce-ddc6aecb2080", + "sink_id": "10759047-6387-4ff1-9117-bbef47d24ee8", + "source_name": "positive", + "sink_name": "input2", + "is_static": false + }, + { + "id": "9c0fa608-ceea-44cd-98cf-8a2d6ed25b24", + "source_id": "60ba4aac-1751-4be7-8745-1bd32191d4a2", + "sink_id": "af7c5160-7bf0-4ad0-9806-04222009091f", + "source_name": "username", + "sink_name": "input1", + "is_static": false + }, + { + "id": "ad5e1bd6-69bd-4846-87dc-e08d8d2e0f2b", + "source_id": "af7c5160-7bf0-4ad0-9806-04222009091f", + "sink_id": "10759047-6387-4ff1-9117-bbef47d24ee8", + "source_name": "output", + "sink_name": "input1", + "is_static": false + }, + { + "id": "96f4b2fd-82d8-4754-9f41-f65e8e1f565a", + "source_id": "60ba4aac-1751-4be7-8745-1bd32191d4a2", + "sink_id": "3169d1a8-b541-43f7-97ce-ddc6aecb2080", + "source_name": "message_content", + "sink_name": "text", + "is_static": false + }, + { + "id": "ec6666bc-4d54-4960-b3b1-13a0b4a872a7", + "source_id": "3b2bb6a5-9c42-4189-a9a0-0e499ccb766a", + "sink_id": "60ba4aac-1751-4be7-8745-1bd32191d4a2", + "source_name": "output", + "sink_name": "discord_bot_token", + "is_static": false + }, + { + "id": "ccd08d1f-7ccc-42fa-882c-91f6991ad5e8", + "source_id": "b09e201a-cd71-42d4-a197-22e7eebc54c9", + "sink_id": "164bc3ea-e812-4391-a62d-bdddcf86f3cd", + "source_name": "output", + "sink_name": "input1", + "is_static": false + }, + { + 
"id": "3ed20f9c-3f79-41e4-8fab-0309e92ac629", + "source_id": "60ba4aac-1751-4be7-8745-1bd32191d4a2", + "sink_id": "f3d62f22-d193-4f04-85d2-164200fca4c0", + "source_name": "channel_name", + "sink_name": "channel_name", + "is_static": false + }, + { + "id": "89a129e5-11d2-4fac-9a15-7de182a2b806", + "source_id": "164bc3ea-e812-4391-a62d-bdddcf86f3cd", + "sink_id": "b29e3831-3fb7-41bd-88d8-ce3a5dde3d69", + "source_name": "output", + "sink_name": "prompt", + "is_static": false + }, + { + "id": "7978ef39-d862-441d-936f-8da60fefcab6", + "source_id": "10759047-6387-4ff1-9117-bbef47d24ee8", + "sink_id": "b09e201a-cd71-42d4-a197-22e7eebc54c9", + "source_name": "output", + "sink_name": "input1", + "is_static": false + }, + { + "id": "32e3bace-5df7-4683-97f2-7d9864878aee", + "source_id": "3169d1a8-b541-43f7-97ce-ddc6aecb2080", + "sink_id": "5658c4f7-8e67-4d30-93f2-157bdbd3ef87", + "source_name": "positive", + "sink_name": "query", + "is_static": false + }, + { + "id": "0ab7dce1-84b6-4f96-9eb2-1b458fe205a5", + "source_id": "3169d1a8-b541-43f7-97ce-ddc6aecb2080", + "sink_id": "3b2bb6a5-9c42-4189-a9a0-0e499ccb766a", + "source_name": "negative", + "sink_name": "input", + "is_static": false + } + ] +} \ No newline at end of file diff --git a/rnd/autogpt_server/migrations/20240805115456_add_user_management/migration.sql b/rnd/autogpt_server/migrations/20240805115456_add_user_management/migration.sql new file mode 100644 index 0000000000..36c213ad2f --- /dev/null +++ b/rnd/autogpt_server/migrations/20240805115456_add_user_management/migration.sql @@ -0,0 +1,60 @@ +-- CreateTable +CREATE TABLE "User" ( + "id" TEXT NOT NULL PRIMARY KEY, + "email" TEXT NOT NULL, + "name" TEXT, + "createdAt" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" DATETIME NOT NULL +); + +-- RedefineTables +PRAGMA foreign_keys=OFF; +CREATE TABLE "new_AgentGraph" ( + "id" TEXT NOT NULL, + "version" INTEGER NOT NULL DEFAULT 1, + "name" TEXT, + "description" TEXT, + "isActive" BOOLEAN NOT NULL DEFAULT true, + "isTemplate" BOOLEAN NOT NULL DEFAULT false, + "userId" TEXT, + "agentGraphParentId" TEXT, + + PRIMARY KEY ("id", "version"), + CONSTRAINT "AgentGraph_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE, + CONSTRAINT "AgentGraph_agentGraphParentId_version_fkey" FOREIGN KEY ("agentGraphParentId", "version") REFERENCES "AgentGraph" ("id", "version") ON DELETE RESTRICT ON UPDATE CASCADE +); +INSERT INTO "new_AgentGraph" ("agentGraphParentId", "description", "id", "isActive", "isTemplate", "name", "version") SELECT "agentGraphParentId", "description", "id", "isActive", "isTemplate", "name", "version" FROM "AgentGraph"; +DROP TABLE "AgentGraph"; +ALTER TABLE "new_AgentGraph" RENAME TO "AgentGraph"; +CREATE TABLE "new_AgentGraphExecution" ( + "id" TEXT NOT NULL PRIMARY KEY, + "agentGraphId" TEXT NOT NULL, + "agentGraphVersion" INTEGER NOT NULL DEFAULT 1, + "userId" TEXT, + CONSTRAINT "AgentGraphExecution_agentGraphId_agentGraphVersion_fkey" FOREIGN KEY ("agentGraphId", "agentGraphVersion") REFERENCES "AgentGraph" ("id", "version") ON DELETE RESTRICT ON UPDATE CASCADE, + CONSTRAINT "AgentGraphExecution_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE +); +INSERT INTO "new_AgentGraphExecution" ("agentGraphId", "agentGraphVersion", "id") SELECT "agentGraphId", "agentGraphVersion", "id" FROM "AgentGraphExecution"; +DROP TABLE "AgentGraphExecution"; +ALTER TABLE "new_AgentGraphExecution" RENAME TO "AgentGraphExecution"; +CREATE TABLE 
"new_AgentGraphExecutionSchedule" ( + "id" TEXT NOT NULL PRIMARY KEY, + "agentGraphId" TEXT NOT NULL, + "agentGraphVersion" INTEGER NOT NULL DEFAULT 1, + "schedule" TEXT NOT NULL, + "isEnabled" BOOLEAN NOT NULL DEFAULT true, + "inputData" TEXT NOT NULL, + "lastUpdated" DATETIME NOT NULL, + "userId" TEXT, + CONSTRAINT "AgentGraphExecutionSchedule_agentGraphId_agentGraphVersion_fkey" FOREIGN KEY ("agentGraphId", "agentGraphVersion") REFERENCES "AgentGraph" ("id", "version") ON DELETE RESTRICT ON UPDATE CASCADE, + CONSTRAINT "AgentGraphExecutionSchedule_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User" ("id") ON DELETE SET NULL ON UPDATE CASCADE +); +INSERT INTO "new_AgentGraphExecutionSchedule" ("agentGraphId", "agentGraphVersion", "id", "inputData", "isEnabled", "lastUpdated", "schedule") SELECT "agentGraphId", "agentGraphVersion", "id", "inputData", "isEnabled", "lastUpdated", "schedule" FROM "AgentGraphExecutionSchedule"; +DROP TABLE "AgentGraphExecutionSchedule"; +ALTER TABLE "new_AgentGraphExecutionSchedule" RENAME TO "AgentGraphExecutionSchedule"; +CREATE INDEX "AgentGraphExecutionSchedule_isEnabled_idx" ON "AgentGraphExecutionSchedule"("isEnabled"); +PRAGMA foreign_key_check; +PRAGMA foreign_keys=ON; + +-- CreateIndex +CREATE UNIQUE INDEX "User_email_key" ON "User"("email"); diff --git a/rnd/autogpt_server/migrations/20240807123716_add_index_users/migration.sql b/rnd/autogpt_server/migrations/20240807123716_add_index_users/migration.sql new file mode 100644 index 0000000000..54a70dca5a --- /dev/null +++ b/rnd/autogpt_server/migrations/20240807123716_add_index_users/migration.sql @@ -0,0 +1,5 @@ +-- CreateIndex +CREATE INDEX "User_id_idx" ON "User"("id"); + +-- CreateIndex +CREATE INDEX "User_email_idx" ON "User"("email"); diff --git a/rnd/autogpt_server/poetry.lock b/rnd/autogpt_server/poetry.lock index ffb7d270b4..c394929896 100644 --- a/rnd/autogpt_server/poetry.lock +++ b/rnd/autogpt_server/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "agpt" @@ -25,7 +25,7 @@ requests = "*" sentry-sdk = "^1.40.4" [package.extras] -benchmark = ["agbenchmark @ file:///home/bently/Desktop/autogpt-ui/AutoGPT/benchmark"] +benchmark = ["agbenchmark"] [package.source] type = "directory" @@ -329,7 +329,7 @@ watchdog = "4.0.0" webdriver-manager = "^4.0.1" [package.extras] -benchmark = ["agbenchmark @ file:///home/bently/Desktop/autogpt-ui/AutoGPT/benchmark"] +benchmark = ["agbenchmark"] [package.source] type = "directory" @@ -342,7 +342,7 @@ description = "Shared libraries across NextGen AutoGPT" optional = false python-versions = ">=3.10,<4.0" files = [] -develop = true +develop = false [package.dependencies] pyjwt = "^2.8.0" @@ -4212,19 +4212,19 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyjwt" -version = "2.8.0" +version = "2.9.0" description = "JSON Web Token implementation in Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, ] [package.extras] crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] @@ -6419,4 +6419,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "9991857e7076d3bfcbae7af6c2cec54dc943167a3adceb5a0ebf74d80c05778f" +content-hash = "003a4c89682abbf72c67631367f57e56d91d72b44f95e972b2326440199045e7" diff --git a/rnd/autogpt_server/postgres/migrations/20240805115810_add_user_management/migration.sql b/rnd/autogpt_server/postgres/migrations/20240805115810_add_user_management/migration.sql new file mode 100644 index 0000000000..f446f21534 --- /dev/null +++ b/rnd/autogpt_server/postgres/migrations/20240805115810_add_user_management/migration.sql @@ -0,0 +1,31 @@ +-- AlterTable +ALTER TABLE "AgentGraph" ADD COLUMN "userId" TEXT; + +-- AlterTable +ALTER TABLE "AgentGraphExecution" ADD COLUMN "userId" TEXT; + +-- AlterTable +ALTER TABLE "AgentGraphExecutionSchedule" ADD COLUMN "userId" TEXT; + +-- CreateTable +CREATE TABLE "User" ( + "id" TEXT NOT NULL, + "email" TEXT NOT NULL, + "name" TEXT, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL, + + CONSTRAINT "User_pkey" PRIMARY KEY ("id") +); + +-- CreateIndex +CREATE UNIQUE INDEX "User_email_key" ON "User"("email"); + +-- AddForeignKey +ALTER TABLE "AgentGraph" ADD CONSTRAINT "AgentGraph_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE SET NULL ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "AgentGraphExecution" ADD CONSTRAINT "AgentGraphExecution_userId_fkey" 
FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE SET NULL ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "AgentGraphExecutionSchedule" ADD CONSTRAINT "AgentGraphExecutionSchedule_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE SET NULL ON UPDATE CASCADE; diff --git a/rnd/autogpt_server/postgres/migrations/20240807123738_add_index_users/migration.sql b/rnd/autogpt_server/postgres/migrations/20240807123738_add_index_users/migration.sql new file mode 100644 index 0000000000..54a70dca5a --- /dev/null +++ b/rnd/autogpt_server/postgres/migrations/20240807123738_add_index_users/migration.sql @@ -0,0 +1,5 @@ +-- CreateIndex +CREATE INDEX "User_id_idx" ON "User"("id"); + +-- CreateIndex +CREATE INDEX "User_email_idx" ON "User"("email"); diff --git a/rnd/autogpt_server/postgres/schema.prisma b/rnd/autogpt_server/postgres/schema.prisma index 240af74404..a5daf683b5 100644 --- a/rnd/autogpt_server/postgres/schema.prisma +++ b/rnd/autogpt_server/postgres/schema.prisma @@ -10,6 +10,23 @@ generator client { interface = "asyncio" } +// User model to mirror Auth provider users +model User { + id String @id // This should match the Supabase user ID + email String @unique + name String? + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + // Relations + AgentGraphs AgentGraph[] + AgentGraphExecutions AgentGraphExecution[] + AgentGraphExecutionSchedules AgentGraphExecutionSchedule[] + + @@index([id]) + @@index([email]) +} + // This model describes the Agent Graph/Flow (Multi Agent System). model AgentGraph { id String @default(uuid()) @@ -20,6 +37,10 @@ model AgentGraph { isActive Boolean @default(true) isTemplate Boolean @default(false) + // Link to User model + userId String? + user User? @relation(fields: [userId], references: [id]) + AgentNodes AgentNode[] AgentGraphExecution AgentGraphExecution[] AgentGraphExecutionSchedule AgentGraphExecutionSchedule[] @@ -99,6 +120,10 @@ model AgentGraphExecution { AgentGraph AgentGraph @relation(fields: [agentGraphId, agentGraphVersion], references: [id, version]) AgentNodeExecutions AgentNodeExecution[] + + // Link to User model + userId String? + user User? @relation(fields: [userId], references: [id]) } // This model describes the execution of an AgentNode. @@ -158,5 +183,9 @@ model AgentGraphExecutionSchedule { // default and set the value on each update, lastUpdated field has no time zone. lastUpdated DateTime @updatedAt + // Link to User model + userId String? + user User? @relation(fields: [userId], references: [id]) + @@index([isEnabled]) } diff --git a/rnd/autogpt_server/pyproject.toml b/rnd/autogpt_server/pyproject.toml index a94e188a08..fd9ad9fef7 100644 --- a/rnd/autogpt_server/pyproject.toml +++ b/rnd/autogpt_server/pyproject.toml @@ -41,7 +41,7 @@ python-dotenv = "^1.0.1" expiringdict = "^1.2.2" discord-py = "^2.4.0" -autogpt-libs = { path = "../autogpt_libs", develop = true } +autogpt-libs = {path = "../autogpt_libs"} [tool.poetry.group.dev.dependencies] cx-freeze = { git = "https://github.com/ntindle/cx_Freeze.git", rev = "main", develop = true } poethepoet = "^0.26.1" diff --git a/rnd/autogpt_server/schema.prisma b/rnd/autogpt_server/schema.prisma index 5acf4b757a..2d645c418b 100644 --- a/rnd/autogpt_server/schema.prisma +++ b/rnd/autogpt_server/schema.prisma @@ -9,6 +9,23 @@ generator client { interface = "asyncio" } +// User model to mirror Auth provider users +model User { + id String @id // This should match the Supabase user ID + email String @unique + name String? 
+ createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + // Relations + AgentGraphs AgentGraph[] + AgentGraphExecutions AgentGraphExecution[] + AgentGraphExecutionSchedules AgentGraphExecutionSchedule[] + + @@index([id]) + @@index([email]) +} + // This model describes the Agent Graph/Flow (Multi Agent System). model AgentGraph { id String @default(uuid()) @@ -19,6 +36,10 @@ model AgentGraph { isActive Boolean @default(true) isTemplate Boolean @default(false) + // Link to User model + userId String? + user User? @relation(fields: [userId], references: [id]) + AgentNodes AgentNode[] AgentGraphExecution AgentGraphExecution[] AgentGraphExecutionSchedule AgentGraphExecutionSchedule[] @@ -98,6 +119,10 @@ model AgentGraphExecution { AgentGraph AgentGraph @relation(fields: [agentGraphId, agentGraphVersion], references: [id, version]) AgentNodeExecutions AgentNodeExecution[] + + // Link to User model + userId String? + user User? @relation(fields: [userId], references: [id]) } // This model describes the execution of an AgentNode. @@ -157,5 +182,9 @@ model AgentGraphExecutionSchedule { // default and set the value on each update, lastUpdated field has no time zone. lastUpdated DateTime @updatedAt + // Link to User model + userId String? + user User? @relation(fields: [userId], references: [id]) + @@index([isEnabled]) -} +} \ No newline at end of file diff --git a/rnd/autogpt_server/test/executor/test_manager.py b/rnd/autogpt_server/test/executor/test_manager.py index db0082e1a9..96961c0219 100644 --- a/rnd/autogpt_server/test/executor/test_manager.py +++ b/rnd/autogpt_server/test/executor/test_manager.py @@ -1,11 +1,12 @@ import pytest +from prisma.models import User from autogpt_server.blocks.basic import ObjectLookupBlock, ValueBlock from autogpt_server.blocks.maths import MathsBlock, Operation from autogpt_server.data import execution, graph from autogpt_server.executor import ExecutionManager from autogpt_server.server import AgentServer -from autogpt_server.usecases.sample import create_test_graph +from autogpt_server.usecases.sample import create_test_graph, create_test_user from autogpt_server.util.test import wait_execution @@ -13,24 +14,30 @@ async def execute_graph( agent_server: AgentServer, test_manager: ExecutionManager, test_graph: graph.Graph, + test_user: User, input_data: dict, num_execs: int = 4, ) -> str: # --- Test adding new executions --- # - response = await agent_server.execute_graph(test_graph.id, input_data) + response = await agent_server.execute_graph(test_graph.id, input_data, test_user.id) graph_exec_id = response["id"] # Execution queue should be empty - assert await wait_execution(test_manager, test_graph.id, graph_exec_id, num_execs) + assert await wait_execution( + test_manager, test_user.id, test_graph.id, graph_exec_id, num_execs + ) return graph_exec_id async def assert_sample_graph_executions( - agent_server: AgentServer, test_graph: graph.Graph, graph_exec_id: str + agent_server: AgentServer, + test_graph: graph.Graph, + test_user: User, + graph_exec_id: str, ): input = {"input_1": "Hello", "input_2": "World"} executions = await agent_server.get_run_execution_results( - test_graph.id, graph_exec_id + test_graph.id, graph_exec_id, test_user.id ) # Executing ValueBlock @@ -75,12 +82,20 @@ async def assert_sample_graph_executions( @pytest.mark.asyncio(scope="session") async def test_agent_execution(server): test_graph = create_test_graph() - await graph.create_graph(test_graph) + test_user = await create_test_user() + await 
graph.create_graph(test_graph, user_id=test_user.id) data = {"input_1": "Hello", "input_2": "World"} graph_exec_id = await execute_graph( - server.agent_server, server.exec_manager, test_graph, data, 4 + server.agent_server, + server.exec_manager, + test_graph, + test_user, + data, + 4, + ) + await assert_sample_graph_executions( + server.agent_server, test_graph, test_user, graph_exec_id ) - await assert_sample_graph_executions(server.agent_server, test_graph, graph_exec_id) @pytest.mark.asyncio(scope="session") @@ -130,14 +145,14 @@ async def test_input_pin_always_waited(server): nodes=nodes, links=links, ) - - test_graph = await graph.create_graph(test_graph) + test_user = await create_test_user() + test_graph = await graph.create_graph(test_graph, user_id=test_user.id) graph_exec_id = await execute_graph( - server.agent_server, server.exec_manager, test_graph, {}, 3 + server.agent_server, server.exec_manager, test_graph, test_user, {}, 3 ) executions = await server.agent_server.get_run_execution_results( - test_graph.id, graph_exec_id + test_graph.id, graph_exec_id, test_user.id ) assert len(executions) == 3 # ObjectLookupBlock should wait for the input pin to be provided, @@ -211,13 +226,13 @@ async def test_static_input_link_on_graph(server): nodes=nodes, links=links, ) - - test_graph = await graph.create_graph(test_graph) + test_user = await create_test_user() + test_graph = await graph.create_graph(test_graph, user_id=test_user.id) graph_exec_id = await execute_graph( - server.agent_server, server.exec_manager, test_graph, {}, 8 + server.agent_server, server.exec_manager, test_graph, test_user, {}, 8 ) executions = await server.agent_server.get_run_execution_results( - test_graph.id, graph_exec_id + test_graph.id, graph_exec_id, test_user.id ) assert len(executions) == 8 # The last 3 executions will be a+b=4+5=9 diff --git a/rnd/autogpt_server/test/executor/test_scheduler.py b/rnd/autogpt_server/test/executor/test_scheduler.py index 354b73e4de..be16ee6c8a 100644 --- a/rnd/autogpt_server/test/executor/test_scheduler.py +++ b/rnd/autogpt_server/test/executor/test_scheduler.py @@ -2,32 +2,34 @@ import pytest from autogpt_server.data import db, graph from autogpt_server.executor import ExecutionScheduler -from autogpt_server.usecases.sample import create_test_graph +from autogpt_server.usecases.sample import create_test_graph, create_test_user from autogpt_server.util.service import get_service_client @pytest.mark.asyncio(scope="session") async def test_agent_schedule(server): await db.connect() - test_graph = await graph.create_graph(create_test_graph()) + test_user = await create_test_user() + test_graph = await graph.create_graph(create_test_graph(), user_id=test_user.id) scheduler = get_service_client(ExecutionScheduler) - schedules = scheduler.get_execution_schedules(test_graph.id) + schedules = scheduler.get_execution_schedules(test_graph.id, test_user.id) assert len(schedules) == 0 schedule_id = scheduler.add_execution_schedule( graph_id=test_graph.id, + user_id=test_user.id, graph_version=1, cron="0 0 * * *", input_data={"input": "data"}, ) assert schedule_id - schedules = scheduler.get_execution_schedules(test_graph.id) + schedules = scheduler.get_execution_schedules(test_graph.id, test_user.id) assert len(schedules) == 1 assert schedules[schedule_id] == "0 0 * * *" - scheduler.update_schedule(schedule_id, is_enabled=False) - schedules = scheduler.get_execution_schedules(test_graph.id) + scheduler.update_schedule(schedule_id, is_enabled=False, user_id=test_user.id) + schedules 
= scheduler.get_execution_schedules(test_graph.id, user_id=test_user.id) assert len(schedules) == 0 diff --git a/rnd/infra/helm/autogpt_builder/.helmignore b/rnd/infra/helm/autogpt_builder/.helmignore new file mode 100644 index 0000000000..0e8a0eb36f --- /dev/null +++ b/rnd/infra/helm/autogpt_builder/.helmignore @@ -0,0 +1,23 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/rnd/infra/helm/autogpt_builder/Chart.yaml b/rnd/infra/helm/autogpt_builder/Chart.yaml new file mode 100644 index 0000000000..96653b89ea --- /dev/null +++ b/rnd/infra/helm/autogpt_builder/Chart.yaml @@ -0,0 +1,24 @@ +apiVersion: v2 +name: autogpt_builder +description: A Helm chart for Kubernetes + +# A chart can be either an 'application' or a 'library' chart. +# +# Application charts are a collection of templates that can be packaged into versioned archives +# to be deployed. +# +# Library charts provide useful utilities or functions for the chart developer. They're included as +# a dependency of application charts to inject those utilities and functions into the rendering +# pipeline. Library charts do not define any templates and therefore cannot be deployed. +type: application + +# This is the chart version. This version number should be incremented each time you make changes +# to the chart and its templates, including the app version. +# Versions are expected to follow Semantic Versioning (https://semver.org/) +version: 0.1.0 + +# This is the version number of the application being deployed. This version number should be +# incremented each time you make changes to the application. Versions are not expected to +# follow Semantic Versioning. They should reflect the version the application is using. +# It is recommended to use it with quotes. +appVersion: "1.16.0" diff --git a/rnd/infra/helm/autogpt_builder/templates/NOTES.txt b/rnd/infra/helm/autogpt_builder/templates/NOTES.txt new file mode 100644 index 0000000000..8ce6c44ba8 --- /dev/null +++ b/rnd/infra/helm/autogpt_builder/templates/NOTES.txt @@ -0,0 +1,22 @@ +1. Get the application URL by running these commands: +{{- if .Values.ingress.enabled }} +{{- range $host := .Values.ingress.hosts }} + {{- range .paths }} + http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ .path }} + {{- end }} +{{- end }} +{{- else if contains "NodePort" .Values.service.type }} + export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "autogpt_builder.fullname" . }}) + export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}") + echo http://$NODE_IP:$NODE_PORT +{{- else if contains "LoadBalancer" .Values.service.type }} + NOTE: It may take a few minutes for the LoadBalancer IP to be available. + You can watch its status by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "autogpt_builder.fullname" . }}' + export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "autogpt_builder.fullname" . 
}} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}") + echo http://$SERVICE_IP:{{ .Values.service.port }} +{{- else if contains "ClusterIP" .Values.service.type }} + export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "autogpt_builder.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}") + export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}") + echo "Visit http://127.0.0.1:8080 to use your application" + kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT +{{- end }} diff --git a/rnd/infra/helm/autogpt_builder/templates/_helpers.tpl b/rnd/infra/helm/autogpt_builder/templates/_helpers.tpl new file mode 100644 index 0000000000..8e65eaf4be --- /dev/null +++ b/rnd/infra/helm/autogpt_builder/templates/_helpers.tpl @@ -0,0 +1,62 @@ +{{/* +Expand the name of the chart. +*/}} +{{- define "autogpt_builder.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. +*/}} +{{- define "autogpt_builder.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "autogpt_builder.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "autogpt_builder.labels" -}} +helm.sh/chart: {{ include "autogpt_builder.chart" . }} +{{ include "autogpt_builder.selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "autogpt_builder.selectorLabels" -}} +app.kubernetes.io/name: {{ include "autogpt_builder.name" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} + +{{/* +Create the name of the service account to use +*/}} +{{- define "autogpt_builder.serviceAccountName" -}} +{{- if .Values.serviceAccount.create }} +{{- default (include "autogpt_builder.fullname" .) .Values.serviceAccount.name }} +{{- else }} +{{- default "default" .Values.serviceAccount.name }} +{{- end }} +{{- end }} diff --git a/rnd/infra/helm/autogpt_builder/templates/deployment.yaml b/rnd/infra/helm/autogpt_builder/templates/deployment.yaml new file mode 100644 index 0000000000..1c19a77ac3 --- /dev/null +++ b/rnd/infra/helm/autogpt_builder/templates/deployment.yaml @@ -0,0 +1,68 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "autogpt_builder.fullname" . }} + labels: + {{- include "autogpt_builder.labels" . | nindent 4 }} +spec: + {{- if not .Values.autoscaling.enabled }} + replicas: {{ .Values.replicaCount }} + {{- end }} + selector: + matchLabels: + {{- include "autogpt_builder.selectorLabels" . 
| nindent 6 }} + template: + metadata: + {{- with .Values.podAnnotations }} + annotations: + {{- toYaml . | nindent 8 }} + {{- end }} + labels: + {{- include "autogpt_builder.labels" . | nindent 8 }} + {{- with .Values.podLabels }} + {{- toYaml . | nindent 8 }} + {{- end }} + spec: + {{- with .Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml . | nindent 8 }} + {{- end }} + serviceAccountName: {{ include "autogpt_builder.serviceAccountName" . }} + securityContext: + {{- toYaml .Values.podSecurityContext | nindent 8 }} + containers: + - name: {{ .Chart.Name }} + securityContext: + {{- toYaml .Values.securityContext | nindent 12 }} + image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.image.pullPolicy }} + ports: + - name: http + containerPort: {{ .Values.service.port }} + protocol: TCP + livenessProbe: + {{- toYaml .Values.livenessProbe | nindent 12 }} + readinessProbe: + {{- toYaml .Values.readinessProbe | nindent 12 }} + resources: + {{- toYaml .Values.resources | nindent 12 }} + {{- with .Values.volumeMounts }} + volumeMounts: + {{- toYaml . | nindent 12 }} + {{- end }} + {{- with .Values.volumes }} + volumes: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} diff --git a/rnd/infra/helm/autogpt_builder/templates/hpa.yaml b/rnd/infra/helm/autogpt_builder/templates/hpa.yaml new file mode 100644 index 0000000000..f3e5b90e83 --- /dev/null +++ b/rnd/infra/helm/autogpt_builder/templates/hpa.yaml @@ -0,0 +1,32 @@ +{{- if .Values.autoscaling.enabled }} +apiVersion: autoscaling/v2 +kind: HorizontalPodAutoscaler +metadata: + name: {{ include "autogpt_builder.fullname" . }} + labels: + {{- include "autogpt_builder.labels" . | nindent 4 }} +spec: + scaleTargetRef: + apiVersion: apps/v1 + kind: Deployment + name: {{ include "autogpt_builder.fullname" . }} + minReplicas: {{ .Values.autoscaling.minReplicas }} + maxReplicas: {{ .Values.autoscaling.maxReplicas }} + metrics: + {{- if .Values.autoscaling.targetCPUUtilizationPercentage }} + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }} + {{- end }} + {{- if .Values.autoscaling.targetMemoryUtilizationPercentage }} + - type: Resource + resource: + name: memory + target: + type: Utilization + averageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }} + {{- end }} +{{- end }} diff --git a/rnd/infra/helm/autogpt_builder/templates/ingress.yaml b/rnd/infra/helm/autogpt_builder/templates/ingress.yaml new file mode 100644 index 0000000000..2e0c5f75f7 --- /dev/null +++ b/rnd/infra/helm/autogpt_builder/templates/ingress.yaml @@ -0,0 +1,61 @@ +{{- if .Values.ingress.enabled -}} +{{- $fullName := include "autogpt_builder.fullname" . 
-}} +{{- $svcPort := .Values.service.port -}} +{{- if and .Values.ingress.className (not (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion)) }} + {{- if not (hasKey .Values.ingress.annotations "kubernetes.io/ingress.class") }} + {{- $_ := set .Values.ingress.annotations "kubernetes.io/ingress.class" .Values.ingress.className}} + {{- end }} +{{- end }} +{{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion -}} +apiVersion: networking.k8s.io/v1 +{{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}} +apiVersion: networking.k8s.io/v1beta1 +{{- else -}} +apiVersion: extensions/v1beta1 +{{- end }} +kind: Ingress +metadata: + name: {{ $fullName }} + labels: + {{- include "autogpt_builder.labels" . | nindent 4 }} + {{- with .Values.ingress.annotations }} + annotations: + {{- toYaml . | nindent 4 }} + {{- end }} +spec: + {{- if and .Values.ingress.className (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }} + ingressClassName: {{ .Values.ingress.className }} + {{- end }} + {{- if .Values.ingress.tls }} + tls: + {{- range .Values.ingress.tls }} + - hosts: + {{- range .hosts }} + - {{ . | quote }} + {{- end }} + secretName: {{ .secretName }} + {{- end }} + {{- end }} + rules: + {{- range .Values.ingress.hosts }} + - host: {{ .host | quote }} + http: + paths: + {{- range .paths }} + - path: {{ .path }} + {{- if and .pathType (semverCompare ">=1.18-0" $.Capabilities.KubeVersion.GitVersion) }} + pathType: {{ .pathType }} + {{- end }} + backend: + {{- if semverCompare ">=1.19-0" $.Capabilities.KubeVersion.GitVersion }} + service: + name: {{ $fullName }} + port: + number: {{ $svcPort }} + {{- else }} + serviceName: {{ $fullName }} + servicePort: {{ $svcPort }} + {{- end }} + {{- end }} + {{- end }} +{{- end }} diff --git a/rnd/infra/helm/autogpt_builder/templates/managedcert.yaml b/rnd/infra/helm/autogpt_builder/templates/managedcert.yaml new file mode 100644 index 0000000000..c1d9372478 --- /dev/null +++ b/rnd/infra/helm/autogpt_builder/templates/managedcert.yaml @@ -0,0 +1,7 @@ +apiVersion: networking.gke.io/v1 +kind: ManagedCertificate +metadata: + name: {{ include "autogpt_builder.fullname" . }}-cert +spec: + domains: + - {{ .Values.domain }} \ No newline at end of file diff --git a/rnd/infra/helm/autogpt_builder/templates/service.yaml b/rnd/infra/helm/autogpt_builder/templates/service.yaml new file mode 100644 index 0000000000..050b932111 --- /dev/null +++ b/rnd/infra/helm/autogpt_builder/templates/service.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ include "autogpt_builder.fullname" . }} + labels: + {{- include "autogpt_builder.labels" . | nindent 4 }} +spec: + type: {{ .Values.service.type }} + ports: + - port: {{ .Values.service.port }} + targetPort: http + protocol: TCP + name: http + selector: + {{- include "autogpt_builder.selectorLabels" . | nindent 4 }} diff --git a/rnd/infra/helm/autogpt_builder/templates/serviceaccount.yaml b/rnd/infra/helm/autogpt_builder/templates/serviceaccount.yaml new file mode 100644 index 0000000000..a50ea11db8 --- /dev/null +++ b/rnd/infra/helm/autogpt_builder/templates/serviceaccount.yaml @@ -0,0 +1,13 @@ +{{- if .Values.serviceAccount.create -}} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "autogpt_builder.serviceAccountName" . }} + labels: + {{- include "autogpt_builder.labels" . | nindent 4 }} + {{- with .Values.serviceAccount.annotations }} + annotations: + {{- toYaml .
| nindent 4 }} + {{- end }} +automountServiceAccountToken: {{ .Values.serviceAccount.automount }} +{{- end }} diff --git a/rnd/infra/helm/autogpt_builder/templates/tests/test-connection.yaml b/rnd/infra/helm/autogpt_builder/templates/tests/test-connection.yaml new file mode 100644 index 0000000000..3ca1ba1119 --- /dev/null +++ b/rnd/infra/helm/autogpt_builder/templates/tests/test-connection.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Pod +metadata: + name: "{{ include "autogpt_builder.fullname" . }}-test-connection" + labels: + {{- include "autogpt_builder.labels" . | nindent 4 }} + annotations: + "helm.sh/hook": test +spec: + containers: + - name: wget + image: busybox + command: ['wget'] + args: ['{{ include "autogpt_builder.fullname" . }}:{{ .Values.service.port }}'] + restartPolicy: Never diff --git a/rnd/infra/helm/autogpt_builder/values.dev.yaml b/rnd/infra/helm/autogpt_builder/values.dev.yaml new file mode 100644 index 0000000000..d618aa958e --- /dev/null +++ b/rnd/infra/helm/autogpt_builder/values.dev.yaml @@ -0,0 +1,77 @@ +# dev values, overwrite base values as needed. + +image: + repository: us-east1-docker.pkg.dev/agpt-dev/agpt-builder-dev/agpt-builder-dev + pullPolicy: Always + tag: "latest" + +serviceAccount: + annotations: + iam.gke.io/gcp-service-account: "dev-agpt-builder-sa@agpt-dev.iam.gserviceaccount.com" + name: "dev-agpt-builder-sa" + +service: + type: ClusterIP + port: 8000 + targetPort: 3000 + annotations: + cloud.google.com/neg: '{"ingress": true}' + +ingress: + enabled: true + className: "gce" + annotations: + kubernetes.io/ingress.class: gce + kubernetes.io/ingress.global-static-ip-name: "agpt-dev-agpt-builder-ip" + networking.gke.io/managed-certificates: "autogpt-builder-cert" + kubernetes.io/ingress.allow-http: "true" + hosts: + - host: dev-builder.agpt.co + paths: + - path: / + pathType: Prefix + backend: + service: + name: autogpt-builder + port: 8000 + defaultBackend: + service: + name: autogpt-builder + port: + number: 8000 + +resources: + requests: + cpu: 100m + memory: 128Mi + limits: + cpu: 500m + memory: 512Mi + +livenessProbe: + httpGet: + path: / + port: 3000 + initialDelaySeconds: 30 + periodSeconds: 10 + timeoutSeconds: 5 + failureThreshold: 6 +readinessProbe: + httpGet: + path: / + port: 3000 + initialDelaySeconds: 30 + periodSeconds: 10 + timeoutSeconds: 5 + failureThreshold: 6 + +domain: "dev-builder.agpt.co" + + +env: + APP_ENV: "dev" + NEXT_PUBLIC_AGPT_SERVER_URL: "http://agpt-server:8000/api" + GOOGLE_CLIENT_ID: "638488734936-ka0bvq73ub3h4cb6013s3lftsl5l04nu.apps.googleusercontent.com" + GOOGLE_CLIENT_SECRET: "" + NEXT_PUBLIC_SUPABASE_URL: "https://adfjtextkuilwuhzdjpf.supabase.co" + NEXT_PUBLIC_SUPABASE_ANON_KEY: "" \ No newline at end of file diff --git a/rnd/infra/helm/autogpt_builder/values.yaml b/rnd/infra/helm/autogpt_builder/values.yaml new file mode 100644 index 0000000000..3b4b64e6e6 --- /dev/null +++ b/rnd/infra/helm/autogpt_builder/values.yaml @@ -0,0 +1,76 @@ +# base values, environment specific variables should be specified/overwritten in environment values + +replicaCount: 1 + +image: + repository: us-east1-docker.pkg.dev/agpt-dev/agpt-builder-dev/agpt-builder-dev + pullPolicy: IfNotPresent + tag: "latest" + +imagePullSecrets: [] +nameOverride: "" +fullnameOverride: "" + +serviceAccount: + create: true + automount: true + annotations: {} + name: "" + +podAnnotations: {} +podLabels: {} + +podSecurityContext: {} + +securityContext: {} + +service: + type: ClusterIP + port: 80 + +ingress: + enabled: false + className: "" + 
annotations: {} + hosts: + - host: chart-example.local + paths: + - path: / + pathType: ImplementationSpecific + tls: [] + + +resources: + requests: + cpu: 100m + memory: 128Mi + limits: + cpu: 500m + memory: 512Mi + +livenessProbe: + httpGet: + path: / + port: http +readinessProbe: + httpGet: + path: / + port: http + +autoscaling: + enabled: false + minReplicas: 1 + maxReplicas: 100 + targetMemoryUtilizationPercentage: 80 + +volumes: [] + +volumeMounts: [] + +nodeSelector: {} + +tolerations: [] + +affinity: {} + +domain: "" diff --git a/rnd/market/.env.example b/rnd/market/.env.example index 2187e8cc45..90935e7d6b 100644 --- a/rnd/market/.env.example +++ b/rnd/market/.env.example @@ -4,4 +4,7 @@ DB_PASS=pass123 DB_NAME=marketplace DB_PORT=5432 DATABASE_URL=postgresql://${DB_USER}:${DB_PASS}@localhost:${DB_PORT}/${DB_NAME} -SENTRY_DSN=Set correct url or dealete me \ No newline at end of file +SENTRY_DSN=Set correct url or delete me + +ENABLE_AUTH=true +SUPABASE_JWT_SECRET=AAAAAAAA \ No newline at end of file diff --git a/rnd/market/market/app.py b/rnd/market/market/app.py index 2f7703500b..5696e8b356 100644 --- a/rnd/market/market/app.py +++ b/rnd/market/market/app.py @@ -1,4 +1,5 @@ import contextlib +import logging.config import os import dotenv @@ -12,12 +13,15 @@ import sentry_sdk.integrations.asyncio import sentry_sdk.integrations.fastapi import sentry_sdk.integrations.starlette +import market.config import market.routes.admin import market.routes.agents import market.routes.search dotenv.load_dotenv() +logging.config.dictConfig(market.config.LogConfig().model_dump()) + if os.environ.get("SENTRY_DSN"): sentry_sdk.init( dsn=os.environ.get("SENTRY_DSN"), diff --git a/rnd/market/market/config.py b/rnd/market/market/config.py new file mode 100644 index 0000000000..46a31b462a --- /dev/null +++ b/rnd/market/market/config.py @@ -0,0 +1,30 @@ +from pydantic import BaseModel + + +class LogConfig(BaseModel): + """Logging configuration to be set for the server""" + + LOGGER_NAME: str = "marketplace" + LOG_FORMAT: str = "%(levelprefix)s | %(asctime)s | %(message)s" + LOG_LEVEL: str = "DEBUG" + + # Logging config + version: int = 1 + disable_existing_loggers: bool = False + formatters: dict = { + "default": { + "()": "uvicorn.logging.DefaultFormatter", + "fmt": LOG_FORMAT, + "datefmt": "%Y-%m-%d %H:%M:%S", + }, + } + handlers: dict = { + "default": { + "formatter": "default", + "class": "logging.StreamHandler", + "stream": "ext://sys.stderr", + }, + } + loggers: dict = { + LOGGER_NAME: {"handlers": ["default"], "level": LOG_LEVEL}, + } diff --git a/rnd/market/market/db.py b/rnd/market/market/db.py index acf0a32516..7615222663 100644 --- a/rnd/market/market/db.py +++ b/rnd/market/market/db.py @@ -1,6 +1,8 @@ +import datetime import typing import fuzzywuzzy.fuzz +import prisma.enums import prisma.errors import prisma.models import prisma.types @@ -61,6 +63,7 @@ async def create_agent_entry( keywords: typing.List[str], categories: typing.List[str], graph: prisma.Json, + submission_state: prisma.enums.SubmissionStatus = prisma.enums.SubmissionStatus.PENDING, ): """ Create a new agent entry in the database.
@@ -89,6 +92,7 @@ async def create_agent_entry( "categories": categories, "graph": graph, "AnalyticsTracker": {"create": {"downloads": 0, "views": 0}}, + "submissionStatus": submission_state, } ) @@ -100,6 +104,39 @@ async def create_agent_entry( raise AgentQueryError(f"Unexpected error occurred: {str(e)}") +async def update_agent_entry( + agent_id: str, + version: int, + submission_state: prisma.enums.SubmissionStatus, + comments: str | None = None, +): + """ + Update an existing agent entry in the database. + + Args: + agent_id (str): The ID of the agent. + version (int): The version of the agent. + submission_state (prisma.enums.SubmissionStatus): The submission state of the agent. + """ + + try: + agent = await prisma.models.Agents.prisma().update( + where={"id": agent_id}, + data={ + "version": version, + "submissionStatus": submission_state, + "submissionReviewDate": datetime.datetime.now(datetime.timezone.utc), + "submissionReviewComments": comments, + }, + ) + + return agent + except prisma.errors.PrismaError as e: + raise AgentQueryError(f"Agent Update Failed Database query failed: {str(e)}") + except Exception as e: + raise AgentQueryError(f"Unexpected error occurred: {str(e)}") + + async def get_agents( page: int = 1, page_size: int = 10, @@ -108,6 +145,7 @@ async def get_agents( category: str | None = None, description: str | None = None, description_threshold: int = 60, + submission_status: prisma.enums.SubmissionStatus = prisma.enums.SubmissionStatus.APPROVED, sort_by: str = "createdAt", sort_order: typing.Literal["desc"] | typing.Literal["asc"] = "desc", ): @@ -140,6 +178,8 @@ async def get_agents( if category: query["categories"] = {"has": category} + query["submissionStatus"] = submission_status + # Define sorting order = {sort_by: sort_order} diff --git a/rnd/market/market/model.py b/rnd/market/market/model.py index 71fea88fad..d7dbb2d6cf 100644 --- a/rnd/market/market/model.py +++ b/rnd/market/market/model.py @@ -1,6 +1,7 @@ import datetime import typing +import prisma.enums import pydantic @@ -11,6 +12,13 @@ class AddAgentRequest(pydantic.BaseModel): categories: list[str] +class SubmissionReviewRequest(pydantic.BaseModel): + agent_id: str + version: int + status: prisma.enums.SubmissionStatus + comments: str | None + + class AgentResponse(pydantic.BaseModel): """ Represents a response from an agent. @@ -36,6 +44,7 @@ class AgentResponse(pydantic.BaseModel): version: int createdAt: datetime.datetime updatedAt: datetime.datetime + submission_status: str views: int = 0 downloads: int = 0 diff --git a/rnd/market/market/routes/admin.py b/rnd/market/market/routes/admin.py index 40893a3814..75d77866d5 100644 --- a/rnd/market/market/routes/admin.py +++ b/rnd/market/market/routes/admin.py @@ -1,18 +1,30 @@ +import logging +import typing + +import autogpt_libs.auth import fastapi import prisma +import prisma.enums +import prisma.models import market.db import market.model +logger = logging.getLogger("marketplace") + router = fastapi.APIRouter() @router.post("/agent", response_model=market.model.AgentResponse) -async def create_agent_entry(request: market.model.AddAgentRequest): +async def create_agent_entry( + request: market.model.AddAgentRequest, + user: autogpt_libs.auth.User = fastapi.Depends( + autogpt_libs.auth.requires_admin_user + ), +): """ A basic endpoint to create a new agent entry in the database. - TODO: Protect endpoint! 
""" try: agent = await market.db.create_agent_entry( @@ -32,7 +44,13 @@ async def create_agent_entry(request: market.model.AddAgentRequest): @router.post("/agent/featured/{agent_id}") -async def set_agent_featured(agent_id: str, category: str = "featured"): +async def set_agent_featured( + agent_id: str, + category: str = "featured", + user: autogpt_libs.auth.User = fastapi.Depends( + autogpt_libs.auth.requires_admin_user + ), +): """ A basic endpoint to set an agent as featured in the database. """ @@ -48,7 +66,13 @@ async def set_agent_featured(agent_id: str, category: str = "featured"): @router.delete("/agent/featured/{agent_id}") -async def unset_agent_featured(agent_id: str, category: str = "featured"): +async def unset_agent_featured( + agent_id: str, + category: str = "featured", + user: autogpt_libs.auth.User = fastapi.Depends( + autogpt_libs.auth.requires_admin_user + ), +): """ A basic endpoint to unset an agent as featured in the database. """ @@ -61,3 +85,96 @@ async def unset_agent_featured(agent_id: str, category: str = "featured"): raise fastapi.HTTPException(status_code=500, detail=str(e)) except Exception as e: raise fastapi.HTTPException(status_code=500, detail=str(e)) + + +@router.get("/agent/submissions", response_model=market.model.AgentListResponse) +async def get_agent_submissions( + page: int = fastapi.Query(1, ge=1, description="Page number"), + page_size: int = fastapi.Query( + 10, ge=1, le=100, description="Number of items per page" + ), + name: typing.Optional[str] = fastapi.Query( + None, description="Filter by agent name" + ), + keyword: typing.Optional[str] = fastapi.Query( + None, description="Filter by keyword" + ), + category: typing.Optional[str] = fastapi.Query( + None, description="Filter by category" + ), + description: typing.Optional[str] = fastapi.Query( + None, description="Fuzzy search in description" + ), + description_threshold: int = fastapi.Query( + 60, ge=0, le=100, description="Fuzzy search threshold" + ), + sort_by: str = fastapi.Query("createdAt", description="Field to sort by"), + sort_order: typing.Literal["asc", "desc"] = fastapi.Query( + "desc", description="Sort order (asc or desc)" + ), + user: autogpt_libs.auth.User = fastapi.Depends( + autogpt_libs.auth.requires_admin_user + ), +): + logger.info("Getting agent submissions") + try: + result = await market.db.get_agents( + page=page, + page_size=page_size, + name=name, + keyword=keyword, + category=category, + description=description, + description_threshold=description_threshold, + sort_by=sort_by, + sort_order=sort_order, + submission_status=prisma.enums.SubmissionStatus.PENDING, + ) + + agents = [ + market.model.AgentResponse(**agent.dict()) for agent in result["agents"] + ] + + return market.model.AgentListResponse( + agents=agents, + total_count=result["total_count"], + page=result["page"], + page_size=result["page_size"], + total_pages=result["total_pages"], + ) + + except market.db.AgentQueryError as e: + logger.error(f"Error getting agent submissions: {e}") + raise fastapi.HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f"Error getting agent submissions: {e}") + raise fastapi.HTTPException( + status_code=500, detail=f"An unexpected error occurred: {e}" + ) + + +@router.post("/agent/submissions") +async def review_submission( + review_request: market.model.SubmissionReviewRequest, + user: autogpt_libs.auth.User = fastapi.Depends( + autogpt_libs.auth.requires_admin_user + ), +): + """ + A basic endpoint to review a submission in the 
database. + """ + logger.info( + f"Reviewing submission: {review_request.agent_id}, {review_request.version}" + ) + try: + # await market.db.update_agent_entry( + # agent_id=review_request.agent_id, + # version=review_request.version, + # submission_state=review_request.status, + # comments=review_request.comments, + # ) + return fastapi.responses.Response(status_code=200) + except market.db.AgentQueryError as e: + raise fastapi.HTTPException(status_code=500, detail=str(e)) + except Exception as e: + raise fastapi.HTTPException(status_code=500, detail=str(e)) diff --git a/rnd/market/market/routes/admin_tests.py b/rnd/market/market/routes/admin_tests.py new file mode 100644 index 0000000000..c305e8c833 --- /dev/null +++ b/rnd/market/market/routes/admin_tests.py @@ -0,0 +1,76 @@ +import datetime +from unittest import mock + +import autogpt_libs.auth.middleware +import fastapi +import fastapi.testclient +import prisma.enums +import prisma.models + +import market.app + +client = fastapi.testclient.TestClient(market.app.app) + + +async def override_auth_middleware(request: fastapi.Request): + return {"sub": "3e53486c-cf57-477e-ba2a-cb02dc828e1a", "role": "admin"} + + +market.app.app.dependency_overrides[autogpt_libs.auth.middleware.auth_middleware] = ( + override_auth_middleware +) + + +def test_get_submissions(): + with mock.patch("market.db.get_agents") as mock_get_agents: + mock_get_agents.return_value = { + "agents": [], + "total_count": 0, + "page": 1, + "page_size": 10, + "total_pages": 0, + } + response = client.get( + "/api/v1/market/admin/agent/submissions?page=1&page_size=10&description_threshold=60&sort_by=createdAt&sort_order=desc", + headers={"Bearer": ""}, + ) + assert response.status_code == 200 + assert response.json() == { + "agents": [], + "total_count": 0, + "page": 1, + "page_size": 10, + "total_pages": 0, + } + + +def test_review_submission(): + with mock.patch("market.db.update_agent_entry") as mock_update_agent_entry: + mock_update_agent_entry.return_value = prisma.models.Agents( + id="aaa-bbb-ccc", + version=1, + createdAt=datetime.datetime.fromisoformat("2021-10-01T00:00:00+00:00"), + updatedAt=datetime.datetime.fromisoformat("2021-10-01T00:00:00+00:00"), + submissionStatus=prisma.enums.SubmissionStatus.APPROVED, + submissionDate=datetime.datetime.fromisoformat("2021-10-01T00:00:00+00:00"), + submissionReviewComments="Looks good", + submissionReviewDate=datetime.datetime.fromisoformat( + "2021-10-01T00:00:00+00:00" + ), + keywords=["test"], + categories=["test"], + graph='{"name": "test", "description": "test"}', # type: ignore + ) + response = client.post( + "/api/v1/market/admin/agent/submissions", + headers={ + "Authorization": "Bearer token" + }, # Assuming you need an authorization token + json={ + "agent_id": "aaa-bbb-ccc", + "version": 1, + "status": "APPROVED", + "comments": "Looks good", + }, + ) + assert response.status_code == 200 diff --git a/rnd/market/market/routes/agents.py b/rnd/market/market/routes/agents.py index e363a06472..adcfe80919 100644 --- a/rnd/market/market/routes/agents.py +++ b/rnd/market/market/routes/agents.py @@ -248,6 +248,7 @@ async def top_agents_by_downloads( updatedAt=item.agent.updatedAt, views=item.views, downloads=item.downloads, + submission_status=item.agent.submissionStatus, ) for item in result.analytics if item.agent is not None @@ -323,6 +324,7 @@ async def get_featured_agents( and len(item.agent.AnalyticsTracker) > 0 else 0 ), + submission_status=item.agent.submissionStatus, ) for item in result.featured_agents if 
item.agent is not None diff --git a/rnd/market/market/routes/submissions.py b/rnd/market/market/routes/submissions.py new file mode 100644 index 0000000000..49a4dea14e --- /dev/null +++ b/rnd/market/market/routes/submissions.py @@ -0,0 +1,35 @@ +import autogpt_libs.auth +import fastapi +import fastapi.responses +import prisma + +import market.db +import market.model +import market.utils.analytics + +router = fastapi.APIRouter() + + +@router.post("/agents/submit", response_model=market.model.AgentResponse) +async def submit_agent( + request: market.model.AddAgentRequest, + user: autogpt_libs.auth.User = fastapi.Depends(autogpt_libs.auth.requires_user), +): + """ + A basic endpoint to create a new agent entry in the database. + """ + try: + agent = await market.db.create_agent_entry( + request.graph["name"], + request.graph["description"], + request.author, + request.keywords, + request.categories, + prisma.Json(request.graph), + ) + + return fastapi.responses.PlainTextResponse(agent.model_dump_json()) + except market.db.AgentQueryError as e: + raise fastapi.HTTPException(status_code=500, detail=str(e)) + except Exception as e: + raise fastapi.HTTPException(status_code=500, detail=str(e)) diff --git a/rnd/market/migrations/20240808080208_added_submissions/migration.sql b/rnd/market/migrations/20240808080208_added_submissions/migration.sql new file mode 100644 index 0000000000..e30f090fba --- /dev/null +++ b/rnd/market/migrations/20240808080208_added_submissions/migration.sql @@ -0,0 +1,8 @@ +-- CreateEnum +CREATE TYPE "SubmissionStatus" AS ENUM ('PENDING', 'APPROVED', 'REJECTED'); + +-- AlterTable +ALTER TABLE "Agents" ADD COLUMN "submissionDate" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, +ADD COLUMN "submissionReviewComments" TEXT, +ADD COLUMN "submissionReviewDate" TIMESTAMP(3), +ADD COLUMN "submissionStatus" "SubmissionStatus" NOT NULL DEFAULT 'PENDING'; diff --git a/rnd/market/poetry.lock b/rnd/market/poetry.lock index 29abc6bc7f..8801d1638f 100644 --- a/rnd/market/poetry.lock +++ b/rnd/market/poetry.lock @@ -33,35 +33,52 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (>=0.23)"] +[[package]] +name = "autogpt-libs" +version = "0.1.0" +description = "Shared libraries across NextGen AutoGPT" +optional = false +python-versions = ">=3.10,<4.0" +files = [] +develop = false + +[package.dependencies] +pyjwt = "^2.8.0" +python-dotenv = "^1.0.1" + +[package.source] +type = "directory" +url = "../autogpt_libs" + [[package]] name = "black" -version = "24.4.2" +version = "24.8.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, - {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, - {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, - {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, - {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, - {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, - {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, - {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, - {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, - {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, - {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, - {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, - {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, - {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, - {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, - {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, - {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, - {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, - {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, - {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, - {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, - {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, + {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, + {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, + {file = 
"black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, + {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, + {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, + {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, + {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, + {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, + {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, + {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, + {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, + {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, + {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, + {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, + {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, + {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, + {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, + {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, + {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, + {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, + {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, + {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, ] [package.dependencies] @@ -794,6 +811,23 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" +[[package]] +name = "pyjwt" +version = "2.9.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, +] + +[package.extras] +crypto = 
["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + [[package]] name = "pyright" version = "1.1.374" @@ -1023,29 +1057,29 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.5.5" +version = "0.5.6" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.5.5-py3-none-linux_armv6l.whl", hash = "sha256:605d589ec35d1da9213a9d4d7e7a9c761d90bba78fc8790d1c5e65026c1b9eaf"}, - {file = "ruff-0.5.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00817603822a3e42b80f7c3298c8269e09f889ee94640cd1fc7f9329788d7bf8"}, - {file = "ruff-0.5.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:187a60f555e9f865a2ff2c6984b9afeffa7158ba6e1eab56cb830404c942b0f3"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe26fc46fa8c6e0ae3f47ddccfbb136253c831c3289bba044befe68f467bfb16"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ad25dd9c5faac95c8e9efb13e15803cd8bbf7f4600645a60ffe17c73f60779b"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f70737c157d7edf749bcb952d13854e8f745cec695a01bdc6e29c29c288fc36e"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:cfd7de17cef6ab559e9f5ab859f0d3296393bc78f69030967ca4d87a541b97a0"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a09b43e02f76ac0145f86a08e045e2ea452066f7ba064fd6b0cdccb486f7c3e7"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0b856cb19c60cd40198be5d8d4b556228e3dcd545b4f423d1ad812bfdca5884"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3687d002f911e8a5faf977e619a034d159a8373514a587249cc00f211c67a091"}, - {file = "ruff-0.5.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ac9dc814e510436e30d0ba535f435a7f3dc97f895f844f5b3f347ec8c228a523"}, - {file = "ruff-0.5.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:af9bdf6c389b5add40d89b201425b531e0a5cceb3cfdcc69f04d3d531c6be74f"}, - {file = "ruff-0.5.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d40a8533ed545390ef8315b8e25c4bb85739b90bd0f3fe1280a29ae364cc55d8"}, - {file = "ruff-0.5.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cab904683bf9e2ecbbe9ff235bfe056f0eba754d0168ad5407832928d579e7ab"}, - {file = "ruff-0.5.5-py3-none-win32.whl", hash = "sha256:696f18463b47a94575db635ebb4c178188645636f05e934fdf361b74edf1bb2d"}, - {file = "ruff-0.5.5-py3-none-win_amd64.whl", hash = "sha256:50f36d77f52d4c9c2f1361ccbfbd09099a1b2ea5d2b2222c586ab08885cf3445"}, - {file = "ruff-0.5.5-py3-none-win_arm64.whl", hash = "sha256:3191317d967af701f1b73a31ed5788795936e423b7acce82a2b63e26eb3e89d6"}, - {file = "ruff-0.5.5.tar.gz", hash = "sha256:cc5516bdb4858d972fbc31d246bdb390eab8df1a26e2353be2dbc0c2d7f5421a"}, + {file = "ruff-0.5.6-py3-none-linux_armv6l.whl", hash = "sha256:a0ef5930799a05522985b9cec8290b185952f3fcd86c1772c3bdbd732667fdcd"}, + {file = "ruff-0.5.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b652dc14f6ef5d1552821e006f747802cc32d98d5509349e168f6bf0ee9f8f42"}, + {file = 
"ruff-0.5.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:80521b88d26a45e871f31e4b88938fd87db7011bb961d8afd2664982dfc3641a"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9bc8f328a9f1309ae80e4d392836e7dbc77303b38ed4a7112699e63d3b066ab"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d394940f61f7720ad371ddedf14722ee1d6250fd8d020f5ea5a86e7be217daf"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111a99cdb02f69ddb2571e2756e017a1496c2c3a2aeefe7b988ddab38b416d36"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e395daba77a79f6dc0d07311f94cc0560375ca20c06f354c7c99af3bf4560c5d"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c476acb43c3c51e3c614a2e878ee1589655fa02dab19fe2db0423a06d6a5b1b6"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2ff8003f5252fd68425fd53d27c1f08b201d7ed714bb31a55c9ac1d4c13e2eb"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c94e084ba3eaa80c2172918c2ca2eb2230c3f15925f4ed8b6297260c6ef179ad"}, + {file = "ruff-0.5.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1f77c1c3aa0669fb230b06fb24ffa3e879391a3ba3f15e3d633a752da5a3e670"}, + {file = "ruff-0.5.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f908148c93c02873210a52cad75a6eda856b2cbb72250370ce3afef6fb99b1ed"}, + {file = "ruff-0.5.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:563a7ae61ad284187d3071d9041c08019975693ff655438d8d4be26e492760bd"}, + {file = "ruff-0.5.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:94fe60869bfbf0521e04fd62b74cbca21cbc5beb67cbb75ab33fe8c174f54414"}, + {file = "ruff-0.5.6-py3-none-win32.whl", hash = "sha256:e6a584c1de6f8591c2570e171cc7ce482bb983d49c70ddf014393cd39e9dfaed"}, + {file = "ruff-0.5.6-py3-none-win_amd64.whl", hash = "sha256:d7fe7dccb1a89dc66785d7aa0ac283b2269712d8ed19c63af908fdccca5ccc1a"}, + {file = "ruff-0.5.6-py3-none-win_arm64.whl", hash = "sha256:57c6c0dd997b31b536bff49b9eee5ed3194d60605a4427f735eeb1f9c1b8d264"}, + {file = "ruff-0.5.6.tar.gz", hash = "sha256:07c9e3c2a8e1fe377dd460371c3462671a728c981c3205a5217291422209f642"}, ] [[package]] @@ -1195,13 +1229,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.30.4" +version = "0.30.5" description = "The lightning-fast ASGI server." 
optional = false python-versions = ">=3.8" files = [ - {file = "uvicorn-0.30.4-py3-none-any.whl", hash = "sha256:06b00e3087e58c6865c284143c0c42f810b32ff4f265ab19d08c566f74a08728"}, - {file = "uvicorn-0.30.4.tar.gz", hash = "sha256:00db9a9e3711a5fa59866e2b02fac69d8dc70ce0814aaec9a66d1d9e5c832a30"}, + {file = "uvicorn-0.30.5-py3-none-any.whl", hash = "sha256:b2d86de274726e9878188fa07576c9ceeff90a839e2b6e25c917fe05f5a6c835"}, + {file = "uvicorn-0.30.5.tar.gz", hash = "sha256:ac6fdbd4425c5fd17a9fe39daf4d4d075da6fdc80f653e5894cdc2fd98752bee"}, ] [package.dependencies] @@ -1259,4 +1293,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "f48dca64557d652682ac309935aa15d06d5a1f7b887b87af7f6bcca57d0a54ca" +content-hash = "5e9494ca4690f58c633eece2260881d9462f32849d66fa503ec7caba28db7031" diff --git a/rnd/market/pyproject.toml b/rnd/market/pyproject.toml index d2339f7b05..ab62bc43b5 100644 --- a/rnd/market/pyproject.toml +++ b/rnd/market/pyproject.toml @@ -19,6 +19,7 @@ fuzzywuzzy = "^0.18.0" python-levenshtein = "^0.25.1" # autogpt-server = { path = "../autogpt_server", develop = true } prometheus-fastapi-instrumentator = "^7.0.0" +autogpt-libs = {path = "../autogpt_libs"} [tool.poetry.group.dev.dependencies] diff --git a/rnd/market/schema.prisma b/rnd/market/schema.prisma index 9e4d020d36..337af45f35 100644 --- a/rnd/market/schema.prisma +++ b/rnd/market/schema.prisma @@ -11,12 +11,23 @@ generator client { partial_type_generator = "market/utils/partial_types.py" } +enum SubmissionStatus { + PENDING + APPROVED + REJECTED +} + model Agents { - id String @unique @default(dbgenerated("gen_random_uuid()")) @db.Uuid + id String @unique @default(dbgenerated("gen_random_uuid()")) @db.Uuid + version Int @default(1) + createdAt DateTime @default(now()) updatedAt DateTime @updatedAt - version Int @default(1) + submissionDate DateTime @default(now()) + submissionReviewDate DateTime? + submissionStatus SubmissionStatus @default(PENDING) + submissionReviewComments String? name String? description String?