Compare commits

...

22 Commits

Author SHA1 Message Date
Aarushi
8aff0cd768 fix docker issues with market, and DB connection 2024-09-13 15:55:22 +01:00
Nicholas Tindle
0afbe26df2 Merge branch 'master' into ntindle/secrt-848-analytics-instrumentation-log-user-activityevents-metrics 2024-09-12 11:47:29 -05:00
Nicholas Tindle
87e2a8efc1 Merge branch 'master' into ntindle/secrt-848-analytics-instrumentation-log-user-activityevents-metrics 2024-09-12 10:55:55 -05:00
Nicholas Tindle
0d050c8b12 fix: market port mess 2024-09-12 10:35:28 -05:00
Nicholas Tindle
ddbacf35e4 ref(server): refactor how we do raw analytics 2024-09-12 10:35:18 -05:00
Nicholas Tindle
1e3208a3c6 feat: raw metrics 2024-09-11 15:37:48 -05:00
Nicholas Tindle
1b14a97fa1 fix: linting 2024-09-11 15:24:03 -05:00
Nicholas Tindle
b770a7b044 feat: user create analytics 2024-09-11 15:20:44 -05:00
Nicholas Tindle
e608491b70 fix: merge conflict 2024-09-11 13:20:21 -05:00
Nicholas Tindle
cc8b932bb4 feat(builder): use better tracking mechanism 2024-09-11 13:17:14 -05:00
Nicholas Tindle
b8b6e1ccb2 Merge branch 'master' into ntindle/secrt-848-analytics-instrumentation-log-user-activityevents-metrics 2024-09-11 13:06:01 -05:00
Nicholas Tindle
e604264f82 fix: linting 2024-09-10 22:03:55 -05:00
Nicholas Tindle
ae95bad6fb feat(builder): add page view tracking 2024-09-10 21:23:21 -05:00
Nicholas Tindle
612ceeea62 feat(server): counting analytics 2024-09-10 17:54:10 -05:00
Nicholas Tindle
0b3ca7bad5 fix(server): missed index update 2024-09-10 12:45:46 -05:00
Nicholas Tindle
8cb3dd712d feat(server, builder): allow string indexes 2024-09-10 12:44:49 -05:00
Nicholas Tindle
212b04c1f4 fix(server): revert accidental undo changes 2024-09-10 12:41:41 -05:00
Nicholas Tindle
e27f5bb2e5 feat(builder): not tested step logging 2024-09-10 12:31:14 -05:00
Nicholas Tindle
7154f80927 feat(builder): client side api implementation 2024-09-10 12:30:49 -05:00
Nicholas Tindle
943e3241c9 fix: move to routers 2024-09-10 12:29:22 -05:00
Nicholas Tindle
f55a7c2475 fix: linting 2024-09-10 10:10:15 -05:00
Nicholas Tindle
2bcf373f0f feat(server): first backend endpoint 2024-09-09 20:28:20 -05:00
26 changed files with 572 additions and 25 deletions

View File

@@ -1,6 +1,6 @@
NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8000/api
NEXT_PUBLIC_AGPT_WS_SERVER_URL=ws://localhost:8001/ws
NEXT_PUBLIC_AGPT_MARKETPLACE_URL=http://localhost:8005/api/v1/market
NEXT_PUBLIC_AGPT_MARKETPLACE_URL=http://localhost:8015/api/v1/market
## Supabase credentials
## YOU ONLY NEED THEM IF YOU WANT TO USE SUPABASE USER AUTHENTICATION

View File

@@ -3,6 +3,7 @@ import { revalidatePath } from "next/cache";
import { redirect } from "next/navigation";
import { createServerClient } from "@/lib/supabase/server";
import { z } from "zod";
import AutoGPTServerAPI from "@/lib/autogpt-server-api";
const loginFormSchema = z.object({
email: z.string().email().min(2).max(64),
@@ -32,6 +33,7 @@ export async function login(values: z.infer<typeof loginFormSchema>) {
}
export async function signup(values: z.infer<typeof loginFormSchema>) {
"use server";
const supabase = createServerClient();
if (!supabase) {
@@ -48,6 +50,16 @@ export async function signup(values: z.infer<typeof loginFormSchema>) {
if (data.session) {
await supabase.auth.setSession(data.session);
}
if (data.user) {
const api = new AutoGPTServerAPI();
api.logCreateUser({
email: values.email,
user_id: data.user.id,
name: values.email,
username: values.email,
});
}
revalidatePath("/", "layout");
redirect("/profile");

View File

@@ -7,7 +7,7 @@ import AgentDetailContent from "@/components/marketplace/AgentDetailContent";
async function getAgentDetails(id: string): Promise<AgentDetailResponse> {
const apiUrl =
process.env.NEXT_PUBLIC_AGPT_MARKETPLACE_URL ||
"http://localhost:8001/api/v1/market";
"http://localhost:8015/api/v1/market";
const api = new MarketplaceAPI(apiUrl);
try {
console.log(`Fetching agent details for id: ${id}`);

View File

@@ -185,7 +185,7 @@ const Pagination: React.FC<{
const Marketplace: React.FC = () => {
const apiUrl =
process.env.NEXT_PUBLIC_AGPT_MARKETPLACE_URL ||
"http://localhost:8001/api/v1/market";
"http://localhost:8015/api/v1/market";
const api = useMemo(() => new MarketplaceAPI(apiUrl), [apiUrl]);
const [searchValue, setSearchValue] = useState("");

View File

@@ -5,12 +5,15 @@ import { ThemeProvider as NextThemesProvider } from "next-themes";
import { ThemeProviderProps } from "next-themes/dist/types";
import { TooltipProvider } from "@/components/ui/tooltip";
import SupabaseProvider from "@/components/SupabaseProvider";
import { PageViewProvider } from "@/components/providers/PageViewProvider";
export function Providers({ children, ...props }: ThemeProviderProps) {
return (
<NextThemesProvider {...props}>
<SupabaseProvider>
<TooltipProvider>{children}</TooltipProvider>
<PageViewProvider>
<TooltipProvider>{children}</TooltipProvider>
</PageViewProvider>
</SupabaseProvider>
</NextThemesProvider>
);

View File

@@ -0,0 +1,11 @@
"use server";
import { TutorialStepData } from "@/lib/autogpt-server-api/types";
import AutoGPTServerAPI from "@/lib/autogpt-server-api/client";
export const sendTutorialStep = async (data: TutorialStepData) => {
console.log("sendTutorialStep", data);
const api = new AutoGPTServerAPI();
await api.logTutorialStep(data);
};

View File

@@ -81,7 +81,7 @@ function convertGraphToReactFlow(graph: any): { nodes: Node[]; edges: Edge[] } {
async function installGraph(id: string): Promise<void> {
const apiUrl =
process.env.NEXT_PUBLIC_AGPT_MARKETPLACE_URL ||
"http://localhost:8001/api/v1/market";
"http://localhost:8015/api/v1/market";
const api = new MarketplaceAPI(apiUrl);
const serverAPIUrl = process.env.AGPT_SERVER_API_URL;

View File

@@ -0,0 +1,36 @@
import React, { createContext, useContext, useEffect } from "react";
import { usePathname, useSearchParams } from "next/navigation";
import logPageViewAction from "./actions";
const EXCLUDED_PATHS = ["/login"];
const PageViewContext = createContext<null>(null);
export const PageViewProvider: React.FC<{ children: React.ReactNode }> = ({
children,
}) => {
const pathname = usePathname();
const searchParams = useSearchParams();
useEffect(() => {
if (EXCLUDED_PATHS.includes(pathname)) {
return;
}
const logPageView = async () => {
const pageViewData = {
page: pathname,
data: Object.fromEntries(searchParams.entries()),
};
await logPageViewAction(pageViewData.page, pageViewData.data);
};
logPageView().catch(console.error);
}, [pathname, searchParams]);
return (
<PageViewContext.Provider value={null}>{children}</PageViewContext.Provider>
);
};
export const usePageViews = () => useContext(PageViewContext);

View File

@@ -0,0 +1,6 @@
import AutoGPTServerAPI from "@/lib/autogpt-server-api/client";
export default function logPageViewAction(page: string, data: any) {
const apiClient = new AutoGPTServerAPI();
apiClient.logPageView({ page, data });
}

View File

@@ -1,5 +1,6 @@
import Shepherd from "shepherd.js";
import "shepherd.js/dist/css/shepherd.css";
import { sendTutorialStep } from "./build/actions";
export const startTutorial = (
setPinBlocksPopover: (value: boolean) => void,
@@ -493,6 +494,17 @@ export const startTutorial = (
localStorage.setItem("shepherd-tour", "completed"); // Optionally mark the tutorial as completed
});
for (const step of tour.steps) {
step.on("complete", () => {
console.log("sendTutorialStep");
sendTutorialStep({
step: step.id,
data: {},
});
});
}
tour.on("cancel", () => {
setPinBlocksPopover(false);
localStorage.setItem("shepherd-tour", "canceled"); // Optionally mark the tutorial as canceled

View File

@@ -8,6 +8,9 @@ import {
GraphExecuteResponse,
NodeExecutionResult,
User,
UserData,
TutorialStepData,
PageViewData,
} from "./types";
export default class AutoGPTServerAPI {
@@ -32,6 +35,21 @@ export default class AutoGPTServerAPI {
return this._request("POST", "/auth/user", {});
}
// Analytics
async logCreateUser(userData: UserData): Promise<string> {
return this._request("POST", "/analytics/log_new_user", userData);
}
async logTutorialStep(data: TutorialStepData): Promise<string> {
return this._request("POST", "/analytics/log_tutorial_step", data);
}
async logPageView(pageViewData: PageViewData): Promise<string> {
return this._request("POST", "/analytics/log_page_view", {
page_view_data: pageViewData,
});
}
async getBlocks(): Promise<Block[]> {
return await this._get("/blocks");
}

View File

@@ -190,3 +190,20 @@ export enum BlockUIType {
OUTPUT = "Output",
NOTE = "Note",
}
export type UserData = {
user_id: string;
email: string;
name: string;
username: string;
};
export type TutorialStepData = {
step: string;
data: { [key: string]: any };
};
export type PageViewData = {
page: string;
data: { [key: string]: any };
};

View File

@@ -17,7 +17,7 @@ export default class MarketplaceAPI {
constructor(
baseUrl: string = process.env.NEXT_PUBLIC_AGPT_MARKETPLACE_URL ||
"http://localhost:8001/api/v1/market",
"http://localhost:8015/api/v1/market",
) {
this.baseUrl = baseUrl;
}

View File

@@ -1,3 +1,6 @@
{
"python.analysis.typeCheckingMode": "basic",
"python.testing.pytestArgs": ["test"],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true
}

View File

@@ -0,0 +1,111 @@
import logging
import prisma.enums
import prisma.types
logger = logging.getLogger(__name__)
async def log_raw_analytics(
user_id: str,
type: prisma.enums.AnalyticsType,
data: dict,
data_index: str,
):
details = await prisma.models.AnalyticsDetails.prisma().create(
data={
"userId": user_id,
"type": type,
"data": prisma.Json(data),
"dataIndex": data_index,
}
)
return details
async def log_raw_metric(
user_id: str,
metric_name: prisma.enums.AnalyticsMetric,
aggregation_type: prisma.enums.AggregationType,
metric_value: float,
data_string: str,
):
if metric_value < 0:
raise ValueError("metric_value must be non-negative")
if aggregation_type == prisma.enums.AggregationType.NO_AGGREGATION:
value_increment = metric_value
counter_increment = 0
elif aggregation_type in [
prisma.enums.AggregationType.COUNT,
prisma.enums.AggregationType.SUM,
]:
value_increment = metric_value
counter_increment = 1
elif aggregation_type in [
prisma.enums.AggregationType.AVG,
prisma.enums.AggregationType.MAX,
prisma.enums.AggregationType.MIN,
]:
value_increment = 0 # These will be handled differently in a separate query
counter_increment = 1
else:
raise ValueError(f"Unsupported aggregation_type: {aggregation_type}")
result = await prisma.models.AnalyticsMetrics.prisma().upsert(
data={
"update": {
"value": {"increment": value_increment},
"aggregationCounter": {"increment": counter_increment},
},
"create": {
"value": metric_value,
"analyticMetric": metric_name,
"userId": user_id,
"dataString": data_string,
"aggregationType": aggregation_type,
"aggregationCounter": 1,
},
},
where={
"analyticMetric_userId_dataString_aggregationType": {
"analyticMetric": metric_name,
"userId": user_id,
"dataString": data_string,
"aggregationType": aggregation_type,
}
},
)
# For AVG, MAX, and MIN, we need to perform additional operations
if aggregation_type in [
prisma.enums.AggregationType.AVG,
prisma.enums.AggregationType.MAX,
prisma.enums.AggregationType.MIN,
]:
existing = await prisma.models.AnalyticsMetrics.prisma().find_unique(
where={
"analyticMetric_userId_dataString_aggregationType": {
"analyticMetric": metric_name,
"userId": user_id,
"dataString": data_string,
"aggregationType": aggregation_type,
}
}
)
if existing:
if aggregation_type == prisma.enums.AggregationType.AVG:
new_value = (
existing.value * existing.aggregationCounter + metric_value
) / (existing.aggregationCounter + 1)
elif aggregation_type == prisma.enums.AggregationType.MAX:
new_value = max(existing.value, metric_value)
else: # MIN
new_value = min(existing.value, metric_value)
result = await prisma.models.AnalyticsMetrics.prisma().update(
data={"value": new_value}, where={"id": existing.id}
)
if not result:
raise ValueError(f"Failed to update metric: {existing.id}")
return result
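The AVG branch above recomputes the stored value as a running average of all reported samples. A minimal sanity check of that arithmetic in plain Python, using made-up numbers that are not part of this change:

# Running-average update used for AggregationType.AVG (illustrative values only)
existing_value, counter = 4.0, 3   # previously stored average and sample count
metric_value = 8.0                 # newly reported sample
new_value = (existing_value * counter + metric_value) / (counter + 1)
assert new_value == 5.0            # (4.0*3 + 8.0) / 4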

View File

@@ -76,131 +76,168 @@ class AgentServer(AppService):
api_router.dependencies.append(Depends(auth_middleware))
# Import & Attach sub-routers
from .integrations import integrations_api_router
import autogpt_server.server.routers.analytics
import autogpt_server.server.routers.integrations
api_router.include_router(integrations_api_router, prefix="/integrations")
api_router.include_router(
autogpt_server.server.routers.integrations.router,
prefix="/integrations",
tags=["integrations"],
dependencies=[Depends(auth_middleware)],
)
api_router.include_router(
autogpt_server.server.routers.analytics.router,
prefix="/analytics",
tags=["analytics"],
dependencies=[Depends(auth_middleware)],
)
api_router.add_api_route(
path="/auth/user",
endpoint=self.get_or_create_user_route,
methods=["POST"],
tags=["auth"],
)
api_router.add_api_route(
path="/blocks",
endpoint=self.get_graph_blocks,
methods=["GET"],
tags=["blocks"],
)
api_router.add_api_route(
path="/blocks/{block_id}/execute",
endpoint=self.execute_graph_block,
methods=["POST"],
tags=["blocks"],
)
api_router.add_api_route(
path="/graphs",
endpoint=self.get_graphs,
methods=["GET"],
tags=["graphs"],
)
api_router.add_api_route(
path="/templates",
endpoint=self.get_templates,
methods=["GET"],
tags=["templates", "graphs"],
)
api_router.add_api_route(
path="/graphs",
endpoint=self.create_new_graph,
methods=["POST"],
tags=["graphs"],
)
api_router.add_api_route(
path="/templates",
endpoint=self.create_new_template,
methods=["POST"],
tags=["templates", "graphs"],
)
api_router.add_api_route(
path="/graphs/{graph_id}",
endpoint=self.get_graph,
methods=["GET"],
tags=["graphs"],
)
api_router.add_api_route(
path="/templates/{graph_id}",
endpoint=self.get_template,
methods=["GET"],
tags=["templates", "graphs"],
)
api_router.add_api_route(
path="/graphs/{graph_id}",
endpoint=self.update_graph,
methods=["PUT"],
tags=["graphs"],
)
api_router.add_api_route(
path="/templates/{graph_id}",
endpoint=self.update_graph,
methods=["PUT"],
tags=["templates", "graphs"],
)
api_router.add_api_route(
path="/graphs/{graph_id}/versions",
endpoint=self.get_graph_all_versions,
methods=["GET"],
tags=["graphs"],
)
api_router.add_api_route(
path="/templates/{graph_id}/versions",
endpoint=self.get_graph_all_versions,
methods=["GET"],
tags=["templates", "graphs"],
)
api_router.add_api_route(
path="/graphs/{graph_id}/versions/{version}",
endpoint=self.get_graph,
methods=["GET"],
tags=["graphs"],
)
api_router.add_api_route(
path="/graphs/{graph_id}/versions/active",
endpoint=self.set_graph_active_version,
methods=["PUT"],
tags=["graphs"],
)
api_router.add_api_route(
path="/graphs/{graph_id}/input_schema",
endpoint=self.get_graph_input_schema,
methods=["GET"],
tags=["graphs"],
)
api_router.add_api_route(
path="/graphs/{graph_id}/execute",
endpoint=self.execute_graph,
methods=["POST"],
tags=["graphs"],
)
api_router.add_api_route(
path="/graphs/{graph_id}/executions",
endpoint=self.list_graph_runs,
methods=["GET"],
tags=["graphs"],
)
api_router.add_api_route(
path="/graphs/{graph_id}/executions/{graph_exec_id}",
endpoint=self.get_graph_run_node_execution_results,
methods=["GET"],
tags=["graphs"],
)
api_router.add_api_route(
path="/graphs/{graph_id}/executions/{graph_exec_id}/stop",
endpoint=self.stop_graph_run,
methods=["POST"],
tags=["graphs"],
)
api_router.add_api_route(
path="/graphs/{graph_id}/schedules",
endpoint=self.create_schedule,
methods=["POST"],
tags=["graphs"],
)
api_router.add_api_route(
path="/graphs/{graph_id}/schedules",
endpoint=self.get_execution_schedules,
methods=["GET"],
tags=["graphs"],
)
api_router.add_api_route(
path="/graphs/schedules/{schedule_id}",
endpoint=self.update_schedule,
methods=["PUT"],
tags=["graphs"],
)
api_router.add_api_route(
path="/settings",
endpoint=self.update_configuration,
methods=["POST"],
tags=["settings"],
)
app.add_exception_handler(500, self.handle_internal_http_error)

View File

@@ -0,0 +1,138 @@
# Analytics API
from typing import Annotated, Optional
import fastapi
import prisma
import prisma.enums
import pydantic
import autogpt_server.data.analytics
from autogpt_server.server.utils import get_user_id
router = fastapi.APIRouter()
class UserData(pydantic.BaseModel):
user_id: str
email: str
name: str
username: str
@router.post(path="/log_new_user")
async def log_create_user(
user_id: Annotated[str, fastapi.Depends(get_user_id)],
user_data: Annotated[
UserData, fastapi.Body(..., embed=True, description="The user data to log")
],
):
"""
Log the user ID for analytics purposes.
"""
result = await autogpt_server.data.analytics.log_raw_analytics(
user_id,
prisma.enums.AnalyticsType.CREATE_USER,
user_data.model_dump(),
"",
)
return result.id
@router.post(path="/log_tutorial_step")
async def log_tutorial_step(
user_id: Annotated[str, fastapi.Depends(get_user_id)],
step: Annotated[str, fastapi.Body(..., embed=True)],
data: Annotated[
Optional[dict],
fastapi.Body(..., embed=True, description="Any additional data to log"),
],
):
"""
Log the tutorial step completed by the user for analytics purposes.
"""
result = await autogpt_server.data.analytics.log_raw_analytics(
user_id,
prisma.enums.AnalyticsType.TUTORIAL_STEP,
data or {},
step,
)
await autogpt_server.data.analytics.log_raw_metric(
user_id=user_id,
metric_name=prisma.enums.AnalyticsMetric.TUTORIAL_STEP_COMPLETION,
aggregation_type=prisma.enums.AggregationType.COUNT,
metric_value=1,
data_string=step,
)
return result.id
class PageViewData(pydantic.BaseModel):
page: str = pydantic.Field(description="The page viewed")
data: Optional[dict] = pydantic.Field(
default_factory=dict, description="Any additional data to log"
)
@router.post(path="/log_page_view")
async def log_page_view(
user_id: Annotated[str, fastapi.Depends(get_user_id)],
page_view_data: Annotated[PageViewData, fastapi.Body(..., embed=True)],
):
"""
Log the page view for analytics purposes.
"""
await autogpt_server.data.analytics.log_raw_metric(
user_id=user_id,
metric_name=prisma.enums.AnalyticsMetric.PAGE_VIEW,
aggregation_type=prisma.enums.AggregationType.COUNT,
metric_value=1,
data_string=page_view_data.page,
)
result = await autogpt_server.data.analytics.log_raw_analytics(
user_id=user_id,
type=prisma.enums.AnalyticsType.WEB_PAGE,
data=page_view_data.data or {},
data_index=page_view_data.page,
)
return result.id
@router.post(path="/log_raw_metric")
async def log_raw_metric(
user_id: Annotated[str, fastapi.Depends(get_user_id)],
metric_name: Annotated[prisma.enums.AnalyticsMetric, fastapi.Body(..., embed=True)],
aggregation_type: Annotated[
prisma.enums.AggregationType, fastapi.Body(..., embed=True)
],
metric_value: Annotated[float, fastapi.Body(..., embed=True)],
data_string: Annotated[str, fastapi.Body(..., embed=True)],
):
result = await autogpt_server.data.analytics.log_raw_metric(
user_id, metric_name, aggregation_type, metric_value, data_string
)
return result.id
@router.post("/log_raw_analytics")
async def log_raw_analytics(
user_id: Annotated[str, fastapi.Depends(get_user_id)],
type: Annotated[prisma.enums.AnalyticsType, fastapi.Body(..., embed=True)],
data: Annotated[
dict,
fastapi.Body(..., embed=True, description="The data to log"),
],
data_index: Annotated[
str,
fastapi.Body(
...,
embed=True,
description="Indexable field for any count based analytical measures like page order clicking, tutorial step completion, etc.",
),
],
):
result = await autogpt_server.data.analytics.log_raw_analytics(
user_id, type, data, data_index
)
return result.id
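Because the parameters on these routes are declared with fastapi.Body(..., embed=True), the request body nests each field under its parameter name. A hedged sketch of what a page-view call might look like from a Python script; the base URL and bearer token are placeholders, not values defined in this change:

import requests

# Hypothetical request: auth_middleware normally resolves the user, so the
# Authorization header is a stand-in for whatever token the deployment uses.
resp = requests.post(
    "http://localhost:8000/api/analytics/log_page_view",
    json={"page_view_data": {"page": "/build", "data": {"ref": "tutorial"}}},
    headers={"Authorization": "Bearer <token>"},
)
print(resp.json())  # the id of the created AnalyticsDetails row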

View File

@@ -11,11 +11,11 @@ from supabase import Client
from autogpt_server.integrations.oauth import HANDLERS_BY_NAME, BaseOAuthHandler
from autogpt_server.util.settings import Settings
from .utils import get_supabase, get_user_id
from ..utils import get_supabase, get_user_id
logger = logging.getLogger(__name__)
settings = Settings()
integrations_api_router = APIRouter()
router = APIRouter()
def get_store(supabase: Client = Depends(get_supabase)):
@@ -26,7 +26,7 @@ class LoginResponse(BaseModel):
login_url: str
@integrations_api_router.get("/{provider}/login")
@router.get("/{provider}/login")
async def login(
provider: Annotated[str, Path(title="The provider to initiate an OAuth flow for")],
user_id: Annotated[str, Depends(get_user_id)],
@@ -52,7 +52,7 @@ class CredentialsMetaResponse(BaseModel):
credentials_type: Literal["oauth2", "api_key"]
@integrations_api_router.post("/{provider}/callback")
@router.post("/{provider}/callback")
async def callback(
provider: Annotated[str, Path(title="The target provider for this OAuth exchange")],
code: Annotated[str, Body(title="Authorization code acquired by user login")],

View File

@@ -0,0 +1,53 @@
-- CreateEnum
CREATE TYPE "AnalyticsType" AS ENUM ('CREATE_USER', 'TUTORIAL_STEP', 'WEB_PAGE', 'AGENT_GRAPH_EXECUTION', 'AGENT_NODE_EXECUTION');
-- CreateEnum
CREATE TYPE "AnalyticsMetric" AS ENUM ('PAGE_VIEW', 'TUTORIAL_STEP_COMPLETION', 'AGENT_GRAPH_EXECUTION', 'AGENT_NODE_EXECUTION');
-- CreateEnum
CREATE TYPE "AggregationType" AS ENUM ('COUNT', 'SUM', 'AVG', 'MAX', 'MIN', 'NO_AGGREGATION');
-- CreateTable
CREATE TABLE "AnalyticsDetails" (
"id" TEXT NOT NULL DEFAULT gen_random_uuid(),
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"userId" TEXT NOT NULL,
"type" "AnalyticsType" NOT NULL,
"data" JSONB,
"dataIndex" INTEGER,
CONSTRAINT "AnalyticsDetails_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "AnalyticsMetrics" (
"id" TEXT NOT NULL DEFAULT gen_random_uuid(),
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
"analyticMetric" "AnalyticsMetric" NOT NULL,
"value" DOUBLE PRECISION NOT NULL,
"dataString" TEXT,
"aggregationType" "AggregationType" NOT NULL DEFAULT 'NO_AGGREGATION',
"userId" TEXT NOT NULL,
CONSTRAINT "AnalyticsMetrics_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE INDEX "analyticsDetails" ON "AnalyticsDetails"("userId", "type");
-- CreateIndex
CREATE INDEX "AnalyticsDetails_type_idx" ON "AnalyticsDetails"("type");
-- CreateIndex
CREATE INDEX "analytics_metric_index" ON "AnalyticsMetrics"("analyticMetric", "userId", "dataString", "aggregationType");
-- CreateIndex
CREATE UNIQUE INDEX "AnalyticsMetrics_analyticMetric_userId_dataString_aggregati_key" ON "AnalyticsMetrics"("analyticMetric", "userId", "dataString", "aggregationType");
-- AddForeignKey
ALTER TABLE "AnalyticsDetails" ADD CONSTRAINT "AnalyticsDetails_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "AnalyticsMetrics" ADD CONSTRAINT "AnalyticsMetrics_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "AnalyticsMetrics" ADD COLUMN "aggregationCounter" INTEGER DEFAULT 1;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "AnalyticsDetails" ALTER COLUMN "dataIndex" SET DATA TYPE TEXT;

View File

@@ -0,0 +1,8 @@
/*
Warnings:
- Made the column `aggregationCounter` on table `AnalyticsMetrics` required. This step will fail if there are existing NULL values in that column.
*/
-- AlterTable
ALTER TABLE "AnalyticsMetrics" ALTER COLUMN "aggregationCounter" SET NOT NULL;

View File

@@ -22,6 +22,8 @@ model User {
AgentGraphs AgentGraph[]
AgentGraphExecutions AgentGraphExecution[]
AgentGraphExecutionSchedules AgentGraphExecutionSchedule[]
AnalyticsDetails AnalyticsDetails[]
AnalyticsMetrics AnalyticsMetrics[]
@@index([id])
@@index([email])
@@ -29,9 +31,9 @@ model User {
// This model describes the Agent Graph/Flow (Multi Agent System).
model AgentGraph {
id String @default(uuid())
version Int @default(1)
createdAt DateTime @default(now())
id String @default(uuid())
version Int @default(1)
createdAt DateTime @default(now())
updatedAt DateTime? @updatedAt
name String?
@@ -115,8 +117,8 @@ model AgentBlock {
// This model describes the execution of an AgentGraph.
model AgentGraphExecution {
id String @id @default(uuid())
createdAt DateTime @default(now())
id String @id @default(uuid())
createdAt DateTime @default(now())
updatedAt DateTime? @updatedAt
agentGraphId String
@@ -178,8 +180,8 @@ model AgentNodeExecutionInputOutput {
// This model describes the recurring execution schedule of an Agent.
model AgentGraphExecutionSchedule {
id String @id
createdAt DateTime @default(now())
id String @id
createdAt DateTime @default(now())
updatedAt DateTime? @updatedAt
agentGraphId String
@@ -199,3 +201,79 @@ model AgentGraphExecutionSchedule {
@@index([isEnabled])
}
enum AnalyticsType {
CREATE_USER
TUTORIAL_STEP
WEB_PAGE
AGENT_GRAPH_EXECUTION
AGENT_NODE_EXECUTION
}
model AnalyticsDetails {
// PK uses gen_random_uuid() to allow the db inserts to happen outside of prisma
// typical uuid() inserts are handled by prisma
id String @id @default(dbgenerated("gen_random_uuid()"))
createdAt DateTime @default(now())
updatedAt DateTime @default(now()) @updatedAt
// Link to User model
userId String
user User @relation(fields: [userId], references: [id])
// Analytics Categorical data used for filtering (indexable w and w/o userId)
type AnalyticsType
// Analytic Specific Data. We should use a union type here, but prisma doesn't support it.
data Json?
// Indexable field for any count based analytical measures like page order clicking, tutorial step completion, etc.
dataIndex String?
@@index([userId, type], name: "analyticsDetails")
@@index([type])
}
enum AnalyticsMetric {
PAGE_VIEW
TUTORIAL_STEP_COMPLETION
AGENT_GRAPH_EXECUTION
AGENT_NODE_EXECUTION
}
enum AggregationType {
COUNT
SUM
AVG
MAX
MIN
NO_AGGREGATION
}
model AnalyticsMetrics {
id String @id @default(dbgenerated("gen_random_uuid()"))
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
// Analytics Categorical data used for filtering (indexable w and w/o userId)
analyticMetric AnalyticsMetric
// Any numeric data that should be counted upon, summed, or otherwise aggregated.
value Float
// Any string data that should be used to identify the metric as distinct.
// ex: '/build' vs '/market'
dataString String?
// Data Aggregation Type
aggregationType AggregationType @default(NO_AGGREGATION)
// Aggregation Counter used for aggregation style events that benefit from it. (AVG) (not self incrementing)
aggregationCounter Int @default(1)
// Link to User model
userId String
user User @relation(fields: [userId], references: [id])
// Allows us to have unique but useful user level metrics.
@@unique([analyticMetric, userId, dataString, aggregationType])
@@index(fields: [analyticMetric, userId, dataString, aggregationType], name: "analytics_metric_index")
}
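Rows in AnalyticsMetrics are unique per (analyticMetric, userId, dataString, aggregationType), so reading a user's counters back is a simple filter. A hypothetical helper, assuming the same prisma-client-py model access used in data/analytics.py above (not part of this change):

import prisma.enums
import prisma.models

async def get_page_view_counts(user_id: str):
    # One row per distinct page (dataString), thanks to the unique constraint above.
    return await prisma.models.AnalyticsMetrics.prisma().find_many(
        where={
            "userId": user_id,
            "analyticMetric": prisma.enums.AnalyticsMetric.PAGE_VIEW,
        },
        order={"value": "desc"},
    )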

View File

@@ -161,9 +161,9 @@ services:
migrate:
condition: service_completed_successfully
environment:
- DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
- DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60&schema=market
ports:
- "8015:8000"
- "8015:8015"
networks:
- app-network

View File

@@ -56,6 +56,6 @@ WORKDIR /app/rnd/market
FROM server_dependencies AS server
ENV DATABASE_URL=""
ENV PORT=8000
ENV PORT=8015
CMD ["uvicorn", "market.app:app", "--reload"]
CMD ["poetry", "run", "app"]

View File

@@ -45,7 +45,7 @@ def populate_database():
keywords=["test"],
)
response = requests.post(
"http://localhost:8001/api/v1/market/admin/agent", json=req.model_dump()
"http://localhost:8015/api/v1/market/admin/agent", json=req.model_dump()
)
print(response.text)
@@ -59,7 +59,7 @@ def format():
def app():
port = os.getenv("PORT", "8015")
run("uvicorn", "market.app:app", "--reload", "--port", port)
run("uvicorn", "market.app:app", "--reload", "--port", port, "--host", "0.0.0.0")
def setup():