From b364f9e58bb0e51dd989fc254e8fd64eb0dbac6b Mon Sep 17 00:00:00 2001 From: Zamil Majdy Date: Wed, 8 Apr 2026 22:50:12 +0700 Subject: [PATCH] fix(backend): use raw SQL INSERT for PlatformCostLog to avoid Prisma Json type issue Prisma Python requires Json?-typed fields to be wrapped in prisma.fields.Json() which is a non-obvious footgun. The rest of platform_cost.py already uses execute_raw_with_schema / query_raw_with_schema for all its DB access, so align the INSERT to the same pattern. JSON metadata is serialized with json.dumps() and passed as a plain text parameter. --- .../backend/backend/data/platform_cost.py | 64 +++++++++++-------- 1 file changed, 38 insertions(+), 26 deletions(-) diff --git a/autogpt_platform/backend/backend/data/platform_cost.py b/autogpt_platform/backend/backend/data/platform_cost.py index 7d922b9f92..1c4c57f460 100644 --- a/autogpt_platform/backend/backend/data/platform_cost.py +++ b/autogpt_platform/backend/backend/data/platform_cost.py @@ -1,13 +1,12 @@ import asyncio +import json import logging from datetime import datetime, timedelta, timezone from typing import Any -from prisma.fields import Json -from prisma.models import PlatformCostLog as PrismaLog from pydantic import BaseModel -from backend.data.db import query_raw_with_schema +from backend.data.db import execute_raw_with_schema, query_raw_with_schema from backend.util.cache import cached logger = logging.getLogger(__name__) @@ -52,29 +51,42 @@ class PlatformCostEntry(BaseModel): async def log_platform_cost(entry: PlatformCostEntry) -> None: - await PrismaLog.prisma().create( - data={ - "userId": entry.user_id, - "graphExecId": entry.graph_exec_id, - "nodeExecId": entry.node_exec_id, - "graphId": entry.graph_id, - "nodeId": entry.node_id, - "blockId": entry.block_id, - "blockName": entry.block_name, - # Normalize to lowercase so the (provider, createdAt) index is always - # used without LOWER() on the read side. 
- "provider": entry.provider.lower(),
- "credentialId": entry.credential_id,
- "costMicrodollars": entry.cost_microdollars,
- "inputTokens": entry.input_tokens,
- "outputTokens": entry.output_tokens,
- "dataSize": entry.data_size,
- "duration": entry.duration,
- "model": entry.model,
- "trackingType": entry.tracking_type,
- "trackingAmount": entry.tracking_amount,
- "metadata": Json(entry.metadata) if entry.metadata is not None else None,
- }
+ await execute_raw_with_schema(
+ """
+ INSERT INTO {schema_prefix}"PlatformCostLog" (
+ "id", "userId", "graphExecId", "nodeExecId", "graphId", "nodeId",
+ "blockId", "blockName", "provider", "credentialId",
+ "costMicrodollars", "inputTokens", "outputTokens",
+ "dataSize", "duration", "model",
+ "trackingType", "trackingAmount", "metadata"
+ ) VALUES (
+ gen_random_uuid(), $1, $2, $3, $4, $5,
+ $6, $7, $8, $9,
+ $10, $11, $12,
+ $13, $14, $15,
+ $16, $17, $18::jsonb
+ )
+ """,
+ entry.user_id,
+ entry.graph_exec_id,
+ entry.node_exec_id,
+ entry.graph_id,
+ entry.node_id,
+ entry.block_id,
+ entry.block_name,
+ # Normalize to lowercase so the (provider, createdAt) index is always
+ # used without LOWER() on the read side.
+ entry.provider.lower(),
+ entry.credential_id,
+ entry.cost_microdollars,
+ entry.input_tokens,
+ entry.output_tokens,
+ entry.data_size,
+ entry.duration,
+ entry.model,
+ entry.tracking_type,
+ entry.tracking_amount,
+ json.dumps(entry.metadata) if entry.metadata is not None else None,
)