mirror of https://github.com/Significant-Gravitas/AutoGPT.git
synced 2026-01-24 06:28:04 -05:00

Compare commits: swiftyos/p...fix/reduce

5 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 0da7a54952 | |
| | ba0aa83de3 | |
| | 4c9333fc37 | |
| | 03c805ac5a | |
| | 24d91a6e3e | |
@@ -178,10 +178,5 @@ AYRSHARE_JWT_KEY=
 SMARTLEAD_API_KEY=
 ZEROBOUNCE_API_KEY=
 
-# PostHog Analytics
-# Get API key from https://posthog.com - Project Settings > Project API Key
-POSTHOG_API_KEY=
-POSTHOG_HOST=https://eu.i.posthog.com
-
 # Other Services
 AUTOMOD_API_KEY=
@@ -48,7 +48,6 @@ from .response_model import (
     StreamUsage,
 )
 from .tools import execute_tool, tools
-from .tracking import track_user_message
 
 logger = logging.getLogger(__name__)
 
@@ -104,33 +103,16 @@ async def _build_system_prompt(user_id: str | None) -> tuple[str, Any]:
     return compiled, understanding
 
 
-async def _generate_session_title(
-    message: str,
-    user_id: str | None = None,
-    session_id: str | None = None,
-) -> str | None:
+async def _generate_session_title(message: str) -> str | None:
     """Generate a concise title for a chat session based on the first message.
 
     Args:
         message: The first user message in the session
-        user_id: User ID for OpenRouter tracing (optional)
-        session_id: Session ID for OpenRouter tracing (optional)
 
     Returns:
         A short title (3-6 words) or None if generation fails
     """
     try:
-        # Build extra_body for OpenRouter tracing and PostHog analytics
-        extra_body: dict[str, Any] = {}
-        if user_id:
-            extra_body["user"] = user_id[:128]  # OpenRouter limit
-            extra_body["posthogDistinctId"] = user_id
-        if session_id:
-            extra_body["session_id"] = session_id[:128]  # OpenRouter limit
-        extra_body["posthogProperties"] = {
-            "environment": settings.config.app_env.value,
-        }
-
         response = await client.chat.completions.create(
             model=config.title_model,
             messages=[
@@ -145,7 +127,6 @@ async def _generate_session_title(
                 {"role": "user", "content": message[:500]},  # Limit input length
             ],
             max_tokens=20,
-            extra_body=extra_body,
         )
         title = response.choices[0].message.content
         if title:
@@ -256,14 +237,6 @@ async def stream_chat_completion(
             f"new message_count={len(session.messages)}"
         )
 
-        # Track user message in PostHog
-        if is_user_message:
-            track_user_message(
-                user_id=user_id,
-                session_id=session_id,
-                message_length=len(message),
-            )
-
         logger.info(
             f"Upserting session: {session.session_id} with user id {session.user_id}, "
             f"message_count={len(session.messages)}"
@@ -283,15 +256,10 @@ async def stream_chat_completion(
         # stale data issues when the main flow modifies the session
         captured_session_id = session_id
         captured_message = message
-        captured_user_id = user_id
 
         async def _update_title():
            try:
-                title = await _generate_session_title(
-                    captured_message,
-                    user_id=captured_user_id,
-                    session_id=captured_session_id,
-                )
+                title = await _generate_session_title(captured_message)
                 if title:
                     # Use dedicated title update function that doesn't
                     # touch messages, avoiding race conditions
@@ -730,20 +698,6 @@ async def _stream_chat_chunks(
                 f"{f' (retry {retry_count}/{MAX_RETRIES})' if retry_count > 0 else ''}"
             )
 
-            # Build extra_body for OpenRouter tracing and PostHog analytics
-            extra_body: dict[str, Any] = {
-                "posthogProperties": {
-                    "environment": settings.config.app_env.value,
-                },
-            }
-            if session.user_id:
-                extra_body["user"] = session.user_id[:128]  # OpenRouter limit
-                extra_body["posthogDistinctId"] = session.user_id
-            if session.session_id:
-                extra_body["session_id"] = session.session_id[
-                    :128
-                ]  # OpenRouter limit
-
             # Create the stream with proper types
             stream = await client.chat.completions.create(
                 model=model,
@@ -752,7 +706,6 @@ async def _stream_chat_chunks(
                 tool_choice="auto",
                 stream=True,
                 stream_options={"include_usage": True},
-                extra_body=extra_body,
             )
 
             # Variables to accumulate tool calls
@@ -1,10 +1,8 @@
-import logging
 from typing import TYPE_CHECKING, Any
 
 from openai.types.chat import ChatCompletionToolParam
 
 from backend.api.features.chat.model import ChatSession
-from backend.api.features.chat.tracking import track_tool_called
 
 from .add_understanding import AddUnderstandingTool
 from .agent_output import AgentOutputTool
@@ -22,8 +20,6 @@ from .search_docs import SearchDocsTool
 if TYPE_CHECKING:
     from backend.api.features.chat.response_model import StreamToolOutputAvailable
 
-logger = logging.getLogger(__name__)
-
 # Single source of truth for all tools
 TOOL_REGISTRY: dict[str, BaseTool] = {
     "add_understanding": AddUnderstandingTool(),
@@ -60,17 +56,4 @@ async def execute_tool(
     tool = TOOL_REGISTRY.get(tool_name)
     if not tool:
         raise ValueError(f"Tool {tool_name} not found")
-
-    # Track tool call in PostHog
-    logger.info(
-        f"Tracking tool call: tool={tool_name}, user={user_id}, "
-        f"session={session.session_id}, call_id={tool_call_id}"
-    )
-    track_tool_called(
-        user_id=user_id,
-        session_id=session.session_id,
-        tool_name=tool_name,
-        tool_call_id=tool_call_id,
-    )
-
     return await tool.execute(user_id, session, tool_call_id, **parameters)
@@ -8,10 +8,6 @@ from pydantic import BaseModel, Field, field_validator
 
 from backend.api.features.chat.config import ChatConfig
 from backend.api.features.chat.model import ChatSession
-from backend.api.features.chat.tracking import (
-    track_agent_run_success,
-    track_agent_scheduled,
-)
 from backend.api.features.library import db as library_db
 from backend.data.graph import GraphModel
 from backend.data.model import CredentialsMetaInput
@@ -457,16 +453,6 @@ class RunAgentTool(BaseTool):
                 session.successful_agent_runs.get(library_agent.graph_id, 0) + 1
             )
 
-            # Track in PostHog
-            track_agent_run_success(
-                user_id=user_id,
-                session_id=session_id,
-                graph_id=library_agent.graph_id,
-                graph_name=library_agent.name,
-                execution_id=execution.id,
-                library_agent_id=library_agent.id,
-            )
-
             library_agent_link = f"/library/agents/{library_agent.id}"
             return ExecutionStartedResponse(
                 message=(
@@ -548,18 +534,6 @@ class RunAgentTool(BaseTool):
                 session.successful_agent_schedules.get(library_agent.graph_id, 0) + 1
             )
 
-            # Track in PostHog
-            track_agent_scheduled(
-                user_id=user_id,
-                session_id=session_id,
-                graph_id=library_agent.graph_id,
-                graph_name=library_agent.name,
-                schedule_id=result.id,
-                schedule_name=schedule_name,
-                cron=cron,
-                library_agent_id=library_agent.id,
-            )
-
            library_agent_link = f"/library/agents/{library_agent.id}"
            return ExecutionStartedResponse(
                message=(
@@ -1,250 +0,0 @@
-"""PostHog analytics tracking for the chat system."""
-
-import atexit
-import logging
-from typing import Any
-
-from posthog import Posthog
-
-from backend.util.settings import Settings
-
-logger = logging.getLogger(__name__)
-settings = Settings()
-
-# PostHog client instance (lazily initialized)
-_posthog_client: Posthog | None = None
-
-
-def _shutdown_posthog() -> None:
-    """Flush and shutdown PostHog client on process exit."""
-    if _posthog_client is not None:
-        _posthog_client.flush()
-        _posthog_client.shutdown()
-
-
-atexit.register(_shutdown_posthog)
-
-
-def _get_posthog_client() -> Posthog | None:
-    """Get or create the PostHog client instance."""
-    global _posthog_client
-    if _posthog_client is not None:
-        return _posthog_client
-
-    if not settings.secrets.posthog_api_key:
-        logger.debug("PostHog API key not configured, analytics disabled")
-        return None
-
-    _posthog_client = Posthog(
-        settings.secrets.posthog_api_key,
-        host=settings.secrets.posthog_host,
-    )
-    logger.info(
-        f"PostHog client initialized with host: {settings.secrets.posthog_host}"
-    )
-    return _posthog_client
-
-
-def _get_base_properties() -> dict[str, Any]:
-    """Get base properties included in all events."""
-    return {
-        "environment": settings.config.app_env.value,
-        "source": "chat_copilot",
-    }
-
-
-def track_user_message(
-    user_id: str | None,
-    session_id: str,
-    message_length: int,
-) -> None:
-    """Track when a user sends a message in chat.
-
-    Args:
-        user_id: The user's ID (or None for anonymous)
-        session_id: The chat session ID
-        message_length: Length of the user's message
-    """
-    client = _get_posthog_client()
-    if not client:
-        return
-
-    try:
-        properties = {
-            **_get_base_properties(),
-            "session_id": session_id,
-            "message_length": message_length,
-        }
-        client.capture(
-            distinct_id=user_id or f"anonymous_{session_id}",
-            event="copilot_message_sent",
-            properties=properties,
-        )
-    except Exception as e:
-        logger.warning(f"Failed to track user message: {e}")
-
-
-def track_tool_called(
-    user_id: str | None,
-    session_id: str,
-    tool_name: str,
-    tool_call_id: str,
-) -> None:
-    """Track when a tool is called in chat.
-
-    Args:
-        user_id: The user's ID (or None for anonymous)
-        session_id: The chat session ID
-        tool_name: Name of the tool being called
-        tool_call_id: Unique ID of the tool call
-    """
-    client = _get_posthog_client()
-    if not client:
-        logger.info("PostHog client not available for tool tracking")
-        return
-
-    try:
-        properties = {
-            **_get_base_properties(),
-            "session_id": session_id,
-            "tool_name": tool_name,
-            "tool_call_id": tool_call_id,
-        }
-        distinct_id = user_id or f"anonymous_{session_id}"
-        logger.info(
-            f"Sending copilot_tool_called event to PostHog: distinct_id={distinct_id}, "
-            f"tool_name={tool_name}"
-        )
-        client.capture(
-            distinct_id=distinct_id,
-            event="copilot_tool_called",
-            properties=properties,
-        )
-    except Exception as e:
-        logger.warning(f"Failed to track tool call: {e}")
-
-
-def track_agent_run_success(
-    user_id: str,
-    session_id: str,
-    graph_id: str,
-    graph_name: str,
-    execution_id: str,
-    library_agent_id: str,
-) -> None:
-    """Track when an agent is successfully run.
-
-    Args:
-        user_id: The user's ID
-        session_id: The chat session ID
-        graph_id: ID of the agent graph
-        graph_name: Name of the agent
-        execution_id: ID of the execution
-        library_agent_id: ID of the library agent
-    """
-    client = _get_posthog_client()
-    if not client:
-        return
-
-    try:
-        properties = {
-            **_get_base_properties(),
-            "session_id": session_id,
-            "graph_id": graph_id,
-            "graph_name": graph_name,
-            "execution_id": execution_id,
-            "library_agent_id": library_agent_id,
-        }
-        client.capture(
-            distinct_id=user_id,
-            event="copilot_agent_run_success",
-            properties=properties,
-        )
-    except Exception as e:
-        logger.warning(f"Failed to track agent run: {e}")
-
-
-def track_agent_scheduled(
-    user_id: str,
-    session_id: str,
-    graph_id: str,
-    graph_name: str,
-    schedule_id: str,
-    schedule_name: str,
-    cron: str,
-    library_agent_id: str,
-) -> None:
-    """Track when an agent is successfully scheduled.
-
-    Args:
-        user_id: The user's ID
-        session_id: The chat session ID
-        graph_id: ID of the agent graph
-        graph_name: Name of the agent
-        schedule_id: ID of the schedule
-        schedule_name: Name of the schedule
-        cron: Cron expression for the schedule
-        library_agent_id: ID of the library agent
-    """
-    client = _get_posthog_client()
-    if not client:
-        return
-
-    try:
-        properties = {
-            **_get_base_properties(),
-            "session_id": session_id,
-            "graph_id": graph_id,
-            "graph_name": graph_name,
-            "schedule_id": schedule_id,
-            "schedule_name": schedule_name,
-            "cron": cron,
-            "library_agent_id": library_agent_id,
-        }
-        client.capture(
-            distinct_id=user_id,
-            event="copilot_agent_scheduled",
-            properties=properties,
-        )
-    except Exception as e:
-        logger.warning(f"Failed to track agent schedule: {e}")
-
-
-def track_trigger_setup(
-    user_id: str,
-    session_id: str,
-    graph_id: str,
-    graph_name: str,
-    trigger_type: str,
-    library_agent_id: str,
-) -> None:
-    """Track when a trigger is set up for an agent.
-
-    Args:
-        user_id: The user's ID
-        session_id: The chat session ID
-        graph_id: ID of the agent graph
-        graph_name: Name of the agent
-        trigger_type: Type of trigger (e.g., 'webhook')
-        library_agent_id: ID of the library agent
-    """
-    client = _get_posthog_client()
-    if not client:
-        return
-
-    try:
-        properties = {
-            **_get_base_properties(),
-            "session_id": session_id,
-            "graph_id": graph_id,
-            "graph_name": graph_name,
-            "trigger_type": trigger_type,
-            "library_agent_id": library_agent_id,
-        }
-        client.capture(
-            distinct_id=user_id,
-            event="copilot_trigger_setup",
-            properties=properties,
-        )
-    except Exception as e:
-        logger.warning(f"Failed to track trigger setup: {e}")
|
||||
|
||||
# Generate embedding for approved listing (blocking - admin operation)
|
||||
# Inside transaction: if embedding fails, entire transaction rolls back
|
||||
embedding_success = await ensure_embedding(
|
||||
await ensure_embedding(
|
||||
version_id=store_listing_version_id,
|
||||
name=store_listing_version.name,
|
||||
description=store_listing_version.description,
|
||||
@@ -1560,12 +1560,6 @@ async def review_store_submission(
|
||||
categories=store_listing_version.categories or [],
|
||||
tx=tx,
|
||||
)
|
||||
if not embedding_success:
|
||||
raise ValueError(
|
||||
f"Failed to generate embedding for listing {store_listing_version_id}. "
|
||||
"This is likely due to OpenAI API being unavailable. "
|
||||
"Please try again later or contact support if the issue persists."
|
||||
)
|
||||
|
||||
await prisma.models.StoreListing.prisma(tx).update(
|
||||
where={"id": store_listing_version.StoreListing.id},
|
||||
|
||||
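This moves failure signaling from a boolean to an exception: because `ensure_embedding` now raises inside the transaction, a failed embedding aborts the whole approval without an explicit check. A minimal sketch of the pattern, with `run_in_transaction` and `mark_approved` standing in hypothetically for the real Prisma transaction helper and update call:

```python
# Sketch: fail-fast inside a transaction. If ensure_embedding raises,
# nothing below it runs and the transaction helper rolls everything back.
async def approve_listing(version) -> None:
    async def _work(tx):
        await ensure_embedding(
            version_id=version.id,
            name=version.name,
            description=version.description,
            sub_heading=version.sub_heading,
            categories=version.categories or [],
            tx=tx,
        )
        # Only reached when the embedding was stored successfully.
        await mark_approved(version, tx=tx)  # hypothetical helper

    await run_in_transaction(_work)  # hypothetical transaction wrapper
```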
@@ -63,49 +63,42 @@ def build_searchable_text(
     return " ".join(parts)
 
 
-async def generate_embedding(text: str) -> list[float] | None:
+async def generate_embedding(text: str) -> list[float]:
     """
     Generate embedding for text using OpenAI API.
 
-    Returns None if embedding generation fails.
     Fail-fast: no retries to maintain consistency with approval flow.
+    Raises exceptions on failure - caller should handle.
     """
-    try:
-        client = get_openai_client()
-        if not client:
-            logger.error("openai_internal_api_key not set, cannot generate embedding")
-            return None
+    client = get_openai_client()
+    if not client:
+        raise RuntimeError("openai_internal_api_key not set, cannot generate embedding")
 
-        # Truncate text to token limit using tiktoken
-        # Character-based truncation is insufficient because token ratios vary by content type
-        enc = encoding_for_model(EMBEDDING_MODEL)
-        tokens = enc.encode(text)
-        if len(tokens) > EMBEDDING_MAX_TOKENS:
-            tokens = tokens[:EMBEDDING_MAX_TOKENS]
-            truncated_text = enc.decode(tokens)
-            logger.info(
-                f"Truncated text from {len(enc.encode(text))} to {len(tokens)} tokens"
-            )
-        else:
-            truncated_text = text
-
-        start_time = time.time()
-        response = await client.embeddings.create(
-            model=EMBEDDING_MODEL,
-            input=truncated_text,
-        )
-        latency_ms = (time.time() - start_time) * 1000
-
-        embedding = response.data[0].embedding
-        logger.info(
-            f"Generated embedding: {len(embedding)} dims, "
-            f"{len(tokens)} tokens, {latency_ms:.0f}ms"
-        )
-        return embedding
-
-    except Exception as e:
-        logger.error(f"Failed to generate embedding: {e}")
-        return None
+    # Truncate text to token limit using tiktoken
+    # Character-based truncation is insufficient because token ratios vary by content type
+    enc = encoding_for_model(EMBEDDING_MODEL)
+    tokens = enc.encode(text)
+    if len(tokens) > EMBEDDING_MAX_TOKENS:
+        tokens = tokens[:EMBEDDING_MAX_TOKENS]
+        truncated_text = enc.decode(tokens)
+        logger.info(
+            f"Truncated text from {len(enc.encode(text))} to {len(tokens)} tokens"
+        )
+    else:
+        truncated_text = text
+
+    start_time = time.time()
+    response = await client.embeddings.create(
+        model=EMBEDDING_MODEL,
+        input=truncated_text,
+    )
+    latency_ms = (time.time() - start_time) * 1000
+
+    embedding = response.data[0].embedding
+    logger.info(
+        f"Generated embedding: {len(embedding)} dims, "
+        f"{len(tokens)} tokens, {latency_ms:.0f}ms"
+    )
+    return embedding
 
 
 async def store_embedding(
@@ -144,48 +137,45 @@ async def store_content_embedding(
 
     New function for unified content embedding storage.
     Uses raw SQL since Prisma doesn't natively support pgvector.
+
+    Raises exceptions on failure - caller should handle.
     """
-    try:
-        client = tx if tx else prisma.get_client()
+    client = tx if tx else prisma.get_client()
 
-        # Convert embedding to PostgreSQL vector format
-        embedding_str = embedding_to_vector_string(embedding)
-        metadata_json = dumps(metadata or {})
+    # Convert embedding to PostgreSQL vector format
+    embedding_str = embedding_to_vector_string(embedding)
+    metadata_json = dumps(metadata or {})
 
-        # Upsert the embedding
-        # WHERE clause in DO UPDATE prevents PostgreSQL 15 bug with NULLS NOT DISTINCT
-        # Use unqualified ::vector - pgvector is in search_path on all environments
-        await execute_raw_with_schema(
-            """
-            INSERT INTO {schema_prefix}"UnifiedContentEmbedding" (
-                "id", "contentType", "contentId", "userId", "embedding", "searchableText", "metadata", "createdAt", "updatedAt"
-            )
-            VALUES (gen_random_uuid()::text, $1::{schema_prefix}"ContentType", $2, $3, $4::vector, $5, $6::jsonb, NOW(), NOW())
-            ON CONFLICT ("contentType", "contentId", "userId")
-            DO UPDATE SET
-                "embedding" = $4::vector,
-                "searchableText" = $5,
-                "metadata" = $6::jsonb,
-                "updatedAt" = NOW()
-            WHERE {schema_prefix}"UnifiedContentEmbedding"."contentType" = $1::{schema_prefix}"ContentType"
-              AND {schema_prefix}"UnifiedContentEmbedding"."contentId" = $2
-              AND ({schema_prefix}"UnifiedContentEmbedding"."userId" = $3 OR ($3 IS NULL AND {schema_prefix}"UnifiedContentEmbedding"."userId" IS NULL))
-            """,
-            content_type,
-            content_id,
-            user_id,
-            embedding_str,
-            searchable_text,
-            metadata_json,
-            client=client,
-        )
+    # Upsert the embedding
+    # WHERE clause in DO UPDATE prevents PostgreSQL 15 bug with NULLS NOT DISTINCT
+    # Use unqualified ::vector - pgvector is in search_path on all environments
+    await execute_raw_with_schema(
+        """
+        INSERT INTO {schema_prefix}"UnifiedContentEmbedding" (
+            "id", "contentType", "contentId", "userId", "embedding", "searchableText", "metadata", "createdAt", "updatedAt"
+        )
+        VALUES (gen_random_uuid()::text, $1::{schema_prefix}"ContentType", $2, $3, $4::vector, $5, $6::jsonb, NOW(), NOW())
+        ON CONFLICT ("contentType", "contentId", "userId")
+        DO UPDATE SET
+            "embedding" = $4::vector,
+            "searchableText" = $5,
+            "metadata" = $6::jsonb,
+            "updatedAt" = NOW()
+        WHERE {schema_prefix}"UnifiedContentEmbedding"."contentType" = $1::{schema_prefix}"ContentType"
+          AND {schema_prefix}"UnifiedContentEmbedding"."contentId" = $2
+          AND ({schema_prefix}"UnifiedContentEmbedding"."userId" = $3 OR ($3 IS NULL AND {schema_prefix}"UnifiedContentEmbedding"."userId" IS NULL))
+        """,
+        content_type,
+        content_id,
+        user_id,
+        embedding_str,
+        searchable_text,
+        metadata_json,
+        client=client,
+    )
 
-        logger.info(f"Stored embedding for {content_type}:{content_id}")
-        return True
-
-    except Exception as e:
-        logger.error(f"Failed to store embedding for {content_type}:{content_id}: {e}")
-        return False
+    logger.info(f"Stored embedding for {content_type}:{content_id}")
+    return True
 
 
 async def get_embedding(version_id: str) -> dict[str, Any] | None:
@@ -217,34 +207,31 @@ async def get_content_embedding(
 
     New function for unified content embedding retrieval.
     Returns dict with contentType, contentId, embedding, timestamps or None if not found.
+
+    Raises exceptions on failure - caller should handle.
     """
-    try:
-        result = await query_raw_with_schema(
-            """
-            SELECT
-                "contentType",
-                "contentId",
-                "userId",
-                "embedding"::text as "embedding",
-                "searchableText",
-                "metadata",
-                "createdAt",
-                "updatedAt"
-            FROM {schema_prefix}"UnifiedContentEmbedding"
-            WHERE "contentType" = $1::{schema_prefix}"ContentType" AND "contentId" = $2 AND ("userId" = $3 OR ($3 IS NULL AND "userId" IS NULL))
-            """,
-            content_type,
-            content_id,
-            user_id,
-        )
+    result = await query_raw_with_schema(
+        """
+        SELECT
+            "contentType",
+            "contentId",
+            "userId",
+            "embedding"::text as "embedding",
+            "searchableText",
+            "metadata",
+            "createdAt",
+            "updatedAt"
+        FROM {schema_prefix}"UnifiedContentEmbedding"
+        WHERE "contentType" = $1::{schema_prefix}"ContentType" AND "contentId" = $2 AND ("userId" = $3 OR ($3 IS NULL AND "userId" IS NULL))
+        """,
+        content_type,
+        content_id,
+        user_id,
+    )
 
-        if result and len(result) > 0:
-            return result[0]
-        return None
-
-    except Exception as e:
-        logger.error(f"Failed to get embedding for {content_type}:{content_id}: {e}")
-        return None
+    if result and len(result) > 0:
+        return result[0]
+    return None
 
 
 async def ensure_embedding(
@@ -272,46 +259,38 @@ async def ensure_embedding(
         tx: Optional transaction client
 
     Returns:
-        True if embedding exists/was created, False on failure
+        True if embedding exists/was created
+
+    Raises exceptions on failure - caller should handle.
     """
-    try:
-        # Check if embedding already exists
-        if not force:
-            existing = await get_embedding(version_id)
-            if existing and existing.get("embedding"):
-                logger.debug(f"Embedding for version {version_id} already exists")
-                return True
+    # Check if embedding already exists
+    if not force:
+        existing = await get_embedding(version_id)
+        if existing and existing.get("embedding"):
+            logger.debug(f"Embedding for version {version_id} already exists")
+            return True
 
-        # Build searchable text for embedding
-        searchable_text = build_searchable_text(
-            name, description, sub_heading, categories
-        )
+    # Build searchable text for embedding
+    searchable_text = build_searchable_text(name, description, sub_heading, categories)
 
-        # Generate new embedding
-        embedding = await generate_embedding(searchable_text)
-        if embedding is None:
-            logger.warning(f"Could not generate embedding for version {version_id}")
-            return False
+    # Generate new embedding
+    embedding = await generate_embedding(searchable_text)
 
-        # Store the embedding with metadata using new function
-        metadata = {
-            "name": name,
-            "subHeading": sub_heading,
-            "categories": categories,
-        }
-        return await store_content_embedding(
-            content_type=ContentType.STORE_AGENT,
-            content_id=version_id,
-            embedding=embedding,
-            searchable_text=searchable_text,
-            metadata=metadata,
-            user_id=None,  # Store agents are public
-            tx=tx,
-        )
-
-    except Exception as e:
-        logger.error(f"Failed to ensure embedding for version {version_id}: {e}")
-        return False
+    # Store the embedding with metadata using new function
+    metadata = {
+        "name": name,
+        "subHeading": sub_heading,
+        "categories": categories,
+    }
+    return await store_content_embedding(
+        content_type=ContentType.STORE_AGENT,
+        content_id=version_id,
+        embedding=embedding,
+        searchable_text=searchable_text,
+        metadata=metadata,
+        user_id=None,  # Store agents are public
+        tx=tx,
+    )
 
 
 async def delete_embedding(version_id: str) -> bool:
@@ -521,6 +500,24 @@ async def backfill_all_content_types(batch_size: int = 10) -> dict[str, Any]:
         success = sum(1 for result in results if result is True)
         failed = len(results) - success
 
+        # Aggregate unique errors to avoid Sentry spam
+        if failed > 0:
+            # Group errors by type and message
+            error_summary: dict[str, int] = {}
+            for result in results:
+                if isinstance(result, Exception):
+                    error_key = f"{type(result).__name__}: {str(result)}"
+                    error_summary[error_key] = error_summary.get(error_key, 0) + 1
+
+            # Log aggregated error summary
+            error_details = ", ".join(
+                f"{error} ({count}x)" for error, count in error_summary.items()
+            )
+            logger.error(
+                f"{content_type.value}: {failed}/{len(results)} embeddings failed. "
+                f"Errors: {error_details}"
+            )
+
         results_by_type[content_type.value] = {
             "processed": len(missing_items),
             "success": success,
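The aggregation above only works if `results` can contain exception objects, which is what `asyncio.gather(..., return_exceptions=True)` produces; a self-contained sketch of that combination (`embed_one` is a hypothetical per-item worker, not the production code):

```python
import asyncio


async def embed_one(item_id: str) -> bool:
    """Hypothetical per-item worker; may raise on API errors."""
    if item_id.startswith("bad"):
        raise RuntimeError("OpenAI API unavailable")
    return True


async def run_batch(item_ids: list[str]) -> None:
    # return_exceptions=True turns failures into values instead of aborting the batch
    results = await asyncio.gather(
        *(embed_one(i) for i in item_ids), return_exceptions=True
    )
    success = sum(1 for r in results if r is True)
    summary: dict[str, int] = {}
    for r in results:
        if isinstance(r, Exception):
            key = f"{type(r).__name__}: {r}"
            summary[key] = summary.get(key, 0) + 1  # one line per unique error
    print(f"{success}/{len(results)} ok", summary)


asyncio.run(run_batch(["a", "bad-1", "bad-2"]))
```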
@@ -557,11 +554,12 @@ async def backfill_all_content_types(batch_size: int = 10) -> dict[str, Any]:
     }
 
 
-async def embed_query(query: str) -> list[float] | None:
+async def embed_query(query: str) -> list[float]:
     """
     Generate embedding for a search query.
 
     Same as generate_embedding but with clearer intent.
+    Raises exceptions on failure - caller should handle.
     """
     return await generate_embedding(query)
@@ -594,40 +592,30 @@ async def ensure_content_embedding(
         tx: Optional transaction client
 
     Returns:
-        True if embedding exists/was created, False on failure
+        True if embedding exists/was created
+
+    Raises exceptions on failure - caller should handle.
     """
-    try:
-        # Check if embedding already exists
-        if not force:
-            existing = await get_content_embedding(content_type, content_id, user_id)
-            if existing and existing.get("embedding"):
-                logger.debug(
-                    f"Embedding for {content_type}:{content_id} already exists"
-                )
-                return True
+    # Check if embedding already exists
+    if not force:
+        existing = await get_content_embedding(content_type, content_id, user_id)
+        if existing and existing.get("embedding"):
+            logger.debug(f"Embedding for {content_type}:{content_id} already exists")
+            return True
 
-        # Generate new embedding
-        embedding = await generate_embedding(searchable_text)
-        if embedding is None:
-            logger.warning(
-                f"Could not generate embedding for {content_type}:{content_id}"
-            )
-            return False
+    # Generate new embedding
+    embedding = await generate_embedding(searchable_text)
 
-        # Store the embedding
-        return await store_content_embedding(
-            content_type=content_type,
-            content_id=content_id,
-            embedding=embedding,
-            searchable_text=searchable_text,
-            metadata=metadata or {},
-            user_id=user_id,
-            tx=tx,
-        )
-
-    except Exception as e:
-        logger.error(f"Failed to ensure embedding for {content_type}:{content_id}: {e}")
-        return False
+    # Store the embedding
+    return await store_content_embedding(
+        content_type=content_type,
+        content_id=content_id,
+        embedding=embedding,
+        searchable_text=searchable_text,
+        metadata=metadata or {},
+        user_id=user_id,
+        tx=tx,
+    )
 
 
 async def cleanup_orphaned_embeddings() -> dict[str, Any]:
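With `| None` gone from the return types, `embedding is None` checks no longer mean anything; callers that can tolerate a missing embedding now catch instead. A minimal caller-side sketch under the new contract (the `refresh_embedding_best_effort` wrapper is illustrative):

```python
import logging

logger = logging.getLogger(__name__)


# Sketch: tolerant caller under the fail-fast contract. Strict callers
# (e.g. the admin approval flow) omit the try/except and let the error propagate.
async def refresh_embedding_best_effort(content_type, content_id, text) -> bool:
    try:
        return await ensure_content_embedding(
            content_type=content_type,
            content_id=content_id,
            searchable_text=text,
        )
    except Exception as e:
        logger.warning(f"Embedding skipped for {content_type}:{content_id}: {e}")
        return False
```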
@@ -854,9 +842,8 @@ async def semantic_search(
         limit = 100
 
     # Generate query embedding
-    query_embedding = await embed_query(query)
-
-    if query_embedding is not None:
+    try:
+        query_embedding = await embed_query(query)
         # Semantic search with embeddings
         embedding_str = embedding_to_vector_string(query_embedding)
@@ -907,24 +894,21 @@ async def semantic_search(
             """
         )
 
-        try:
-            results = await query_raw_with_schema(sql, *params)
-            return [
-                {
-                    "content_id": row["content_id"],
-                    "content_type": row["content_type"],
-                    "searchable_text": row["searchable_text"],
-                    "metadata": row["metadata"],
-                    "similarity": float(row["similarity"]),
-                }
-                for row in results
-            ]
-        except Exception as e:
-            logger.error(f"Semantic search failed: {e}")
-            # Fall through to lexical search below
+        results = await query_raw_with_schema(sql, *params)
+        return [
+            {
+                "content_id": row["content_id"],
+                "content_type": row["content_type"],
+                "searchable_text": row["searchable_text"],
+                "metadata": row["metadata"],
+                "similarity": float(row["similarity"]),
+            }
+            for row in results
+        ]
+    except Exception as e:
+        logger.warning(f"Semantic search failed, falling back to lexical search: {e}")
 
     # Fallback to lexical search if embeddings unavailable
-    logger.warning("Falling back to lexical search (embeddings unavailable)")
 
     params_lexical: list[Any] = [limit]
     user_filter = ""
@@ -298,17 +298,16 @@ async def test_schema_handling_error_cases():
         mock_client.execute_raw.side_effect = Exception("Database error")
         mock_get_client.return_value = mock_client
 
-        result = await embeddings.store_content_embedding(
-            content_type=ContentType.STORE_AGENT,
-            content_id="test-id",
-            embedding=[0.1] * EMBEDDING_DIM,
-            searchable_text="test",
-            metadata=None,
-            user_id=None,
-        )
-
-        # Should return False on error, not raise
-        assert result is False
+        # Should raise exception on error
+        with pytest.raises(Exception, match="Database error"):
+            await embeddings.store_content_embedding(
+                content_type=ContentType.STORE_AGENT,
+                content_id="test-id",
+                embedding=[0.1] * EMBEDDING_DIM,
+                searchable_text="test",
+                metadata=None,
+                user_id=None,
+            )
 
 
 if __name__ == "__main__":
@@ -80,9 +80,8 @@ async def test_generate_embedding_no_api_key():
     ) as mock_get_client:
         mock_get_client.return_value = None
 
-        result = await embeddings.generate_embedding("test text")
-
-        assert result is None
+        with pytest.raises(RuntimeError, match="openai_internal_api_key not set"):
+            await embeddings.generate_embedding("test text")
 
 
 @pytest.mark.asyncio(loop_scope="session")
@@ -97,9 +96,8 @@ async def test_generate_embedding_api_error():
     ) as mock_get_client:
         mock_get_client.return_value = mock_client
 
-        result = await embeddings.generate_embedding("test text")
-
-        assert result is None
+        with pytest.raises(Exception, match="API Error"):
+            await embeddings.generate_embedding("test text")
 
 
 @pytest.mark.asyncio(loop_scope="session")
@@ -173,11 +171,10 @@ async def test_store_embedding_database_error(mocker):
 
     embedding = [0.1, 0.2, 0.3]
 
-    result = await embeddings.store_embedding(
-        version_id="test-version-id", embedding=embedding, tx=mock_client
-    )
-
-    assert result is False
+    with pytest.raises(Exception, match="Database error"):
+        await embeddings.store_embedding(
+            version_id="test-version-id", embedding=embedding, tx=mock_client
+        )
 
 
 @pytest.mark.asyncio(loop_scope="session")
@@ -277,17 +274,16 @@ async def test_ensure_embedding_create_new(mock_get, mock_store, mock_generate):
 async def test_ensure_embedding_generation_fails(mock_get, mock_generate):
     """Test ensure_embedding when generation fails."""
     mock_get.return_value = None
-    mock_generate.return_value = None
+    mock_generate.side_effect = Exception("Generation failed")
 
-    result = await embeddings.ensure_embedding(
-        version_id="test-id",
-        name="Test",
-        description="Test description",
-        sub_heading="Test heading",
-        categories=["test"],
-    )
-
-    assert result is False
+    with pytest.raises(Exception, match="Generation failed"):
+        await embeddings.ensure_embedding(
+            version_id="test-id",
+            name="Test",
+            description="Test description",
+            sub_heading="Test heading",
+            categories=["test"],
+        )
 
 
 @pytest.mark.asyncio(loop_scope="session")
@@ -186,13 +186,12 @@ async def unified_hybrid_search(
 
     offset = (page - 1) * page_size
 
-    # Generate query embedding
-    query_embedding = await embed_query(query)
-
-    # Graceful degradation if embedding unavailable
-    if query_embedding is None or not query_embedding:
+    # Generate query embedding with graceful degradation
+    try:
+        query_embedding = await embed_query(query)
+    except Exception as e:
         logger.warning(
-            "Failed to generate query embedding - falling back to lexical-only search. "
+            f"Failed to generate query embedding - falling back to lexical-only search: {e}. "
             "Check that openai_internal_api_key is configured and OpenAI API is accessible."
         )
         query_embedding = [0.0] * EMBEDDING_DIM
@@ -464,13 +463,12 @@ async def hybrid_search(
 
     offset = (page - 1) * page_size
 
-    # Generate query embedding
-    query_embedding = await embed_query(query)
-
-    # Graceful degradation
-    if query_embedding is None or not query_embedding:
+    # Generate query embedding with graceful degradation
+    try:
+        query_embedding = await embed_query(query)
+    except Exception as e:
         logger.warning(
-            "Failed to generate query embedding - falling back to lexical-only search."
+            f"Failed to generate query embedding - falling back to lexical-only search: {e}"
         )
         query_embedding = [0.0] * EMBEDDING_DIM
     total_non_semantic = (
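The `[0.0] * EMBEDDING_DIM` fallback works because a zero query vector carries no semantic signal: the similarity term becomes effectively constant across rows, so the hybrid ranking collapses to the lexical component. A toy illustration of that collapse (the weights and score combiner are illustrative, not the production SQL):

```python
# Toy illustration: with a zero query vector the semantic term is the same
# for every row, so ordering is decided entirely by the lexical score.
rows = [("doc-a", 0.91, 0.20), ("doc-b", 0.35, 0.80)]  # (id, semantic_sim, lexical)


def hybrid_score(semantic_sim: float, lexical: float, w: float = 0.5) -> float:
    return w * semantic_sim + (1 - w) * lexical


# Embedding available: doc-a wins on semantic similarity.
print(max(rows, key=lambda r: hybrid_score(r[1], r[2]))[0])  # doc-a
# Zero vector: semantic term pinned to 0 for all rows; doc-b wins lexically.
print(max(rows, key=lambda r: hybrid_score(0.0, r[2]))[0])   # doc-b
```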
@@ -172,8 +172,8 @@ async def test_hybrid_search_without_embeddings():
         with patch(
             "backend.api.features.store.hybrid_search.query_raw_with_schema"
         ) as mock_query:
-            # Simulate embedding failure
-            mock_embed.return_value = None
+            # Simulate embedding failure by raising exception
+            mock_embed.side_effect = Exception("Embedding generation failed")
             mock_query.return_value = mock_results
 
             # Should NOT raise - graceful degradation
@@ -613,7 +613,9 @@ async def test_unified_hybrid_search_graceful_degradation():
             "backend.api.features.store.hybrid_search.embed_query"
         ) as mock_embed:
             mock_query.return_value = mock_results
-            mock_embed.return_value = None  # Embedding failure
+            mock_embed.side_effect = Exception(
+                "Embedding generation failed"
+            )  # Embedding failure
 
             # Should NOT raise - graceful degradation
             results, total = await unified_hybrid_search(
@@ -666,12 +666,6 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
         default="https://cloud.langfuse.com", description="Langfuse host URL"
     )
 
-    # PostHog analytics
-    posthog_api_key: str = Field(default="", description="PostHog API key")
-    posthog_host: str = Field(
-        default="https://us.i.posthog.com", description="PostHog host URL"
-    )
-
     # Add more secret fields as needed
     model_config = SettingsConfigDict(
         env_file=".env",
12 autogpt_platform/backend/poetry.lock (generated)
@@ -4204,14 +4204,14 @@ strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""}
 
 [[package]]
 name = "posthog"
-version = "7.6.0"
+version = "6.1.1"
 description = "Integrate PostHog into any python application."
 optional = false
-python-versions = ">=3.10"
+python-versions = ">=3.9"
 groups = ["main"]
 files = [
-    {file = "posthog-7.6.0-py3-none-any.whl", hash = "sha256:c4dd78cf77c4fecceb965f86066e5ac37886ef867d68ffe75a1db5d681d7d9ad"},
-    {file = "posthog-7.6.0.tar.gz", hash = "sha256:941dfd278ee427c9b14640f09b35b5bb52a71bdf028d7dbb7307e1838fd3002e"},
+    {file = "posthog-6.1.1-py3-none-any.whl", hash = "sha256:329fd3d06b4d54cec925f47235bd8e327c91403c2f9ec38f1deb849535934dba"},
+    {file = "posthog-6.1.1.tar.gz", hash = "sha256:b453f54c4a2589da859fd575dd3bf86fcb40580727ec399535f268b1b9f318b8"},
 ]
 
 [package.dependencies]
@@ -4225,7 +4225,7 @@ typing-extensions = ">=4.2.0"
 
 [package.extras]
 dev = ["django-stubs", "lxml", "mypy", "mypy-baseline", "packaging", "pre-commit", "pydantic", "ruff", "setuptools", "tomli", "tomli_w", "twine", "types-mock", "types-python-dateutil", "types-requests", "types-setuptools", "types-six", "wheel"]
 langchain = ["langchain (>=0.2.0)"]
-test = ["anthropic (>=0.72)", "coverage", "django", "freezegun (==1.5.1)", "google-genai", "langchain-anthropic (>=1.0)", "langchain-community (>=0.4)", "langchain-core (>=1.0)", "langchain-openai (>=1.0)", "langgraph (>=1.0)", "mock (>=2.0.0)", "openai (>=2.0)", "parameterized (>=0.8.1)", "pydantic", "pytest", "pytest-asyncio", "pytest-timeout"]
+test = ["anthropic", "coverage", "django", "freezegun (==1.5.1)", "google-genai", "langchain-anthropic (>=0.3.15)", "langchain-community (>=0.3.25)", "langchain-core (>=0.3.65)", "langchain-openai (>=0.3.22)", "langgraph (>=0.4.8)", "mock (>=2.0.0)", "openai", "parameterized (>=0.8.1)", "pydantic", "pytest", "pytest-asyncio", "pytest-timeout"]
 
 [[package]]
 name = "postmarker"
@@ -7512,4 +7512,4 @@ cffi = ["cffi (>=1.11)"]
 
 [metadata]
 lock-version = "2.1"
 python-versions = ">=3.10,<3.14"
-content-hash = "ee5742dc1a9df50dfc06d4b26a1682cbb2b25cab6b79ce5625ec272f93e4f4bf"
+content-hash = "18b92e09596298c82432e4d0a85cb6d80a40b4229bee0a0c15f0529fd6cb21a4"
@@ -85,7 +85,6 @@ exa-py = "^1.14.20"
 croniter = "^6.0.0"
 stagehand = "^0.5.1"
 gravitas-md2gdocs = "^0.1.0"
-posthog = "^7.6.0"
 
 [tool.poetry.group.dev.dependencies]
 aiohappyeyeballs = "^2.6.1"
@@ -30,7 +30,3 @@ NEXT_PUBLIC_TURNSTILE=disabled
 
 # PR previews
 NEXT_PUBLIC_PREVIEW_STEALING_DEV=
-
-# PostHog Analytics
-NEXT_PUBLIC_POSTHOG_KEY=
-NEXT_PUBLIC_POSTHOG_HOST=https://eu.i.posthog.com
@@ -34,7 +34,6 @@
     "@hookform/resolvers": "5.2.2",
     "@next/third-parties": "15.4.6",
     "@phosphor-icons/react": "2.1.10",
-    "@posthog/react": "1.7.0",
     "@radix-ui/react-accordion": "1.2.12",
     "@radix-ui/react-alert-dialog": "1.1.15",
     "@radix-ui/react-avatar": "1.1.10",
@@ -92,7 +91,6 @@
     "next-themes": "0.4.6",
     "nuqs": "2.7.2",
     "party-js": "2.2.0",
-    "posthog-js": "1.334.1",
     "react": "18.3.1",
     "react-currency-input-field": "4.0.3",
     "react-day-picker": "9.11.1",
@@ -122,6 +120,7 @@
   },
   "devDependencies": {
     "@chromatic-com/storybook": "4.1.2",
+    "happy-dom": "20.3.4",
     "@opentelemetry/instrumentation": "0.209.0",
     "@playwright/test": "1.56.1",
     "@storybook/addon-a11y": "9.1.5",
@@ -149,7 +148,6 @@
     "eslint": "8.57.1",
     "eslint-config-next": "15.5.7",
     "eslint-plugin-storybook": "9.1.5",
-    "happy-dom": "20.3.4",
     "import-in-the-middle": "2.0.2",
     "msw": "2.11.6",
     "msw-storybook-addon": "2.0.6",
246 autogpt_platform/frontend/pnpm-lock.yaml (generated)
@@ -23,9 +23,6 @@ importers:
       '@phosphor-icons/react':
         specifier: 2.1.10
         version: 2.1.10(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
-      '@posthog/react':
-        specifier: 1.7.0
-        version: 1.7.0(@types/react@18.3.17)(posthog-js@1.334.1)(react@18.3.1)
       '@radix-ui/react-accordion':
         specifier: 1.2.12
         version: 1.2.12(@types/react-dom@18.3.5(@types/react@18.3.17))(@types/react@18.3.17)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
@@ -197,9 +194,6 @@ importers:
       party-js:
         specifier: 2.2.0
         version: 2.2.0
-      posthog-js:
-        specifier: 1.334.1
-        version: 1.334.1
       react:
         specifier: 18.3.1
         version: 18.3.1
@@ -1800,10 +1794,6 @@ packages:
   '@open-draft/until@2.1.0':
     resolution: {integrity: sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==}
 
-  '@opentelemetry/api-logs@0.208.0':
-    resolution: {integrity: sha512-CjruKY9V6NMssL/T1kAFgzosF1v9o6oeN+aX5JB/C/xPNtmgIJqcXHG7fA82Ou1zCpWGl4lROQUKwUNE1pMCyg==}
-    engines: {node: '>=8.0.0'}
-
   '@opentelemetry/api-logs@0.209.0':
     resolution: {integrity: sha512-xomnUNi7TiAGtOgs0tb54LyrjRZLu9shJGGwkcN7NgtiPYOpNnKLkRJtzZvTjD/w6knSZH9sFZcUSUovYOPg6A==}
     engines: {node: '>=8.0.0'}
@@ -1824,12 +1814,6 @@ packages:
     peerDependencies:
       '@opentelemetry/api': '>=1.0.0 <1.10.0'
 
-  '@opentelemetry/exporter-logs-otlp-http@0.208.0':
-    resolution: {integrity: sha512-jOv40Bs9jy9bZVLo/i8FwUiuCvbjWDI+ZW13wimJm4LjnlwJxGgB+N/VWOZUTpM+ah/awXeQqKdNlpLf2EjvYg==}
-    engines: {node: ^18.19.0 || >=20.6.0}
-    peerDependencies:
-      '@opentelemetry/api': ^1.3.0
-
   '@opentelemetry/instrumentation-amqplib@0.55.0':
     resolution: {integrity: sha512-5ULoU8p+tWcQw5PDYZn8rySptGSLZHNX/7srqo2TioPnAAcvTy6sQFQXsNPrAnyRRtYGMetXVyZUy5OaX1+IfA==}
     engines: {node: ^18.19.0 || >=20.6.0}
@@ -1968,18 +1952,6 @@ packages:
     peerDependencies:
       '@opentelemetry/api': ^1.3.0
 
-  '@opentelemetry/otlp-exporter-base@0.208.0':
-    resolution: {integrity: sha512-gMd39gIfVb2OgxldxUtOwGJYSH8P1kVFFlJLuut32L6KgUC4gl1dMhn+YC2mGn0bDOiQYSk/uHOdSjuKp58vvA==}
-    engines: {node: ^18.19.0 || >=20.6.0}
-    peerDependencies:
-      '@opentelemetry/api': ^1.3.0
-
-  '@opentelemetry/otlp-transformer@0.208.0':
-    resolution: {integrity: sha512-DCFPY8C6lAQHUNkzcNT9R+qYExvsk6C5Bto2pbNxgicpcSWbe2WHShLxkOxIdNcBiYPdVHv/e7vH7K6TI+C+fQ==}
-    engines: {node: ^18.19.0 || >=20.6.0}
-    peerDependencies:
-      '@opentelemetry/api': ^1.3.0
-
   '@opentelemetry/redis-common@0.38.2':
     resolution: {integrity: sha512-1BCcU93iwSRZvDAgwUxC/DV4T/406SkMfxGqu5ojc3AvNI+I9GhV7v0J1HljsczuuhcnFLYqD5VmwVXfCGHzxA==}
     engines: {node: ^18.19.0 || >=20.6.0}
@@ -1990,18 +1962,6 @@ packages:
     peerDependencies:
       '@opentelemetry/api': '>=1.3.0 <1.10.0'
 
-  '@opentelemetry/sdk-logs@0.208.0':
-    resolution: {integrity: sha512-QlAyL1jRpOeaqx7/leG1vJMp84g0xKP6gJmfELBpnI4O/9xPX+Hu5m1POk9Kl+veNkyth5t19hRlN6tNY1sjbA==}
-    engines: {node: ^18.19.0 || >=20.6.0}
-    peerDependencies:
-      '@opentelemetry/api': '>=1.4.0 <1.10.0'
-
-  '@opentelemetry/sdk-metrics@2.2.0':
-    resolution: {integrity: sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw==}
-    engines: {node: ^18.19.0 || >=20.6.0}
-    peerDependencies:
-      '@opentelemetry/api': '>=1.9.0 <1.10.0'
-
   '@opentelemetry/sdk-trace-base@2.2.0':
     resolution: {integrity: sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw==}
     engines: {node: ^18.19.0 || >=20.6.0}
@@ -2090,57 +2050,11 @@ packages:
       webpack-plugin-serve:
         optional: true
 
-  '@posthog/core@1.13.0':
-    resolution: {integrity: sha512-knjncrk7qRmssFRbGzBl1Tunt21GRpe0Wv+uVelyL0Rh7PdQUsgguulzXFTps8hA6wPwTU4kq85qnbAJ3eH6Wg==}
-
-  '@posthog/react@1.7.0':
-    resolution: {integrity: sha512-pM7GL7z/rKjiIwosbRiQA3buhLI6vUo+wg+T/ZrVZC7O5bVU07TfgNZTcuOj8E9dx7vDbfNrc1kjDN7PKMM8ug==}
-    peerDependencies:
-      '@types/react': '>=16.8.0'
-      posthog-js: '>=1.257.2'
-      react: '>=16.8.0'
-    peerDependenciesMeta:
-      '@types/react':
-        optional: true
-
-  '@posthog/types@1.334.1':
-    resolution: {integrity: sha512-ypFnwTO7qbV7icylLbujbamPdQXbJq0a61GUUBnJAeTbBw/qYPIss5IRYICcbCj0uunQrwD7/CGxVb5TOYKWgA==}
-
   '@prisma/instrumentation@6.19.0':
     resolution: {integrity: sha512-QcuYy25pkXM8BJ37wVFBO7Zh34nyRV1GOb2n3lPkkbRYfl4hWl3PTcImP41P0KrzVXfa/45p6eVCos27x3exIg==}
     peerDependencies:
       '@opentelemetry/api': ^1.8
 
-  '@protobufjs/aspromise@1.1.2':
-    resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==}
-
-  '@protobufjs/base64@1.1.2':
-    resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==}
-
-  '@protobufjs/codegen@2.0.4':
-    resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==}
-
-  '@protobufjs/eventemitter@1.1.0':
-    resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==}
-
-  '@protobufjs/fetch@1.1.0':
-    resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==}
-
-  '@protobufjs/float@1.0.2':
-    resolution: {integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==}
-
-  '@protobufjs/inquire@1.1.0':
-    resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==}
-
-  '@protobufjs/path@1.1.2':
-    resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==}
-
-  '@protobufjs/pool@1.1.0':
-    resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==}
-
-  '@protobufjs/utf8@1.1.0':
-    resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==}
-
   '@radix-ui/number@1.1.1':
     resolution: {integrity: sha512-MkKCwxlXTgz6CFoJx3pCwn07GKp36+aZyu/u2Ln2VrA5DcdyCZkASEDBTd8x5whTQQL5CiYf4prXKLcgQdv29g==}
 
@@ -3487,9 +3401,6 @@ packages:
   '@types/tedious@4.0.14':
     resolution: {integrity: sha512-KHPsfX/FoVbUGbyYvk1q9MMQHLPeRZhRJZdO45Q4YjvFkv4hMNghCWTvy7rdKessBsmtz4euWCWAB6/tVpI1Iw==}
 
-  '@types/trusted-types@2.0.7':
-    resolution: {integrity: sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==}
-
   '@types/unist@2.0.11':
     resolution: {integrity: sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==}
@@ -4367,9 +4278,6 @@ packages:
   core-js-pure@3.47.0:
     resolution: {integrity: sha512-BcxeDbzUrRnXGYIVAGFtcGQVNpFcUhVjr6W7F8XktvQW2iJP9e66GP6xdKotCRFlrxBvNIBrhwKteRXqMV86Nw==}
 
-  core-js@3.48.0:
-    resolution: {integrity: sha512-zpEHTy1fjTMZCKLHUZoVeylt9XrzaIN2rbPXEt0k+q7JE5CkCZdo6bNq55bn24a69CH7ErAVLKijxJja4fw+UQ==}
-
   core-util-is@1.0.3:
     resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==}
 
@@ -4661,9 +4569,6 @@ packages:
     resolution: {integrity: sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==}
     engines: {node: '>= 4'}
 
-  dompurify@3.3.1:
-    resolution: {integrity: sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q==}
-
   domutils@2.8.0:
     resolution: {integrity: sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==}
 
@@ -5034,9 +4939,6 @@ packages:
       picomatch:
         optional: true
 
-  fflate@0.4.8:
-    resolution: {integrity: sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA==}
-
   file-entry-cache@6.0.1:
     resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==}
     engines: {node: ^10.12.0 || >=12.0.0}
@@ -5843,9 +5745,6 @@ packages:
     resolution: {integrity: sha512-HgMmCqIJSAKqo68l0rS2AanEWfkxaZ5wNiEFb5ggm08lDs9Xl2KxBlX3PTcaD2chBM1gXAYf491/M2Rv8Jwayg==}
     engines: {node: '>= 0.6.0'}
 
-  long@5.3.2:
-    resolution: {integrity: sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==}
-
   longest-streak@3.1.0:
     resolution: {integrity: sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==}
 
@@ -6635,12 +6534,6 @@ packages:
     resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==}
     engines: {node: '>=0.10.0'}
 
-  posthog-js@1.334.1:
-    resolution: {integrity: sha512-5cDzLICr2afnwX/cR9fwoLC0vN0Nb5gP5HiCigzHkgHdO+E3WsYefla3EFMQz7U4r01CBPZ+nZ9/srkzeACxtQ==}
-
-  preact@10.28.2:
-    resolution: {integrity: sha512-lbteaWGzGHdlIuiJ0l2Jq454m6kcpI1zNje6d8MlGAFlYvP2GO4ibnat7P74Esfz4sPTdM6UxtTwh/d3pwM9JA==}
-
   prelude-ls@1.2.1:
     resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==}
     engines: {node: '>= 0.8.0'}
@@ -6729,10 +6622,6 @@ packages:
   property-information@7.1.0:
     resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==}
 
-  protobufjs@7.5.4:
-    resolution: {integrity: sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg==}
-    engines: {node: '>=12.0.0'}
-
   proxy-from-env@1.1.0:
     resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==}
 
@@ -6754,9 +6643,6 @@ packages:
     resolution: {integrity: sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==}
     engines: {node: '>=0.6'}
 
-  query-selector-shadow-dom@1.0.1:
-    resolution: {integrity: sha512-lT5yCqEBgfoMYpf3F2xQRK7zEr1rhIIZuceDK6+xRkJQ4NMbHTwXqk4NkwDwQMNqXgG9r9fyHnzwNVs6zV5KRw==}
-
   querystring-es3@0.2.1:
     resolution: {integrity: sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==}
     engines: {node: '>=0.4.x'}
@@ -7935,9 +7821,6 @@ packages:
   web-namespaces@2.0.1:
     resolution: {integrity: sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==}
 
-  web-vitals@5.1.0:
-    resolution: {integrity: sha512-ArI3kx5jI0atlTtmV0fWU3fjpLmq/nD3Zr1iFFlJLaqa5wLBkUSzINwBPySCX/8jRyjlmy1Volw1kz1g9XE4Jg==}
-
   webidl-conversions@3.0.1:
     resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==}
@@ -9537,10 +9420,6 @@ snapshots:
 
   '@open-draft/until@2.1.0': {}
 
-  '@opentelemetry/api-logs@0.208.0':
-    dependencies:
-      '@opentelemetry/api': 1.9.0
-
   '@opentelemetry/api-logs@0.209.0':
     dependencies:
       '@opentelemetry/api': 1.9.0
@@ -9556,15 +9435,6 @@ snapshots:
       '@opentelemetry/api': 1.9.0
       '@opentelemetry/semantic-conventions': 1.38.0
 
-  '@opentelemetry/exporter-logs-otlp-http@0.208.0(@opentelemetry/api@1.9.0)':
-    dependencies:
-      '@opentelemetry/api': 1.9.0
-      '@opentelemetry/api-logs': 0.208.0
-      '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
-      '@opentelemetry/otlp-exporter-base': 0.208.0(@opentelemetry/api@1.9.0)
-      '@opentelemetry/otlp-transformer': 0.208.0(@opentelemetry/api@1.9.0)
-      '@opentelemetry/sdk-logs': 0.208.0(@opentelemetry/api@1.9.0)
-
   '@opentelemetry/instrumentation-amqplib@0.55.0(@opentelemetry/api@1.9.0)':
     dependencies:
       '@opentelemetry/api': 1.9.0
@@ -9759,23 +9629,6 @@ snapshots:
     transitivePeerDependencies:
       - supports-color
 
-  '@opentelemetry/otlp-exporter-base@0.208.0(@opentelemetry/api@1.9.0)':
-    dependencies:
-      '@opentelemetry/api': 1.9.0
-      '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
-      '@opentelemetry/otlp-transformer': 0.208.0(@opentelemetry/api@1.9.0)
-
-  '@opentelemetry/otlp-transformer@0.208.0(@opentelemetry/api@1.9.0)':
-    dependencies:
-      '@opentelemetry/api': 1.9.0
-      '@opentelemetry/api-logs': 0.208.0
-      '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
-      '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.0)
-      '@opentelemetry/sdk-logs': 0.208.0(@opentelemetry/api@1.9.0)
-      '@opentelemetry/sdk-metrics': 2.2.0(@opentelemetry/api@1.9.0)
-      '@opentelemetry/sdk-trace-base': 2.2.0(@opentelemetry/api@1.9.0)
-      protobufjs: 7.5.4
-
   '@opentelemetry/redis-common@0.38.2': {}
 
   '@opentelemetry/resources@2.2.0(@opentelemetry/api@1.9.0)':
@@ -9784,19 +9637,6 @@ snapshots:
       '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
       '@opentelemetry/semantic-conventions': 1.38.0
 
-  '@opentelemetry/sdk-logs@0.208.0(@opentelemetry/api@1.9.0)':
-    dependencies:
-      '@opentelemetry/api': 1.9.0
-      '@opentelemetry/api-logs': 0.208.0
-      '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
-      '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.0)
-
-  '@opentelemetry/sdk-metrics@2.2.0(@opentelemetry/api@1.9.0)':
-    dependencies:
-      '@opentelemetry/api': 1.9.0
-      '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
-      '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.0)
-
   '@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.0)':
     dependencies:
       '@opentelemetry/api': 1.9.0
@@ -9961,19 +9801,6 @@ snapshots:
      type-fest: 4.41.0
      webpack-hot-middleware: 2.26.1

  '@posthog/core@1.13.0':
    dependencies:
      cross-spawn: 7.0.6

  '@posthog/react@1.7.0(@types/react@18.3.17)(posthog-js@1.334.1)(react@18.3.1)':
    dependencies:
      posthog-js: 1.334.1
      react: 18.3.1
    optionalDependencies:
      '@types/react': 18.3.17

  '@posthog/types@1.334.1': {}

  '@prisma/instrumentation@6.19.0(@opentelemetry/api@1.9.0)':
    dependencies:
      '@opentelemetry/api': 1.9.0
@@ -9981,29 +9808,6 @@ snapshots:
    transitivePeerDependencies:
      - supports-color

  '@protobufjs/aspromise@1.1.2': {}

  '@protobufjs/base64@1.1.2': {}

  '@protobufjs/codegen@2.0.4': {}

  '@protobufjs/eventemitter@1.1.0': {}

  '@protobufjs/fetch@1.1.0':
    dependencies:
      '@protobufjs/aspromise': 1.1.2
      '@protobufjs/inquire': 1.1.0

  '@protobufjs/float@1.0.2': {}

  '@protobufjs/inquire@1.1.0': {}

  '@protobufjs/path@1.1.2': {}

  '@protobufjs/pool@1.1.0': {}

  '@protobufjs/utf8@1.1.0': {}

  '@radix-ui/number@1.1.1': {}

  '@radix-ui/primitive@1.1.3': {}
@@ -11622,9 +11426,6 @@ snapshots:
    dependencies:
      '@types/node': 24.10.0

  '@types/trusted-types@2.0.7':
    optional: true

  '@types/unist@2.0.11': {}

  '@types/unist@3.0.3': {}
@@ -12526,8 +12327,6 @@ snapshots:
  core-js-pure@3.47.0: {}

  core-js@3.48.0: {}

  core-util-is@1.0.3: {}

  cosmiconfig@7.1.0:
@@ -12837,10 +12636,6 @@ snapshots:
    dependencies:
      domelementtype: 2.3.0

  dompurify@3.3.1:
    optionalDependencies:
      '@types/trusted-types': 2.0.7

  domutils@2.8.0:
    dependencies:
      dom-serializer: 1.4.1
@@ -13410,8 +13205,6 @@ snapshots:
    optionalDependencies:
      picomatch: 4.0.3

  fflate@0.4.8: {}

  file-entry-cache@6.0.1:
    dependencies:
      flat-cache: 3.2.0
@@ -14299,8 +14092,6 @@ snapshots:
  loglevel@1.9.2: {}

  long@5.3.2: {}

  longest-streak@3.1.0: {}

  loose-envify@1.4.0:
@@ -15363,24 +15154,6 @@ snapshots:
    dependencies:
      xtend: 4.0.2

  posthog-js@1.334.1:
    dependencies:
      '@opentelemetry/api': 1.9.0
      '@opentelemetry/api-logs': 0.208.0
      '@opentelemetry/exporter-logs-otlp-http': 0.208.0(@opentelemetry/api@1.9.0)
      '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.0)
      '@opentelemetry/sdk-logs': 0.208.0(@opentelemetry/api@1.9.0)
      '@posthog/core': 1.13.0
      '@posthog/types': 1.334.1
      core-js: 3.48.0
      dompurify: 3.3.1
      fflate: 0.4.8
      preact: 10.28.2
      query-selector-shadow-dom: 1.0.1
      web-vitals: 5.1.0

  preact@10.28.2: {}

  prelude-ls@1.2.1: {}

  prettier-plugin-tailwindcss@0.7.1(prettier@3.6.2):
@@ -15414,21 +15187,6 @@ snapshots:
  property-information@7.1.0: {}

  protobufjs@7.5.4:
    dependencies:
      '@protobufjs/aspromise': 1.1.2
      '@protobufjs/base64': 1.1.2
      '@protobufjs/codegen': 2.0.4
      '@protobufjs/eventemitter': 1.1.0
      '@protobufjs/fetch': 1.1.0
      '@protobufjs/float': 1.0.2
      '@protobufjs/inquire': 1.1.0
      '@protobufjs/path': 1.1.2
      '@protobufjs/pool': 1.1.0
      '@protobufjs/utf8': 1.1.0
      '@types/node': 24.10.0
      long: 5.3.2

  proxy-from-env@1.1.0: {}

  public-encrypt@4.0.3:
@@ -15450,8 +15208,6 @@ snapshots:
    dependencies:
      side-channel: 1.1.0

  query-selector-shadow-dom@1.0.1: {}

  querystring-es3@0.2.1: {}

  queue-microtask@1.2.3: {}
@@ -16863,8 +16619,6 @@ snapshots:
  web-namespaces@2.0.1: {}

  web-vitals@5.1.0: {}

  webidl-conversions@3.0.1: {}

  webidl-conversions@8.0.1:
@@ -1,62 +1,33 @@
"use client";

import { TooltipProvider } from "@/components/atoms/Tooltip/BaseTooltip";
import {
  PostHogPageViewTracker,
  PostHogUserTracker,
} from "@/components/monitor/PostHogUserTracker";
import { SentryUserTracker } from "@/components/monitor/SentryUserTracker";
import { BackendAPIProvider } from "@/lib/autogpt-server-api/context";
import { getQueryClient } from "@/lib/react-query/queryClient";
import CredentialsProvider from "@/providers/agent-credentials/credentials-provider";
import OnboardingProvider from "@/providers/onboarding/onboarding-provider";
import { LaunchDarklyProvider } from "@/services/feature-flags/feature-flag-provider";
import { PostHogProvider as PHProvider } from "@posthog/react";
import { QueryClientProvider } from "@tanstack/react-query";
import { ThemeProvider, ThemeProviderProps } from "next-themes";
import { NuqsAdapter } from "nuqs/adapters/next/app";
import posthog from "posthog-js";
import { Suspense, useEffect } from "react";

function PostHogProvider({ children }: { children: React.ReactNode }) {
  useEffect(() => {
    if (process.env.NEXT_PUBLIC_POSTHOG_KEY) {
      posthog.init(process.env.NEXT_PUBLIC_POSTHOG_KEY, {
        api_host: process.env.NEXT_PUBLIC_POSTHOG_HOST,
        defaults: "2025-11-30",
        capture_pageview: false,
        capture_pageleave: true,
        autocapture: true,
      });
    }
  }, []);

  return <PHProvider client={posthog}>{children}</PHProvider>;
}

export function Providers({ children, ...props }: ThemeProviderProps) {
  const queryClient = getQueryClient();
  return (
    <QueryClientProvider client={queryClient}>
      <NuqsAdapter>
        <PostHogProvider>
          <BackendAPIProvider>
            <SentryUserTracker />
            <PostHogUserTracker />
            <Suspense fallback={null}>
              <PostHogPageViewTracker />
            </Suspense>
            <CredentialsProvider>
              <LaunchDarklyProvider>
                <OnboardingProvider>
                  <ThemeProvider forcedTheme="light" {...props}>
                    <TooltipProvider>{children}</TooltipProvider>
                  </ThemeProvider>
                </OnboardingProvider>
              </LaunchDarklyProvider>
            </CredentialsProvider>
          </BackendAPIProvider>
        </PostHogProvider>
        <BackendAPIProvider>
          <SentryUserTracker />
          <CredentialsProvider>
            <LaunchDarklyProvider>
              <OnboardingProvider>
                <ThemeProvider forcedTheme="light" {...props}>
                  <TooltipProvider>{children}</TooltipProvider>
                </ThemeProvider>
              </OnboardingProvider>
            </LaunchDarklyProvider>
          </CredentialsProvider>
        </BackendAPIProvider>
      </NuqsAdapter>
    </QueryClientProvider>
  );
}
@@ -1,64 +0,0 @@
"use client";

import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { usePathname, useSearchParams } from "next/navigation";
import posthog from "posthog-js";
import { useEffect, useRef } from "react";

/**
 * PostHogUserTracker component identifies users in PostHog for analytics.
 * This component should be placed high in the component tree to ensure user
 * identification happens as soon as the user logs in.
 *
 * It automatically:
 * - Identifies the user when they log in (linking anonymous to authenticated)
 * - Resets PostHog when a user logs out
 * - Updates identification when user data changes
 */
export function PostHogUserTracker() {
  const { user, isUserLoading } = useSupabase();
  const previousUserIdRef = useRef<string | null>(null);

  useEffect(() => {
    if (isUserLoading) return;

    if (user) {
      // Only identify if we haven't already identified this user
      if (previousUserIdRef.current !== user.id) {
        posthog.identify(user.id, {
          email: user.email,
          ...(user.user_metadata?.name && { name: user.user_metadata.name }),
        });
        previousUserIdRef.current = user.id;
      }
    } else if (previousUserIdRef.current !== null) {
      // User logged out - reset PostHog to generate new anonymous ID
      posthog.reset();
      previousUserIdRef.current = null;
    }
  }, [user, isUserLoading]);

  return null;
}

/**
 * PostHogPageViewTracker captures page views on route changes in Next.js App Router.
 * The default PostHog capture_pageview only works for initial page loads.
 * This component ensures soft navigations (client-side route changes) are also tracked.
 */
export function PostHogPageViewTracker() {
  const pathname = usePathname();
  const searchParams = useSearchParams();

  useEffect(() => {
    if (pathname) {
      let url = window.origin + pathname;
      if (searchParams && searchParams.toString()) {
        url = url + `?${searchParams.toString()}`;
      }
      posthog.capture("$pageview", { $current_url: url });
    }
  }, [pathname, searchParams]);

  return null;
}