Mirror of https://github.com/Significant-Gravitas/AutoGPT.git (synced 2026-01-26 15:38:14 -05:00)

Compare commits: feat/agent...fix/review (3 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 7b08f1d7a0 | |
| | fe69677758 | |
| | d9864687fd | |
@@ -178,10 +178,5 @@ AYRSHARE_JWT_KEY=
SMARTLEAD_API_KEY=
ZEROBOUNCE_API_KEY=

# PostHog Analytics
# Get API key from https://posthog.com - Project Settings > Project API Key
POSTHOG_API_KEY=
POSTHOG_HOST=https://eu.i.posthog.com

# Other Services
AUTOMOD_API_KEY=
@@ -86,8 +86,6 @@ async def execute_graph_block(
    obj = backend.data.block.get_block(block_id)
    if not obj:
        raise HTTPException(status_code=404, detail=f"Block #{block_id} not found.")
    if obj.disabled:
        raise HTTPException(status_code=403, detail=f"Block #{block_id} is disabled.")

    output = defaultdict(list)
    async for name, data in obj.execute(data):
@@ -48,7 +48,6 @@ from .response_model import (
    StreamUsage,
)
from .tools import execute_tool, tools
from .tracking import track_user_message

logger = logging.getLogger(__name__)

@@ -104,33 +103,16 @@ async def _build_system_prompt(user_id: str | None) -> tuple[str, Any]:
    return compiled, understanding


async def _generate_session_title(
    message: str,
    user_id: str | None = None,
    session_id: str | None = None,
) -> str | None:
async def _generate_session_title(message: str) -> str | None:
    """Generate a concise title for a chat session based on the first message.

    Args:
        message: The first user message in the session
        user_id: User ID for OpenRouter tracing (optional)
        session_id: Session ID for OpenRouter tracing (optional)

    Returns:
        A short title (3-6 words) or None if generation fails
    """
    try:
        # Build extra_body for OpenRouter tracing and PostHog analytics
        extra_body: dict[str, Any] = {}
        if user_id:
            extra_body["user"] = user_id[:128]  # OpenRouter limit
            extra_body["posthogDistinctId"] = user_id
        if session_id:
            extra_body["session_id"] = session_id[:128]  # OpenRouter limit
        extra_body["posthogProperties"] = {
            "environment": settings.config.app_env.value,
        }

        response = await client.chat.completions.create(
            model=config.title_model,
            messages=[
@@ -145,7 +127,6 @@ async def _generate_session_title(
                {"role": "user", "content": message[:500]},  # Limit input length
            ],
            max_tokens=20,
            extra_body=extra_body,
        )
        title = response.choices[0].message.content
        if title:
@@ -237,9 +218,18 @@ async def stream_chat_completion(
    )

    if message:
        # Build message content with context if provided
        message_content = message
        if context and context.get("url") and context.get("content"):
            context_text = f"Page URL: {context['url']}\n\nPage Content:\n{context['content']}\n\n---\n\nUser Message: {message}"
            message_content = context_text
            logger.info(
                f"Including page context: URL={context['url']}, content_length={len(context['content'])}"
            )

        session.messages.append(
            ChatMessage(
                role="user" if is_user_message else "assistant", content=message
                role="user" if is_user_message else "assistant", content=message_content
            )
        )
        logger.info(
@@ -247,14 +237,6 @@ async def stream_chat_completion(
            f"new message_count={len(session.messages)}"
        )

        # Track user message in PostHog
        if is_user_message:
            track_user_message(
                user_id=user_id,
                session_id=session_id,
                message_length=len(message),
            )

    logger.info(
        f"Upserting session: {session.session_id} with user id {session.user_id}, "
        f"message_count={len(session.messages)}"
@@ -274,15 +256,10 @@ async def stream_chat_completion(
    # stale data issues when the main flow modifies the session
    captured_session_id = session_id
    captured_message = message
    captured_user_id = user_id

    async def _update_title():
        try:
            title = await _generate_session_title(
                captured_message,
                user_id=captured_user_id,
                session_id=captured_session_id,
            )
            title = await _generate_session_title(captured_message)
            if title:
                # Use dedicated title update function that doesn't
                # touch messages, avoiding race conditions
@@ -721,20 +698,6 @@ async def _stream_chat_chunks(
                f"{f' (retry {retry_count}/{MAX_RETRIES})' if retry_count > 0 else ''}"
            )

            # Build extra_body for OpenRouter tracing and PostHog analytics
            extra_body: dict[str, Any] = {
                "posthogProperties": {
                    "environment": settings.config.app_env.value,
                },
            }
            if session.user_id:
                extra_body["user"] = session.user_id[:128]  # OpenRouter limit
                extra_body["posthogDistinctId"] = session.user_id
            if session.session_id:
                extra_body["session_id"] = session.session_id[
                    :128
                ]  # OpenRouter limit

            # Create the stream with proper types
            stream = await client.chat.completions.create(
                model=model,
@@ -743,7 +706,6 @@ async def _stream_chat_chunks(
                tool_choice="auto",
                stream=True,
                stream_options={"include_usage": True},
                extra_body=extra_body,
            )

            # Variables to accumulate tool calls
@@ -1,10 +1,8 @@
import logging
from typing import TYPE_CHECKING, Any

from openai.types.chat import ChatCompletionToolParam

from backend.api.features.chat.model import ChatSession
from backend.api.features.chat.tracking import track_tool_called

from .add_understanding import AddUnderstandingTool
from .agent_output import AgentOutputTool
@@ -22,8 +20,6 @@ from .search_docs import SearchDocsTool
if TYPE_CHECKING:
    from backend.api.features.chat.response_model import StreamToolOutputAvailable

logger = logging.getLogger(__name__)

# Single source of truth for all tools
TOOL_REGISTRY: dict[str, BaseTool] = {
    "add_understanding": AddUnderstandingTool(),
@@ -60,17 +56,4 @@ async def execute_tool(
    tool = TOOL_REGISTRY.get(tool_name)
    if not tool:
        raise ValueError(f"Tool {tool_name} not found")

    # Track tool call in PostHog
    logger.info(
        f"Tracking tool call: tool={tool_name}, user={user_id}, "
        f"session={session.session_id}, call_id={tool_call_id}"
    )
    track_tool_called(
        user_id=user_id,
        session_id=session.session_id,
        tool_name=tool_name,
        tool_call_id=tool_call_id,
    )

    return await tool.execute(user_id, session, tool_call_id, **parameters)
@@ -8,10 +8,6 @@ from pydantic import BaseModel, Field, field_validator

from backend.api.features.chat.config import ChatConfig
from backend.api.features.chat.model import ChatSession
from backend.api.features.chat.tracking import (
    track_agent_run_success,
    track_agent_scheduled,
)
from backend.api.features.library import db as library_db
from backend.data.graph import GraphModel
from backend.data.model import CredentialsMetaInput
@@ -457,16 +453,6 @@ class RunAgentTool(BaseTool):
            session.successful_agent_runs.get(library_agent.graph_id, 0) + 1
        )

        # Track in PostHog
        track_agent_run_success(
            user_id=user_id,
            session_id=session_id,
            graph_id=library_agent.graph_id,
            graph_name=library_agent.name,
            execution_id=execution.id,
            library_agent_id=library_agent.id,
        )

        library_agent_link = f"/library/agents/{library_agent.id}"
        return ExecutionStartedResponse(
            message=(
@@ -548,18 +534,6 @@ class RunAgentTool(BaseTool):
            session.successful_agent_schedules.get(library_agent.graph_id, 0) + 1
        )

        # Track in PostHog
        track_agent_scheduled(
            user_id=user_id,
            session_id=session_id,
            graph_id=library_agent.graph_id,
            graph_name=library_agent.name,
            schedule_id=result.id,
            schedule_name=schedule_name,
            cron=cron,
            library_agent_id=library_agent.id,
        )

        library_agent_link = f"/library/agents/{library_agent.id}"
        return ExecutionStartedResponse(
            message=(
@@ -179,11 +179,6 @@ class RunBlockTool(BaseTool):
                message=f"Block '{block_id}' not found",
                session_id=session_id,
            )
        if block.disabled:
            return ErrorResponse(
                message=f"Block '{block_id}' is disabled",
                session_id=session_id,
            )

        logger.info(f"Executing block {block.name} ({block_id}) for user {user_id}")
@@ -1,250 +0,0 @@
"""PostHog analytics tracking for the chat system."""

import atexit
import logging
from typing import Any

from posthog import Posthog

from backend.util.settings import Settings

logger = logging.getLogger(__name__)
settings = Settings()

# PostHog client instance (lazily initialized)
_posthog_client: Posthog | None = None


def _shutdown_posthog() -> None:
    """Flush and shutdown PostHog client on process exit."""
    if _posthog_client is not None:
        _posthog_client.flush()
        _posthog_client.shutdown()


atexit.register(_shutdown_posthog)


def _get_posthog_client() -> Posthog | None:
    """Get or create the PostHog client instance."""
    global _posthog_client
    if _posthog_client is not None:
        return _posthog_client

    if not settings.secrets.posthog_api_key:
        logger.debug("PostHog API key not configured, analytics disabled")
        return None

    _posthog_client = Posthog(
        settings.secrets.posthog_api_key,
        host=settings.secrets.posthog_host,
    )
    logger.info(
        f"PostHog client initialized with host: {settings.secrets.posthog_host}"
    )
    return _posthog_client


def _get_base_properties() -> dict[str, Any]:
    """Get base properties included in all events."""
    return {
        "environment": settings.config.app_env.value,
        "source": "chat_copilot",
    }


def track_user_message(
    user_id: str | None,
    session_id: str,
    message_length: int,
) -> None:
    """Track when a user sends a message in chat.

    Args:
        user_id: The user's ID (or None for anonymous)
        session_id: The chat session ID
        message_length: Length of the user's message
    """
    client = _get_posthog_client()
    if not client:
        return

    try:
        properties = {
            **_get_base_properties(),
            "session_id": session_id,
            "message_length": message_length,
        }
        client.capture(
            distinct_id=user_id or f"anonymous_{session_id}",
            event="copilot_message_sent",
            properties=properties,
        )
    except Exception as e:
        logger.warning(f"Failed to track user message: {e}")


def track_tool_called(
    user_id: str | None,
    session_id: str,
    tool_name: str,
    tool_call_id: str,
) -> None:
    """Track when a tool is called in chat.

    Args:
        user_id: The user's ID (or None for anonymous)
        session_id: The chat session ID
        tool_name: Name of the tool being called
        tool_call_id: Unique ID of the tool call
    """
    client = _get_posthog_client()
    if not client:
        logger.info("PostHog client not available for tool tracking")
        return

    try:
        properties = {
            **_get_base_properties(),
            "session_id": session_id,
            "tool_name": tool_name,
            "tool_call_id": tool_call_id,
        }
        distinct_id = user_id or f"anonymous_{session_id}"
        logger.info(
            f"Sending copilot_tool_called event to PostHog: distinct_id={distinct_id}, "
            f"tool_name={tool_name}"
        )
        client.capture(
            distinct_id=distinct_id,
            event="copilot_tool_called",
            properties=properties,
        )
    except Exception as e:
        logger.warning(f"Failed to track tool call: {e}")


def track_agent_run_success(
    user_id: str,
    session_id: str,
    graph_id: str,
    graph_name: str,
    execution_id: str,
    library_agent_id: str,
) -> None:
    """Track when an agent is successfully run.

    Args:
        user_id: The user's ID
        session_id: The chat session ID
        graph_id: ID of the agent graph
        graph_name: Name of the agent
        execution_id: ID of the execution
        library_agent_id: ID of the library agent
    """
    client = _get_posthog_client()
    if not client:
        return

    try:
        properties = {
            **_get_base_properties(),
            "session_id": session_id,
            "graph_id": graph_id,
            "graph_name": graph_name,
            "execution_id": execution_id,
            "library_agent_id": library_agent_id,
        }
        client.capture(
            distinct_id=user_id,
            event="copilot_agent_run_success",
            properties=properties,
        )
    except Exception as e:
        logger.warning(f"Failed to track agent run: {e}")


def track_agent_scheduled(
    user_id: str,
    session_id: str,
    graph_id: str,
    graph_name: str,
    schedule_id: str,
    schedule_name: str,
    cron: str,
    library_agent_id: str,
) -> None:
    """Track when an agent is successfully scheduled.

    Args:
        user_id: The user's ID
        session_id: The chat session ID
        graph_id: ID of the agent graph
        graph_name: Name of the agent
        schedule_id: ID of the schedule
        schedule_name: Name of the schedule
        cron: Cron expression for the schedule
        library_agent_id: ID of the library agent
    """
    client = _get_posthog_client()
    if not client:
        return

    try:
        properties = {
            **_get_base_properties(),
            "session_id": session_id,
            "graph_id": graph_id,
            "graph_name": graph_name,
            "schedule_id": schedule_id,
            "schedule_name": schedule_name,
            "cron": cron,
            "library_agent_id": library_agent_id,
        }
        client.capture(
            distinct_id=user_id,
            event="copilot_agent_scheduled",
            properties=properties,
        )
    except Exception as e:
        logger.warning(f"Failed to track agent schedule: {e}")


def track_trigger_setup(
    user_id: str,
    session_id: str,
    graph_id: str,
    graph_name: str,
    trigger_type: str,
    library_agent_id: str,
) -> None:
    """Track when a trigger is set up for an agent.

    Args:
        user_id: The user's ID
        session_id: The chat session ID
        graph_id: ID of the agent graph
        graph_name: Name of the agent
        trigger_type: Type of trigger (e.g., 'webhook')
        library_agent_id: ID of the library agent
    """
    client = _get_posthog_client()
    if not client:
        return

    try:
        properties = {
            **_get_base_properties(),
            "session_id": session_id,
            "graph_id": graph_id,
            "graph_name": graph_name,
            "trigger_type": trigger_type,
            "library_agent_id": library_agent_id,
        }
        client.capture(
            distinct_id=user_id,
            event="copilot_trigger_setup",
            properties=properties,
        )
    except Exception as e:
        logger.warning(f"Failed to track trigger setup: {e}")
@@ -164,9 +164,9 @@ async def test_process_review_action_approve_success(
    """Test successful review approval"""
    # Mock the route functions

    # Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
    # Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
    mock_get_reviews_for_user = mocker.patch(
        "backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
        "backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
    )
    mock_get_reviews_for_user.return_value = {"test_node_123": sample_pending_review}

@@ -244,9 +244,9 @@ async def test_process_review_action_reject_success(
    """Test successful review rejection"""
    # Mock the route functions

    # Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
    # Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
    mock_get_reviews_for_user = mocker.patch(
        "backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
        "backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
    )
    mock_get_reviews_for_user.return_value = {"test_node_123": sample_pending_review}

@@ -339,9 +339,9 @@ async def test_process_review_action_mixed_success(

    # Mock the route functions

    # Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
    # Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
    mock_get_reviews_for_user = mocker.patch(
        "backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
        "backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
    )
    mock_get_reviews_for_user.return_value = {
        "test_node_123": sample_pending_review,
@@ -463,9 +463,9 @@ async def test_process_review_action_review_not_found(
    test_user_id: str,
) -> None:
    """Test error when review is not found"""
    # Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
    # Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
    mock_get_reviews_for_user = mocker.patch(
        "backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
        "backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
    )
    # Return empty dict to simulate review not found
    mock_get_reviews_for_user.return_value = {}
@@ -506,7 +506,7 @@ async def test_process_review_action_review_not_found(
    response = await client.post("/api/review/action", json=request_data)

    assert response.status_code == 404
    assert "No pending review found" in response.json()["detail"]
    assert "Review(s) not found" in response.json()["detail"]


@pytest.mark.asyncio(loop_scope="session")
@@ -517,9 +517,9 @@ async def test_process_review_action_partial_failure(
    test_user_id: str,
) -> None:
    """Test handling of partial failures in review processing"""
    # Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
    # Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
    mock_get_reviews_for_user = mocker.patch(
        "backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
        "backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
    )
    mock_get_reviews_for_user.return_value = {"test_node_123": sample_pending_review}

@@ -567,9 +567,9 @@ async def test_process_review_action_invalid_node_exec_id(
    test_user_id: str,
) -> None:
    """Test failure when trying to process review with invalid node execution ID"""
    # Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
    # Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
    mock_get_reviews_for_user = mocker.patch(
        "backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
        "backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
    )
    # Return empty dict to simulate review not found
    mock_get_reviews_for_user.return_value = {}
@@ -596,7 +596,7 @@ async def test_process_review_action_invalid_node_exec_id(

    # Returns 404 when review is not found
    assert response.status_code == 404
    assert "No pending review found" in response.json()["detail"]
    assert "Review(s) not found" in response.json()["detail"]


@pytest.mark.asyncio(loop_scope="session")
@@ -607,9 +607,9 @@ async def test_process_review_action_auto_approve_creates_auto_approval_records(
    test_user_id: str,
) -> None:
    """Test that auto_approve_future_actions flag creates auto-approval records"""
    # Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
    # Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
    mock_get_reviews_for_user = mocker.patch(
        "backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
        "backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
    )
    mock_get_reviews_for_user.return_value = {"test_node_123": sample_pending_review}

@@ -737,9 +737,9 @@ async def test_process_review_action_without_auto_approve_still_loads_settings(
    test_user_id: str,
) -> None:
    """Test that execution context is created with settings even without auto-approve"""
    # Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
    # Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
    mock_get_reviews_for_user = mocker.patch(
        "backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
        "backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
    )
    mock_get_reviews_for_user.return_value = {"test_node_123": sample_pending_review}

@@ -885,9 +885,9 @@ async def test_process_review_action_auto_approve_only_applies_to_approved_revie
        reviewed_at=FIXED_NOW,
    )

    # Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
    # Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
    mock_get_reviews_for_user = mocker.patch(
        "backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
        "backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
    )
    # Need to return both reviews in WAITING state (before processing)
    approved_review_waiting = PendingHumanReviewModel(
@@ -1031,9 +1031,9 @@ async def test_process_review_action_per_review_auto_approve_granularity(
    test_user_id: str,
) -> None:
    """Test that auto-approval can be set per-review (granular control)"""
    # Mock get_pending_reviews_by_node_exec_ids - return different reviews based on node_exec_id
    # Mock get_reviews_by_node_exec_ids - return different reviews based on node_exec_id
    mock_get_reviews_for_user = mocker.patch(
        "backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
        "backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
    )

    # Create a mapping of node_exec_id to review
@@ -14,9 +14,9 @@ from backend.data.execution import (
from backend.data.graph import get_graph_settings
from backend.data.human_review import (
    create_auto_approval_record,
    get_pending_reviews_by_node_exec_ids,
    get_pending_reviews_for_execution,
    get_pending_reviews_for_user,
    get_reviews_by_node_exec_ids,
    has_pending_reviews_for_graph_exec,
    process_all_reviews_for_execution,
)
@@ -137,17 +137,17 @@ async def process_review_action(
            detail="At least one review must be provided",
        )

    # Batch fetch all requested reviews
    reviews_map = await get_pending_reviews_by_node_exec_ids(
    # Batch fetch all requested reviews (regardless of status for idempotent handling)
    reviews_map = await get_reviews_by_node_exec_ids(
        list(all_request_node_ids), user_id
    )

    # Validate all reviews were found
    # Validate all reviews were found (must exist, any status is OK for now)
    missing_ids = all_request_node_ids - set(reviews_map.keys())
    if missing_ids:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"No pending review found for node execution(s): {', '.join(missing_ids)}",
            detail=f"Review(s) not found: {', '.join(missing_ids)}",
        )

    # Validate all reviews belong to the same execution
@@ -364,8 +364,6 @@ async def execute_graph_block(
    obj = get_block(block_id)
    if not obj:
        raise HTTPException(status_code=404, detail=f"Block #{block_id} not found.")
    if obj.disabled:
        raise HTTPException(status_code=403, detail=f"Block #{block_id} is disabled.")

    user = await get_user_by_id(user_id)
    if not user:
@@ -138,7 +138,6 @@ def test_execute_graph_block(
    """Test execute block endpoint"""
    # Mock block
    mock_block = Mock()
    mock_block.disabled = False

    async def mock_execute(*args, **kwargs):
        yield "output1", {"data": "result1"}
@@ -263,11 +263,14 @@ async def get_pending_review_by_node_exec_id(
    return PendingHumanReviewModel.from_db(review, node_id=node_id)


async def get_pending_reviews_by_node_exec_ids(
async def get_reviews_by_node_exec_ids(
    node_exec_ids: list[str], user_id: str
) -> dict[str, "PendingHumanReviewModel"]:
    """
    Get multiple pending reviews by their node execution IDs in a single batch query.
    Get multiple reviews by their node execution IDs regardless of status.

    Unlike get_pending_reviews_by_node_exec_ids, this returns reviews in any status
    (WAITING, APPROVED, REJECTED). Used for validation in idempotent operations.

    Args:
        node_exec_ids: List of node execution IDs to look up
@@ -283,7 +286,6 @@ async def get_pending_reviews_by_node_exec_ids(
        where={
            "nodeExecId": {"in": node_exec_ids},
            "userId": user_id,
            "status": ReviewStatus.WAITING,
        }
    )

@@ -407,38 +409,68 @@ async def process_all_reviews_for_execution(
) -> dict[str, PendingHumanReviewModel]:
    """Process all pending reviews for an execution with approve/reject decisions.

    Handles race conditions gracefully: if a review was already processed with the
    same decision by a concurrent request, it's treated as success rather than error.

    Args:
        user_id: User ID for ownership validation
        review_decisions: Map of node_exec_id -> (status, reviewed_data, message)

    Returns:
        Dict of node_exec_id -> updated review model
        Dict of node_exec_id -> updated review model (includes already-processed reviews)
    """
    if not review_decisions:
        return {}

    node_exec_ids = list(review_decisions.keys())

    # Get all reviews for validation
    reviews = await PendingHumanReview.prisma().find_many(
    # Get all reviews (both WAITING and already processed) for the user
    all_reviews = await PendingHumanReview.prisma().find_many(
        where={
            "nodeExecId": {"in": node_exec_ids},
            "userId": user_id,
            "status": ReviewStatus.WAITING,
        },
    )

    # Validate all reviews can be processed
    if len(reviews) != len(node_exec_ids):
        missing_ids = set(node_exec_ids) - {review.nodeExecId for review in reviews}
    # Separate into pending and already-processed reviews
    reviews_to_process = []
    already_processed = []
    for review in all_reviews:
        if review.status == ReviewStatus.WAITING:
            reviews_to_process.append(review)
        else:
            already_processed.append(review)

    # Check for truly missing reviews (not found at all)
    found_ids = {review.nodeExecId for review in all_reviews}
    missing_ids = set(node_exec_ids) - found_ids
    if missing_ids:
        raise ValueError(
            f"Reviews not found, access denied, or not in WAITING status: {', '.join(missing_ids)}"
            f"Reviews not found or access denied: {', '.join(missing_ids)}"
        )

    # Create parallel update tasks
    # Validate already-processed reviews have compatible status (same decision)
    # This handles race conditions where another request processed the same reviews
    for review in already_processed:
        requested_status = review_decisions[review.nodeExecId][0]
        if review.status != requested_status:
            raise ValueError(
                f"Review {review.nodeExecId} was already processed with status "
                f"{review.status}, cannot change to {requested_status}"
            )

    # Log if we're handling a race condition (some reviews already processed)
    if already_processed:
        already_processed_ids = [r.nodeExecId for r in already_processed]
        logger.info(
            f"Race condition handled: {len(already_processed)} review(s) already "
            f"processed by concurrent request: {already_processed_ids}"
        )

    # Create parallel update tasks for reviews that still need processing
    update_tasks = []

    for review in reviews:
    for review in reviews_to_process:
        new_status, reviewed_data, message = review_decisions[review.nodeExecId]
        has_data_changes = reviewed_data is not None and reviewed_data != review.payload

@@ -463,7 +495,7 @@ async def process_all_reviews_for_execution(
        update_tasks.append(task)

    # Execute all updates in parallel and get updated reviews
    updated_reviews = await asyncio.gather(*update_tasks)
    updated_reviews = await asyncio.gather(*update_tasks) if update_tasks else []

    # Note: Execution resumption is now handled at the API layer after ALL reviews
    # for an execution are processed (both approved and rejected)
@@ -472,8 +504,11 @@ async def process_all_reviews_for_execution(
    # Local import to avoid event loop conflicts in tests
    from backend.data.execution import get_node_execution

    # Combine updated reviews with already-processed ones (for idempotent response)
    all_result_reviews = list(updated_reviews) + already_processed

    result = {}
    for review in updated_reviews:
    for review in all_result_reviews:
        node_exec = await get_node_execution(review.nodeExecId)
        node_id = node_exec.node_id if node_exec else review.nodeExecId
        result[review.nodeExecId] = PendingHumanReviewModel.from_db(
@@ -679,12 +679,6 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
        default="https://cloud.langfuse.com", description="Langfuse host URL"
    )

    # PostHog analytics
    posthog_api_key: str = Field(default="", description="PostHog API key")
    posthog_host: str = Field(
        default="https://eu.i.posthog.com", description="PostHog host URL"
    )

    # Add more secret fields as needed
    model_config = SettingsConfigDict(
        env_file=".env",
autogpt_platform/backend/poetry.lock (generated, 12 lines changed)
@@ -4204,14 +4204,14 @@ strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""}

[[package]]
name = "posthog"
version = "7.6.0"
version = "6.1.1"
description = "Integrate PostHog into any python application."
optional = false
python-versions = ">=3.10"
python-versions = ">=3.9"
groups = ["main"]
files = [
    {file = "posthog-7.6.0-py3-none-any.whl", hash = "sha256:c4dd78cf77c4fecceb965f86066e5ac37886ef867d68ffe75a1db5d681d7d9ad"},
    {file = "posthog-7.6.0.tar.gz", hash = "sha256:941dfd278ee427c9b14640f09b35b5bb52a71bdf028d7dbb7307e1838fd3002e"},
    {file = "posthog-6.1.1-py3-none-any.whl", hash = "sha256:329fd3d06b4d54cec925f47235bd8e327c91403c2f9ec38f1deb849535934dba"},
    {file = "posthog-6.1.1.tar.gz", hash = "sha256:b453f54c4a2589da859fd575dd3bf86fcb40580727ec399535f268b1b9f318b8"},
]

[package.dependencies]
@@ -4225,7 +4225,7 @@ typing-extensions = ">=4.2.0"
[package.extras]
dev = ["django-stubs", "lxml", "mypy", "mypy-baseline", "packaging", "pre-commit", "pydantic", "ruff", "setuptools", "tomli", "tomli_w", "twine", "types-mock", "types-python-dateutil", "types-requests", "types-setuptools", "types-six", "wheel"]
langchain = ["langchain (>=0.2.0)"]
test = ["anthropic (>=0.72)", "coverage", "django", "freezegun (==1.5.1)", "google-genai", "langchain-anthropic (>=1.0)", "langchain-community (>=0.4)", "langchain-core (>=1.0)", "langchain-openai (>=1.0)", "langgraph (>=1.0)", "mock (>=2.0.0)", "openai (>=2.0)", "parameterized (>=0.8.1)", "pydantic", "pytest", "pytest-asyncio", "pytest-timeout"]
test = ["anthropic", "coverage", "django", "freezegun (==1.5.1)", "google-genai", "langchain-anthropic (>=0.3.15)", "langchain-community (>=0.3.25)", "langchain-core (>=0.3.65)", "langchain-openai (>=0.3.22)", "langgraph (>=0.4.8)", "mock (>=2.0.0)", "openai", "parameterized (>=0.8.1)", "pydantic", "pytest", "pytest-asyncio", "pytest-timeout"]

[[package]]
name = "postmarker"
@@ -7512,4 +7512,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<3.14"
content-hash = "ee5742dc1a9df50dfc06d4b26a1682cbb2b25cab6b79ce5625ec272f93e4f4bf"
content-hash = "18b92e09596298c82432e4d0a85cb6d80a40b4229bee0a0c15f0529fd6cb21a4"
@@ -85,7 +85,6 @@ exa-py = "^1.14.20"
croniter = "^6.0.0"
stagehand = "^0.5.1"
gravitas-md2gdocs = "^0.1.0"
posthog = "^7.6.0"

[tool.poetry.group.dev.dependencies]
aiohappyeyeballs = "^2.6.1"
@@ -30,7 +30,3 @@ NEXT_PUBLIC_TURNSTILE=disabled

# PR previews
NEXT_PUBLIC_PREVIEW_STEALING_DEV=

# PostHog Analytics
NEXT_PUBLIC_POSTHOG_KEY=
NEXT_PUBLIC_POSTHOG_HOST=https://eu.i.posthog.com
@@ -34,7 +34,6 @@
    "@hookform/resolvers": "5.2.2",
    "@next/third-parties": "15.4.6",
    "@phosphor-icons/react": "2.1.10",
    "@posthog/react": "1.7.0",
    "@radix-ui/react-accordion": "1.2.12",
    "@radix-ui/react-alert-dialog": "1.1.15",
    "@radix-ui/react-avatar": "1.1.10",
@@ -92,7 +91,6 @@
    "next-themes": "0.4.6",
    "nuqs": "2.7.2",
    "party-js": "2.2.0",
    "posthog-js": "1.334.1",
    "react": "18.3.1",
    "react-currency-input-field": "4.0.3",
    "react-day-picker": "9.11.1",
@@ -122,6 +120,7 @@
  },
  "devDependencies": {
    "@chromatic-com/storybook": "4.1.2",
    "happy-dom": "20.3.4",
    "@opentelemetry/instrumentation": "0.209.0",
    "@playwright/test": "1.56.1",
    "@storybook/addon-a11y": "9.1.5",
@@ -149,7 +148,6 @@
    "eslint": "8.57.1",
    "eslint-config-next": "15.5.7",
    "eslint-plugin-storybook": "9.1.5",
    "happy-dom": "20.3.4",
    "import-in-the-middle": "2.0.2",
    "msw": "2.11.6",
    "msw-storybook-addon": "2.0.6",
autogpt_platform/frontend/pnpm-lock.yaml (generated, 246 lines changed): lockfile regenerated to remove posthog-js, @posthog/react, @posthog/core, and @posthog/types together with their now-unused transitive dependencies (@opentelemetry 0.208.x api-logs / otlp-exporter / sdk-logs packages, protobufjs and the @protobufjs helpers, core-js, dompurify, fflate, preact, query-selector-shadow-dom, web-vitals, long, @types/trusted-types).
@@ -38,12 +38,8 @@ export const AgentOutputs = ({ flowID }: { flowID: string | null }) => {
|
||||
|
||||
return outputNodes
|
||||
.map((node) => {
|
||||
const executionResults = node.data.nodeExecutionResults || [];
|
||||
const latestResult =
|
||||
executionResults.length > 0
|
||||
? executionResults[executionResults.length - 1]
|
||||
: undefined;
|
||||
const outputData = latestResult?.output_data?.output;
|
||||
const executionResult = node.data.nodeExecutionResult;
|
||||
const outputData = executionResult?.output_data?.output;
|
||||
|
||||
const renderer = globalRegistry.getRenderer(outputData);
|
||||
|
||||
|
||||
@@ -153,9 +153,6 @@ export const useRunInputDialog = ({
|
||||
Object.entries(credentialValues).filter(([_, cred]) => cred && cred.id),
|
||||
);
|
||||
|
||||
useNodeStore.getState().clearAllNodeExecutionResults();
|
||||
useNodeStore.getState().cleanNodesStatuses();
|
||||
|
||||
await executeGraph({
|
||||
graphId: flowID ?? "",
|
||||
graphVersion: flowVersion || null,
|
||||
|
||||
@@ -34,7 +34,7 @@ export type CustomNodeData = {
|
||||
uiType: BlockUIType;
|
||||
block_id: string;
|
||||
status?: AgentExecutionStatus;
|
||||
nodeExecutionResults?: NodeExecutionResult[];
|
||||
nodeExecutionResult?: NodeExecutionResult;
|
||||
staticOutput?: boolean;
|
||||
// TODO : We need better type safety for the following backend fields.
|
||||
costs: BlockCost[];
|
||||
@@ -75,11 +75,7 @@ export const CustomNode: React.FC<NodeProps<CustomNode>> = React.memo(
(value) => value !== null && value !== undefined && value !== "",
);

const latestResult =
data.nodeExecutionResults && data.nodeExecutionResults.length > 0
? data.nodeExecutionResults[data.nodeExecutionResults.length - 1]
: undefined;
const outputData = latestResult?.output_data;
const outputData = data.nodeExecutionResult?.output_data;
const hasOutputError =
typeof outputData === "object" &&
outputData !== null &&

@@ -14,15 +14,10 @@ import { useNodeOutput } from "./useNodeOutput";
|
||||
import { ViewMoreData } from "./components/ViewMoreData";
|
||||
|
||||
export const NodeDataRenderer = ({ nodeId }: { nodeId: string }) => {
|
||||
const {
|
||||
latestOutputData,
|
||||
copiedKey,
|
||||
handleCopy,
|
||||
executionResultId,
|
||||
latestInputData,
|
||||
} = useNodeOutput(nodeId);
|
||||
const { outputData, copiedKey, handleCopy, executionResultId, inputData } =
|
||||
useNodeOutput(nodeId);
|
||||
|
||||
if (Object.keys(latestOutputData).length === 0) {
|
||||
if (Object.keys(outputData).length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -46,19 +41,18 @@ export const NodeDataRenderer = ({ nodeId }: { nodeId: string }) => {
|
||||
<div className="space-y-2">
|
||||
<Text variant="small-medium">Input</Text>
|
||||
|
||||
<ContentRenderer value={latestInputData} shortContent={false} />
|
||||
<ContentRenderer value={inputData} shortContent={false} />
|
||||
|
||||
<div className="mt-1 flex justify-end gap-1">
|
||||
<NodeDataViewer
|
||||
data={inputData}
|
||||
pinName="Input"
|
||||
nodeId={nodeId}
|
||||
execId={executionResultId}
|
||||
dataType="input"
|
||||
/>
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="small"
|
||||
onClick={() => handleCopy("input", latestInputData)}
|
||||
onClick={() => handleCopy("input", inputData)}
|
||||
className={cn(
|
||||
"h-fit min-w-0 gap-1.5 border border-zinc-200 p-2 text-black hover:text-slate-900",
|
||||
copiedKey === "input" &&
|
||||
@@ -74,72 +68,70 @@ export const NodeDataRenderer = ({ nodeId }: { nodeId: string }) => {
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{Object.entries(latestOutputData)
|
||||
{Object.entries(outputData)
|
||||
.slice(0, 2)
|
||||
.map(([key, value]) => {
|
||||
return (
|
||||
<div key={key} className="flex flex-col gap-2">
|
||||
<div className="flex items-center gap-2">
|
||||
<Text
|
||||
variant="small-medium"
|
||||
className="!font-semibold text-slate-600"
|
||||
>
|
||||
Pin:
|
||||
</Text>
|
||||
<Text variant="small" className="text-slate-700">
|
||||
{beautifyString(key)}
|
||||
</Text>
|
||||
</div>
|
||||
<div className="w-full space-y-2">
|
||||
<Text
|
||||
variant="small"
|
||||
className="!font-semibold text-slate-600"
|
||||
>
|
||||
Data:
|
||||
</Text>
|
||||
<div className="relative space-y-2">
|
||||
{value.map((item, index) => (
|
||||
<div key={index}>
|
||||
<ContentRenderer
|
||||
value={item}
|
||||
shortContent={true}
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
|
||||
<div className="mt-1 flex justify-end gap-1">
|
||||
<NodeDataViewer
|
||||
pinName={key}
|
||||
nodeId={nodeId}
|
||||
execId={executionResultId}
|
||||
/>
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="small"
|
||||
onClick={() => handleCopy(key, value)}
|
||||
className={cn(
|
||||
"h-fit min-w-0 gap-1.5 border border-zinc-200 p-2 text-black hover:text-slate-900",
|
||||
copiedKey === key &&
|
||||
"border-green-400 bg-green-100 hover:border-green-400 hover:bg-green-200",
|
||||
)}
|
||||
>
|
||||
{copiedKey === key ? (
|
||||
<CheckIcon
|
||||
size={12}
|
||||
className="text-green-600"
|
||||
/>
|
||||
) : (
|
||||
<CopyIcon size={12} />
|
||||
)}
|
||||
</Button>
|
||||
.map(([key, value]) => (
|
||||
<div key={key} className="flex flex-col gap-2">
|
||||
<div className="flex items-center gap-2">
|
||||
<Text
|
||||
variant="small-medium"
|
||||
className="!font-semibold text-slate-600"
|
||||
>
|
||||
Pin:
|
||||
</Text>
|
||||
<Text variant="small" className="text-slate-700">
|
||||
{beautifyString(key)}
|
||||
</Text>
|
||||
</div>
|
||||
<div className="w-full space-y-2">
|
||||
<Text
|
||||
variant="small"
|
||||
className="!font-semibold text-slate-600"
|
||||
>
|
||||
Data:
|
||||
</Text>
|
||||
<div className="relative space-y-2">
|
||||
{value.map((item, index) => (
|
||||
<div key={index}>
|
||||
<ContentRenderer value={item} shortContent={true} />
|
||||
</div>
|
||||
))}
|
||||
|
||||
<div className="mt-1 flex justify-end gap-1">
|
||||
<NodeDataViewer
|
||||
data={value}
|
||||
pinName={key}
|
||||
execId={executionResultId}
|
||||
/>
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="small"
|
||||
onClick={() => handleCopy(key, value)}
|
||||
className={cn(
|
||||
"h-fit min-w-0 gap-1.5 border border-zinc-200 p-2 text-black hover:text-slate-900",
|
||||
copiedKey === key &&
|
||||
"border-green-400 bg-green-100 hover:border-green-400 hover:bg-green-200",
|
||||
)}
|
||||
>
|
||||
{copiedKey === key ? (
|
||||
<CheckIcon size={12} className="text-green-600" />
|
||||
) : (
|
||||
<CopyIcon size={12} />
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
<ViewMoreData nodeId={nodeId} />
|
||||
|
||||
{Object.keys(outputData).length > 2 && (
|
||||
<ViewMoreData
|
||||
outputData={outputData}
|
||||
execId={executionResultId}
|
||||
/>
|
||||
)}
|
||||
</AccordionContent>
|
||||
</AccordionItem>
|
||||
</Accordion>
|
||||
|
||||
@@ -19,51 +19,22 @@ import {
|
||||
CopyIcon,
|
||||
DownloadIcon,
|
||||
} from "@phosphor-icons/react";
|
||||
import React, { FC } from "react";
|
||||
import { FC } from "react";
|
||||
import { useNodeDataViewer } from "./useNodeDataViewer";
|
||||
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
|
||||
import { useShallow } from "zustand/react/shallow";
|
||||
import { NodeDataType } from "../../helpers";
|
||||
|
||||
export interface NodeDataViewerProps {
|
||||
data?: any;
|
||||
interface NodeDataViewerProps {
|
||||
data: any;
|
||||
pinName: string;
|
||||
nodeId?: string;
|
||||
execId?: string;
|
||||
isViewMoreData?: boolean;
|
||||
dataType?: NodeDataType;
|
||||
}
|
||||
|
||||
export const NodeDataViewer: FC<NodeDataViewerProps> = ({
|
||||
data,
|
||||
pinName,
|
||||
nodeId,
|
||||
execId = "N/A",
|
||||
isViewMoreData = false,
|
||||
dataType = "output",
|
||||
}) => {
|
||||
const executionResults = useNodeStore(
|
||||
useShallow((state) =>
|
||||
nodeId ? state.getNodeExecutionResults(nodeId) : [],
|
||||
),
|
||||
);
|
||||
const latestInputData = useNodeStore(
|
||||
useShallow((state) =>
|
||||
nodeId ? state.getLatestNodeInputData(nodeId) : undefined,
|
||||
),
|
||||
);
|
||||
const accumulatedOutputData = useNodeStore(
|
||||
useShallow((state) =>
|
||||
nodeId ? state.getAccumulatedNodeOutputData(nodeId) : {},
|
||||
),
|
||||
);
|
||||
|
||||
const resolvedData =
|
||||
data ??
|
||||
(dataType === "input"
|
||||
? (latestInputData ?? {})
|
||||
: (accumulatedOutputData[pinName] ?? []));
|
||||
|
||||
const {
|
||||
outputItems,
|
||||
copyExecutionId,
|
||||
@@ -71,20 +42,7 @@ export const NodeDataViewer: FC<NodeDataViewerProps> = ({
|
||||
handleDownloadItem,
|
||||
dataArray,
|
||||
copiedIndex,
|
||||
groupedExecutions,
|
||||
totalGroupedItems,
|
||||
handleCopyGroupedItem,
|
||||
handleDownloadGroupedItem,
|
||||
copiedKey,
|
||||
} = useNodeDataViewer(
|
||||
resolvedData,
|
||||
pinName,
|
||||
execId,
|
||||
executionResults,
|
||||
dataType,
|
||||
);
|
||||
|
||||
const shouldGroupExecutions = groupedExecutions.length > 0;
|
||||
} = useNodeDataViewer(data, pinName, execId);
|
||||
return (
|
||||
<Dialog styling={{ width: "600px" }}>
|
||||
<TooltipProvider>
|
||||
@@ -110,141 +68,44 @@ export const NodeDataViewer: FC<NodeDataViewerProps> = ({
|
||||
<div className="flex items-center gap-4">
|
||||
<div className="flex items-center gap-2">
|
||||
<Text variant="large-medium" className="text-slate-900">
|
||||
Full {dataType === "input" ? "Input" : "Output"} Preview
|
||||
Full Output Preview
|
||||
</Text>
|
||||
</div>
|
||||
<div className="rounded-full border border-slate-300 bg-slate-100 px-3 py-1.5 text-xs font-medium text-black">
|
||||
{shouldGroupExecutions ? totalGroupedItems : dataArray.length}{" "}
|
||||
item
|
||||
{shouldGroupExecutions
|
||||
? totalGroupedItems !== 1
|
||||
? "s"
|
||||
: ""
|
||||
: dataArray.length !== 1
|
||||
? "s"
|
||||
: ""}{" "}
|
||||
total
|
||||
{dataArray.length} item{dataArray.length !== 1 ? "s" : ""} total
|
||||
</div>
|
||||
</div>
|
||||
<div className="text-sm text-gray-600">
|
||||
{shouldGroupExecutions ? (
|
||||
<div>
|
||||
Pin:{" "}
|
||||
<span className="font-semibold">{beautifyString(pinName)}</span>
|
||||
</div>
|
||||
) : (
|
||||
<>
|
||||
<div className="flex items-center gap-2">
|
||||
<Text variant="body" className="text-slate-600">
|
||||
Execution ID:
|
||||
</Text>
|
||||
<Text
|
||||
variant="body-medium"
|
||||
className="rounded-full border border-gray-300 bg-gray-50 px-2 py-1 font-mono text-xs"
|
||||
>
|
||||
{execId}
|
||||
</Text>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="small"
|
||||
onClick={copyExecutionId}
|
||||
className="h-6 w-6 min-w-0 p-0"
|
||||
>
|
||||
<CopyIcon size={14} />
|
||||
</Button>
|
||||
</div>
|
||||
<div className="mt-2">
|
||||
Pin:{" "}
|
||||
<span className="font-semibold">
|
||||
{beautifyString(pinName)}
|
||||
</span>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
<div className="flex items-center gap-2">
|
||||
<Text variant="body" className="text-slate-600">
|
||||
Execution ID:
|
||||
</Text>
|
||||
<Text
|
||||
variant="body-medium"
|
||||
className="rounded-full border border-gray-300 bg-gray-50 px-2 py-1 font-mono text-xs"
|
||||
>
|
||||
{execId}
|
||||
</Text>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="small"
|
||||
onClick={copyExecutionId}
|
||||
className="h-6 w-6 min-w-0 p-0"
|
||||
>
|
||||
<CopyIcon size={14} />
|
||||
</Button>
|
||||
</div>
|
||||
<div className="mt-2">
|
||||
Pin:{" "}
|
||||
<span className="font-semibold">{beautifyString(pinName)}</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="flex-1 overflow-hidden">
|
||||
<ScrollArea className="h-full">
|
||||
<div className="my-4">
|
||||
{shouldGroupExecutions ? (
|
||||
<div className="space-y-4">
|
||||
{groupedExecutions.map((execution) => (
|
||||
<div
|
||||
key={execution.execId}
|
||||
className="rounded-3xl border border-slate-200 bg-white p-4 shadow-sm"
|
||||
>
|
||||
<div className="flex items-center gap-2">
|
||||
<Text variant="body" className="text-slate-600">
|
||||
Execution ID:
|
||||
</Text>
|
||||
<Text
|
||||
variant="body-medium"
|
||||
className="rounded-full border border-gray-300 bg-gray-50 px-2 py-1 font-mono text-xs"
|
||||
>
|
||||
{execution.execId}
|
||||
</Text>
|
||||
</div>
|
||||
<div className="mt-2 space-y-4">
|
||||
{execution.outputItems.length > 0 ? (
|
||||
execution.outputItems.map((item, index) => (
|
||||
<div
|
||||
key={item.key}
|
||||
className="group flex items-start gap-4"
|
||||
>
|
||||
<div className="w-full flex-1">
|
||||
<OutputItem
|
||||
value={item.value}
|
||||
metadata={item.metadata}
|
||||
renderer={item.renderer}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="flex w-fit gap-3">
|
||||
<Button
|
||||
variant="secondary"
|
||||
className="min-w-0 p-1"
|
||||
size="icon"
|
||||
onClick={() =>
|
||||
handleCopyGroupedItem(
|
||||
execution.execId,
|
||||
index,
|
||||
item,
|
||||
)
|
||||
}
|
||||
aria-label="Copy item"
|
||||
>
|
||||
{copiedKey ===
|
||||
`${execution.execId}-${index}` ? (
|
||||
<CheckIcon className="size-4 text-green-600" />
|
||||
) : (
|
||||
<CopyIcon className="size-4 text-black" />
|
||||
)}
|
||||
</Button>
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="icon"
|
||||
className="min-w-0 p-1"
|
||||
onClick={() =>
|
||||
handleDownloadGroupedItem(item)
|
||||
}
|
||||
aria-label="Download item"
|
||||
>
|
||||
<DownloadIcon className="size-4 text-black" />
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
))
|
||||
) : (
|
||||
<div className="py-4 text-center text-gray-500">
|
||||
No data available
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
) : dataArray.length > 0 ? (
|
||||
{dataArray.length > 0 ? (
|
||||
<div className="space-y-4">
|
||||
{outputItems.map((item, index) => (
|
||||
<div key={item.key} className="group relative">
|
||||
|
||||
@@ -1,70 +1,82 @@
|
||||
import type { OutputMetadata } from "@/components/contextual/OutputRenderers";
|
||||
import { globalRegistry } from "@/components/contextual/OutputRenderers";
|
||||
import { downloadOutputs } from "@/components/contextual/OutputRenderers/utils/download";
|
||||
import { useToast } from "@/components/molecules/Toast/use-toast";
|
||||
import { beautifyString } from "@/lib/utils";
|
||||
import { useState } from "react";
|
||||
import type { NodeExecutionResult } from "@/app/api/__generated__/models/nodeExecutionResult";
|
||||
import {
|
||||
NodeDataType,
|
||||
createOutputItems,
|
||||
getExecutionData,
|
||||
normalizeToArray,
|
||||
type OutputItem,
|
||||
} from "../../helpers";
|
||||
|
||||
export type GroupedExecution = {
|
||||
execId: string;
|
||||
outputItems: Array<OutputItem>;
|
||||
};
|
||||
import React, { useMemo, useState } from "react";
|
||||
|
||||
export const useNodeDataViewer = (
|
||||
data: any,
|
||||
pinName: string,
|
||||
execId: string,
|
||||
executionResults?: NodeExecutionResult[],
|
||||
dataType?: NodeDataType,
|
||||
) => {
|
||||
const { toast } = useToast();
|
||||
const [copiedIndex, setCopiedIndex] = useState<number | null>(null);
|
||||
const [copiedKey, setCopiedKey] = useState<string | null>(null);
|
||||
|
||||
const dataArray = Array.isArray(data) ? data : [data];
|
||||
// Normalize data to array format
|
||||
const dataArray = useMemo(() => {
|
||||
return Array.isArray(data) ? data : [data];
|
||||
}, [data]);
|
||||
|
||||
const outputItems =
|
||||
!dataArray || dataArray.length === 0
|
||||
? []
|
||||
: createOutputItems(dataArray).map((item, index) => ({
|
||||
...item,
|
||||
// Prepare items for the enhanced renderer system
|
||||
const outputItems = useMemo(() => {
|
||||
if (!dataArray) return [];
|
||||
|
||||
const items: Array<{
|
||||
key: string;
|
||||
label: string;
|
||||
value: unknown;
|
||||
metadata?: OutputMetadata;
|
||||
renderer: any;
|
||||
}> = [];
|
||||
|
||||
dataArray.forEach((value, index) => {
|
||||
const metadata: OutputMetadata = {};
|
||||
|
||||
// Extract metadata from the value if it's an object
|
||||
if (
|
||||
typeof value === "object" &&
|
||||
value !== null &&
|
||||
!React.isValidElement(value)
|
||||
) {
|
||||
const objValue = value as any;
|
||||
if (objValue.type) metadata.type = objValue.type;
|
||||
if (objValue.mimeType) metadata.mimeType = objValue.mimeType;
|
||||
if (objValue.filename) metadata.filename = objValue.filename;
|
||||
if (objValue.language) metadata.language = objValue.language;
|
||||
}
|
||||
|
||||
const renderer = globalRegistry.getRenderer(value, metadata);
|
||||
if (renderer) {
|
||||
items.push({
|
||||
key: `item-${index}`,
|
||||
label: index === 0 ? beautifyString(pinName) : "",
|
||||
}));
|
||||
|
||||
const groupedExecutions =
|
||||
!executionResults || executionResults.length === 0
|
||||
? []
|
||||
: [...executionResults].reverse().map((result) => {
|
||||
const rawData = getExecutionData(
|
||||
result,
|
||||
dataType || "output",
|
||||
pinName,
|
||||
);
|
||||
let dataArray: unknown[];
|
||||
if (dataType === "input") {
|
||||
dataArray =
|
||||
rawData !== undefined && rawData !== null ? [rawData] : [];
|
||||
} else {
|
||||
dataArray = normalizeToArray(rawData);
|
||||
}
|
||||
|
||||
const outputItems = createOutputItems(dataArray);
|
||||
return {
|
||||
execId: result.node_exec_id,
|
||||
outputItems,
|
||||
};
|
||||
value,
|
||||
metadata,
|
||||
renderer,
|
||||
});
|
||||
} else {
|
||||
// Fallback to text renderer
|
||||
const textRenderer = globalRegistry
|
||||
.getAllRenderers()
|
||||
.find((r) => r.name === "TextRenderer");
|
||||
if (textRenderer) {
|
||||
items.push({
|
||||
key: `item-${index}`,
|
||||
label: index === 0 ? beautifyString(pinName) : "",
|
||||
value:
|
||||
typeof value === "string"
|
||||
? value
|
||||
: JSON.stringify(value, null, 2),
|
||||
metadata,
|
||||
renderer: textRenderer,
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
const totalGroupedItems = groupedExecutions.reduce(
|
||||
(total, execution) => total + execution.outputItems.length,
|
||||
0,
|
||||
);
|
||||
return items;
|
||||
}, [dataArray, pinName]);
|
||||
|
||||
const copyExecutionId = () => {
|
||||
navigator.clipboard.writeText(execId).then(() => {
|
||||
@@ -110,45 +122,6 @@ export const useNodeDataViewer = (
|
||||
]);
|
||||
};
|
||||
|
||||
const handleCopyGroupedItem = async (
|
||||
execId: string,
|
||||
index: number,
|
||||
item: OutputItem,
|
||||
) => {
|
||||
const copyContent = item.renderer.getCopyContent(item.value, item.metadata);
|
||||
|
||||
if (!copyContent) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
let text: string;
|
||||
if (typeof copyContent.data === "string") {
|
||||
text = copyContent.data;
|
||||
} else if (copyContent.fallbackText) {
|
||||
text = copyContent.fallbackText;
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
|
||||
await navigator.clipboard.writeText(text);
|
||||
setCopiedKey(`${execId}-${index}`);
|
||||
setTimeout(() => setCopiedKey(null), 2000);
|
||||
} catch (error) {
|
||||
console.error("Failed to copy:", error);
|
||||
}
|
||||
};
|
||||
|
||||
const handleDownloadGroupedItem = (item: OutputItem) => {
|
||||
downloadOutputs([
|
||||
{
|
||||
value: item.value,
|
||||
metadata: item.metadata,
|
||||
renderer: item.renderer,
|
||||
},
|
||||
]);
|
||||
};
|
||||
|
||||
return {
|
||||
outputItems,
|
||||
dataArray,
|
||||
@@ -156,10 +129,5 @@ export const useNodeDataViewer = (
|
||||
handleCopyItem,
|
||||
handleDownloadItem,
|
||||
copiedIndex,
|
||||
groupedExecutions,
|
||||
totalGroupedItems,
|
||||
handleCopyGroupedItem,
|
||||
handleDownloadGroupedItem,
|
||||
copiedKey,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -8,28 +8,16 @@ import { useState } from "react";
|
||||
import { NodeDataViewer } from "./NodeDataViewer/NodeDataViewer";
|
||||
import { useToast } from "@/components/molecules/Toast/use-toast";
|
||||
import { CheckIcon, CopyIcon } from "@phosphor-icons/react";
|
||||
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
|
||||
import { useShallow } from "zustand/react/shallow";
|
||||
import {
|
||||
NodeDataType,
|
||||
getExecutionEntries,
|
||||
normalizeToArray,
|
||||
} from "../helpers";
|
||||
|
||||
export const ViewMoreData = ({
|
||||
nodeId,
|
||||
dataType = "output",
|
||||
outputData,
|
||||
execId,
|
||||
}: {
|
||||
nodeId: string;
|
||||
dataType?: NodeDataType;
|
||||
outputData: Record<string, Array<any>>;
|
||||
execId?: string;
|
||||
}) => {
|
||||
const [copiedKey, setCopiedKey] = useState<string | null>(null);
|
||||
const { toast } = useToast();
|
||||
const executionResults = useNodeStore(
|
||||
useShallow((state) => state.getNodeExecutionResults(nodeId)),
|
||||
);
|
||||
|
||||
const reversedExecutionResults = [...executionResults].reverse();
|
||||
|
||||
const handleCopy = (key: string, value: any) => {
|
||||
const textToCopy =
|
||||
@@ -41,8 +29,8 @@ export const ViewMoreData = ({
|
||||
setTimeout(() => setCopiedKey(null), 2000);
|
||||
};
|
||||
|
||||
const copyExecutionId = (executionId: string) => {
|
||||
navigator.clipboard.writeText(executionId || "N/A").then(() => {
|
||||
const copyExecutionId = () => {
|
||||
navigator.clipboard.writeText(execId || "N/A").then(() => {
|
||||
toast({
|
||||
title: "Execution ID copied to clipboard!",
|
||||
duration: 2000,
|
||||
@@ -54,7 +42,7 @@ export const ViewMoreData = ({
|
||||
<Dialog styling={{ width: "600px", paddingRight: "16px" }}>
|
||||
<Dialog.Trigger>
|
||||
<Button
|
||||
variant="secondary"
|
||||
variant="primary"
|
||||
size="small"
|
||||
className="h-fit w-fit min-w-0 !text-xs"
|
||||
>
|
||||
@@ -64,114 +52,83 @@ export const ViewMoreData = ({
|
||||
<Dialog.Content>
|
||||
<div className="flex flex-col gap-4">
|
||||
<Text variant="h4" className="text-slate-900">
|
||||
Complete {dataType === "input" ? "Input" : "Output"} Data
|
||||
Complete Output Data
|
||||
</Text>
|
||||
|
||||
<div className="flex items-center gap-2">
|
||||
<Text variant="body" className="text-slate-600">
|
||||
Execution ID:
|
||||
</Text>
|
||||
<Text
|
||||
variant="body-medium"
|
||||
className="rounded-full border border-gray-300 bg-gray-50 px-2 py-1 font-mono text-xs"
|
||||
>
|
||||
{execId}
|
||||
</Text>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="small"
|
||||
onClick={copyExecutionId}
|
||||
className="h-6 w-6 min-w-0 p-0"
|
||||
>
|
||||
<CopyIcon size={14} />
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
<ScrollArea className="h-full">
|
||||
<div className="flex flex-col gap-4">
|
||||
{reversedExecutionResults.map((result) => (
|
||||
<div
|
||||
key={result.node_exec_id}
|
||||
className="rounded-3xl border border-slate-200 bg-white p-4 shadow-sm"
|
||||
>
|
||||
{Object.entries(outputData).map(([key, value]) => (
|
||||
<div key={key} className="flex flex-col gap-2">
|
||||
<div className="flex items-center gap-2">
|
||||
<Text variant="body" className="text-slate-600">
|
||||
Execution ID:
|
||||
</Text>
|
||||
<Text
|
||||
variant="body-medium"
|
||||
className="rounded-full border border-gray-300 bg-gray-50 px-2 py-1 font-mono text-xs"
|
||||
className="!font-semibold text-slate-600"
|
||||
>
|
||||
{result.node_exec_id}
|
||||
Pin:
|
||||
</Text>
|
||||
<Text variant="body-medium" className="text-slate-700">
|
||||
{beautifyString(key)}
|
||||
</Text>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="small"
|
||||
onClick={() => copyExecutionId(result.node_exec_id)}
|
||||
className="h-6 w-6 min-w-0 p-0"
|
||||
>
|
||||
<CopyIcon size={14} />
|
||||
</Button>
|
||||
</div>
|
||||
<div className="w-full space-y-2">
|
||||
<Text
|
||||
variant="body-medium"
|
||||
className="!font-semibold text-slate-600"
|
||||
>
|
||||
Data:
|
||||
</Text>
|
||||
<div className="relative space-y-2">
|
||||
{value.map((item, index) => (
|
||||
<div key={index}>
|
||||
<ContentRenderer value={item} shortContent={false} />
|
||||
</div>
|
||||
))}
|
||||
|
||||
<div className="mt-4 flex flex-col gap-4">
|
||||
{getExecutionEntries(result, dataType).map(
|
||||
([key, value]) => {
|
||||
const normalizedValue = normalizeToArray(value);
|
||||
return (
|
||||
<div key={key} className="flex flex-col gap-2">
|
||||
<div className="flex items-center gap-2">
|
||||
<Text
|
||||
variant="body-medium"
|
||||
className="!font-semibold text-slate-600"
|
||||
>
|
||||
Pin:
|
||||
</Text>
|
||||
<Text
|
||||
variant="body-medium"
|
||||
className="text-slate-700"
|
||||
>
|
||||
{beautifyString(key)}
|
||||
</Text>
|
||||
</div>
|
||||
<div className="w-full space-y-2">
|
||||
<Text
|
||||
variant="body-medium"
|
||||
className="!font-semibold text-slate-600"
|
||||
>
|
||||
Data:
|
||||
</Text>
|
||||
<div className="relative space-y-2">
|
||||
{normalizedValue.map((item, index) => (
|
||||
<div key={index}>
|
||||
<ContentRenderer
|
||||
value={item}
|
||||
shortContent={false}
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
|
||||
<div className="mt-1 flex justify-end gap-1">
|
||||
<NodeDataViewer
|
||||
data={normalizedValue}
|
||||
pinName={key}
|
||||
execId={result.node_exec_id}
|
||||
isViewMoreData={true}
|
||||
dataType={dataType}
|
||||
/>
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="small"
|
||||
onClick={() =>
|
||||
handleCopy(
|
||||
`${result.node_exec_id}-${key}`,
|
||||
normalizedValue,
|
||||
)
|
||||
}
|
||||
className={cn(
|
||||
"h-fit min-w-0 gap-1.5 border border-zinc-200 p-2 text-black hover:text-slate-900",
|
||||
copiedKey ===
|
||||
`${result.node_exec_id}-${key}` &&
|
||||
"border-green-400 bg-green-100 hover:border-green-400 hover:bg-green-200",
|
||||
)}
|
||||
>
|
||||
{copiedKey ===
|
||||
`${result.node_exec_id}-${key}` ? (
|
||||
<CheckIcon
|
||||
size={16}
|
||||
className="text-green-600"
|
||||
/>
|
||||
) : (
|
||||
<CopyIcon size={16} />
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
},
|
||||
)}
|
||||
<div className="mt-1 flex justify-end gap-1">
|
||||
<NodeDataViewer
|
||||
data={value}
|
||||
pinName={key}
|
||||
execId={execId}
|
||||
isViewMoreData={true}
|
||||
/>
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="small"
|
||||
onClick={() => handleCopy(key, value)}
|
||||
className={cn(
|
||||
"h-fit min-w-0 gap-1.5 border border-zinc-200 p-2 text-black hover:text-slate-900",
|
||||
copiedKey === key &&
|
||||
"border-green-400 bg-green-100 hover:border-green-400 hover:bg-green-200",
|
||||
)}
|
||||
>
|
||||
{copiedKey === key ? (
|
||||
<CheckIcon size={16} className="text-green-600" />
|
||||
) : (
|
||||
<CopyIcon size={16} />
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
|
||||
@@ -1,83 +0,0 @@
|
||||
import type { NodeExecutionResult } from "@/app/api/__generated__/models/nodeExecutionResult";
|
||||
import type { OutputMetadata } from "@/components/contextual/OutputRenderers";
|
||||
import { globalRegistry } from "@/components/contextual/OutputRenderers";
|
||||
import React from "react";
|
||||
|
||||
export type NodeDataType = "input" | "output";
|
||||
|
||||
export type OutputItem = {
|
||||
key: string;
|
||||
value: unknown;
|
||||
metadata?: OutputMetadata;
|
||||
renderer: any;
|
||||
};
|
||||
|
||||
export const normalizeToArray = (value: unknown) => {
|
||||
if (value === undefined) return [];
|
||||
return Array.isArray(value) ? value : [value];
|
||||
};
|
||||
|
||||
export const getExecutionData = (
|
||||
result: NodeExecutionResult,
|
||||
dataType: NodeDataType,
|
||||
pinName: string,
|
||||
) => {
|
||||
if (dataType === "input") {
|
||||
return result.input_data;
|
||||
}
|
||||
|
||||
return result.output_data?.[pinName];
|
||||
};
|
||||
|
||||
export const createOutputItems = (dataArray: unknown[]): Array<OutputItem> => {
|
||||
const items: Array<OutputItem> = [];
|
||||
|
||||
dataArray.forEach((value, index) => {
|
||||
const metadata: OutputMetadata = {};
|
||||
|
||||
if (
|
||||
typeof value === "object" &&
|
||||
value !== null &&
|
||||
!React.isValidElement(value)
|
||||
) {
|
||||
const objValue = value as any;
|
||||
if (objValue.type) metadata.type = objValue.type;
|
||||
if (objValue.mimeType) metadata.mimeType = objValue.mimeType;
|
||||
if (objValue.filename) metadata.filename = objValue.filename;
|
||||
if (objValue.language) metadata.language = objValue.language;
|
||||
}
|
||||
|
||||
const renderer = globalRegistry.getRenderer(value, metadata);
|
||||
if (renderer) {
|
||||
items.push({
|
||||
key: `item-${index}`,
|
||||
value,
|
||||
metadata,
|
||||
renderer,
|
||||
});
|
||||
} else {
|
||||
const textRenderer = globalRegistry
|
||||
.getAllRenderers()
|
||||
.find((r) => r.name === "TextRenderer");
|
||||
if (textRenderer) {
|
||||
items.push({
|
||||
key: `item-${index}`,
|
||||
value:
|
||||
typeof value === "string" ? value : JSON.stringify(value, null, 2),
|
||||
metadata,
|
||||
renderer: textRenderer,
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return items;
|
||||
};
|
||||
|
||||
export const getExecutionEntries = (
|
||||
result: NodeExecutionResult,
|
||||
dataType: NodeDataType,
|
||||
) => {
|
||||
const data = dataType === "input" ? result.input_data : result.output_data;
|
||||
return Object.entries(data || {});
|
||||
};
|
||||
@@ -7,18 +7,15 @@ export const useNodeOutput = (nodeId: string) => {
const [copiedKey, setCopiedKey] = useState<string | null>(null);
const { toast } = useToast();

const latestResult = useNodeStore(
useShallow((state) => state.getLatestNodeExecutionResult(nodeId)),
const nodeExecutionResult = useNodeStore(
useShallow((state) => state.getNodeExecutionResult(nodeId)),
);

const latestInputData = useNodeStore(
useShallow((state) => state.getLatestNodeInputData(nodeId)),
);

const latestOutputData: Record<string, Array<any>> = useNodeStore(
useShallow((state) => state.getLatestNodeOutputData(nodeId) || {}),
);
const inputData = nodeExecutionResult?.input_data;

const outputData: Record<string, Array<any>> = {
...nodeExecutionResult?.output_data,
};
const handleCopy = async (key: string, value: any) => {
try {
const text = JSON.stringify(value, null, 2);
@@ -38,12 +35,11 @@ export const useNodeOutput = (nodeId: string) => {
});
}
};

return {
latestOutputData,
latestInputData,
outputData,
inputData,
copiedKey,
handleCopy,
executionResultId: latestResult?.node_exec_id,
executionResultId: nodeExecutionResult?.node_exec_id,
};
};

@@ -1,7 +1,10 @@
import { useState, useCallback, useEffect } from "react";
import { useShallow } from "zustand/react/shallow";
import { useGraphStore } from "@/app/(platform)/build/stores/graphStore";
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
import {
useNodeStore,
NodeResolutionData,
} from "@/app/(platform)/build/stores/nodeStore";
import { useEdgeStore } from "@/app/(platform)/build/stores/edgeStore";
import {
useSubAgentUpdate,
@@ -10,7 +13,6 @@ import {
} from "@/app/(platform)/build/hooks/useSubAgentUpdate";
import { GraphInputSchema, GraphOutputSchema } from "@/lib/autogpt-server-api";
import { CustomNodeData } from "../../CustomNode";
import { NodeResolutionData } from "@/app/(platform)/build/stores/types";

// Stable empty set to avoid creating new references in selectors
const EMPTY_SET: Set<string> = new Set();

@@ -1,5 +1,5 @@
import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecutionStatus";
import { NodeResolutionData } from "@/app/(platform)/build/stores/types";
import { NodeResolutionData } from "@/app/(platform)/build/stores/nodeStore";
import { RJSFSchema } from "@rjsf/utils";

export const nodeStyleBasedOnStatus: Record<AgentExecutionStatus, string> = {

@@ -1,16 +0,0 @@
export const accumulateExecutionData = (
accumulated: Record<string, unknown[]>,
data: Record<string, unknown> | undefined,
) => {
if (!data) return { ...accumulated };
const next = { ...accumulated };
Object.entries(data).forEach(([key, values]) => {
const nextValues = Array.isArray(values) ? values : [values];
if (next[key]) {
next[key] = [...next[key], ...nextValues];
} else {
next[key] = [...nextValues];
}
});
return next;
};
@@ -10,8 +10,6 @@ import {
|
||||
import { Node } from "@/app/api/__generated__/models/node";
|
||||
import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecutionStatus";
|
||||
import { NodeExecutionResult } from "@/app/api/__generated__/models/nodeExecutionResult";
|
||||
import { NodeExecutionResultInputData } from "@/app/api/__generated__/models/nodeExecutionResultInputData";
|
||||
import { NodeExecutionResultOutputData } from "@/app/api/__generated__/models/nodeExecutionResultOutputData";
|
||||
import { useHistoryStore } from "./historyStore";
|
||||
import { useEdgeStore } from "./edgeStore";
|
||||
import { BlockUIType } from "../components/types";
|
||||
@@ -20,10 +18,31 @@ import {
|
||||
ensurePathExists,
|
||||
parseHandleIdToPath,
|
||||
} from "@/components/renderers/InputRenderer/helpers";
|
||||
import { accumulateExecutionData } from "./helpers";
|
||||
import { NodeResolutionData } from "./types";
|
||||
import { IncompatibilityInfo } from "../hooks/useSubAgentUpdate/types";
|
||||
|
||||
// Resolution mode data stored per node
|
||||
export type NodeResolutionData = {
|
||||
incompatibilities: IncompatibilityInfo;
|
||||
// The NEW schema from the update (what we're updating TO)
|
||||
pendingUpdate: {
|
||||
input_schema: Record<string, unknown>;
|
||||
output_schema: Record<string, unknown>;
|
||||
};
|
||||
// The OLD schema before the update (what we're updating FROM)
|
||||
// Needed to merge and show removed inputs during resolution
|
||||
currentSchema: {
|
||||
input_schema: Record<string, unknown>;
|
||||
output_schema: Record<string, unknown>;
|
||||
};
|
||||
// The full updated hardcoded values to apply when resolution completes
|
||||
pendingHardcodedValues: Record<string, unknown>;
|
||||
};
|
||||
|
||||
// Minimum movement (in pixels) required before logging position change to history
|
||||
// Prevents spamming history with small movements when clicking on inputs inside blocks
|
||||
const MINIMUM_MOVE_BEFORE_LOG = 50;
|
||||
|
||||
// Track initial positions when drag starts (outside store to avoid re-renders)
|
||||
const dragStartPositions: Record<string, XYPosition> = {};
|
||||
|
||||
let dragStartState: { nodes: CustomNode[]; edges: CustomEdge[] } | null = null;
|
||||
@@ -33,15 +52,6 @@ type NodeStore = {
|
||||
nodeCounter: number;
|
||||
setNodeCounter: (nodeCounter: number) => void;
|
||||
nodeAdvancedStates: Record<string, boolean>;
|
||||
|
||||
latestNodeInputData: Record<string, NodeExecutionResultInputData | undefined>;
|
||||
latestNodeOutputData: Record<
|
||||
string,
|
||||
NodeExecutionResultOutputData | undefined
|
||||
>;
|
||||
accumulatedNodeInputData: Record<string, Record<string, unknown[]>>;
|
||||
accumulatedNodeOutputData: Record<string, Record<string, unknown[]>>;
|
||||
|
||||
setNodes: (nodes: CustomNode[]) => void;
|
||||
onNodesChange: (changes: NodeChange<CustomNode>[]) => void;
|
||||
addNode: (node: CustomNode) => void;
|
||||
@@ -62,26 +72,12 @@ type NodeStore = {
|
||||
|
||||
updateNodeStatus: (nodeId: string, status: AgentExecutionStatus) => void;
|
||||
getNodeStatus: (nodeId: string) => AgentExecutionStatus | undefined;
|
||||
cleanNodesStatuses: () => void;
|
||||
|
||||
updateNodeExecutionResult: (
|
||||
nodeId: string,
|
||||
result: NodeExecutionResult,
|
||||
) => void;
|
||||
getNodeExecutionResults: (nodeId: string) => NodeExecutionResult[];
|
||||
getLatestNodeInputData: (
|
||||
nodeId: string,
|
||||
) => NodeExecutionResultInputData | undefined;
|
||||
getLatestNodeOutputData: (
|
||||
nodeId: string,
|
||||
) => NodeExecutionResultOutputData | undefined;
|
||||
getAccumulatedNodeInputData: (nodeId: string) => Record<string, unknown[]>;
|
||||
getAccumulatedNodeOutputData: (nodeId: string) => Record<string, unknown[]>;
|
||||
getLatestNodeExecutionResult: (
|
||||
nodeId: string,
|
||||
) => NodeExecutionResult | undefined;
|
||||
clearAllNodeExecutionResults: () => void;
|
||||
|
||||
getNodeExecutionResult: (nodeId: string) => NodeExecutionResult | undefined;
|
||||
getNodeBlockUIType: (nodeId: string) => BlockUIType;
|
||||
hasWebhookNodes: () => boolean;
|
||||
|
||||
@@ -126,10 +122,6 @@ export const useNodeStore = create<NodeStore>((set, get) => ({
|
||||
nodeCounter: 0,
|
||||
setNodeCounter: (nodeCounter) => set({ nodeCounter }),
|
||||
nodeAdvancedStates: {},
|
||||
latestNodeInputData: {},
|
||||
latestNodeOutputData: {},
|
||||
accumulatedNodeInputData: {},
|
||||
accumulatedNodeOutputData: {},
|
||||
incrementNodeCounter: () =>
|
||||
set((state) => ({
|
||||
nodeCounter: state.nodeCounter + 1,
|
||||
@@ -325,163 +317,18 @@ export const useNodeStore = create<NodeStore>((set, get) => ({
|
||||
return get().nodes.find((n) => n.id === nodeId)?.data?.status;
|
||||
},
|
||||
|
||||
cleanNodesStatuses: () => {
|
||||
set((state) => ({
|
||||
nodes: state.nodes.map((n) => ({
|
||||
...n,
|
||||
data: { ...n.data, status: undefined },
|
||||
})),
|
||||
}));
|
||||
},
|
||||
|
||||
updateNodeExecutionResult: (nodeId: string, result: NodeExecutionResult) => {
|
||||
set((state) => {
|
||||
let latestNodeInputData = state.latestNodeInputData;
|
||||
let latestNodeOutputData = state.latestNodeOutputData;
|
||||
let accumulatedNodeInputData = state.accumulatedNodeInputData;
|
||||
let accumulatedNodeOutputData = state.accumulatedNodeOutputData;
|
||||
|
||||
const nodes = state.nodes.map((n) => {
|
||||
if (n.id !== nodeId) return n;
|
||||
|
||||
const existingResults = n.data.nodeExecutionResults || [];
|
||||
const duplicateIndex = existingResults.findIndex(
|
||||
(r) => r.node_exec_id === result.node_exec_id,
|
||||
);
|
||||
|
||||
if (duplicateIndex !== -1) {
|
||||
const oldResult = existingResults[duplicateIndex];
|
||||
const inputDataChanged =
|
||||
JSON.stringify(oldResult.input_data) !==
|
||||
JSON.stringify(result.input_data);
|
||||
const outputDataChanged =
|
||||
JSON.stringify(oldResult.output_data) !==
|
||||
JSON.stringify(result.output_data);
|
||||
|
||||
if (!inputDataChanged && !outputDataChanged) {
|
||||
return n;
|
||||
}
|
||||
|
||||
const updatedResults = [...existingResults];
|
||||
updatedResults[duplicateIndex] = result;
|
||||
|
||||
const recomputedAccumulatedInput = updatedResults.reduce(
|
||||
(acc, r) => accumulateExecutionData(acc, r.input_data),
|
||||
{} as Record<string, unknown[]>,
|
||||
);
|
||||
const recomputedAccumulatedOutput = updatedResults.reduce(
|
||||
(acc, r) => accumulateExecutionData(acc, r.output_data),
|
||||
{} as Record<string, unknown[]>,
|
||||
);
|
||||
|
||||
const mostRecentResult = updatedResults[updatedResults.length - 1];
|
||||
latestNodeInputData = {
|
||||
...latestNodeInputData,
|
||||
[nodeId]: mostRecentResult.input_data,
|
||||
};
|
||||
latestNodeOutputData = {
|
||||
...latestNodeOutputData,
|
||||
[nodeId]: mostRecentResult.output_data,
|
||||
};
|
||||
|
||||
accumulatedNodeInputData = {
|
||||
...accumulatedNodeInputData,
|
||||
[nodeId]: recomputedAccumulatedInput,
|
||||
};
|
||||
accumulatedNodeOutputData = {
|
||||
...accumulatedNodeOutputData,
|
||||
[nodeId]: recomputedAccumulatedOutput,
|
||||
};
|
||||
|
||||
return {
|
||||
...n,
|
||||
data: {
|
||||
...n.data,
|
||||
nodeExecutionResults: updatedResults,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
accumulatedNodeInputData = {
|
||||
...accumulatedNodeInputData,
|
||||
[nodeId]: accumulateExecutionData(
|
||||
accumulatedNodeInputData[nodeId] || {},
|
||||
result.input_data,
|
||||
),
|
||||
};
|
||||
accumulatedNodeOutputData = {
|
||||
...accumulatedNodeOutputData,
|
||||
[nodeId]: accumulateExecutionData(
|
||||
accumulatedNodeOutputData[nodeId] || {},
|
||||
result.output_data,
|
||||
),
|
||||
};
|
||||
|
||||
latestNodeInputData = {
|
||||
...latestNodeInputData,
|
||||
[nodeId]: result.input_data,
|
||||
};
|
||||
latestNodeOutputData = {
|
||||
...latestNodeOutputData,
|
||||
[nodeId]: result.output_data,
|
||||
};
|
||||
|
||||
return {
|
||||
...n,
|
||||
data: {
|
||||
...n.data,
|
||||
nodeExecutionResults: [...existingResults, result],
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
nodes,
|
||||
latestNodeInputData,
|
||||
latestNodeOutputData,
|
||||
accumulatedNodeInputData,
|
||||
accumulatedNodeOutputData,
|
||||
};
|
||||
});
|
||||
},
|
||||
getNodeExecutionResults: (nodeId: string) => {
|
||||
return (
|
||||
get().nodes.find((n) => n.id === nodeId)?.data?.nodeExecutionResults || []
|
||||
);
|
||||
},
|
||||
getLatestNodeInputData: (nodeId: string) => {
|
||||
return get().latestNodeInputData[nodeId];
|
||||
},
|
||||
getLatestNodeOutputData: (nodeId: string) => {
|
||||
return get().latestNodeOutputData[nodeId];
|
||||
},
|
||||
getAccumulatedNodeInputData: (nodeId: string) => {
|
||||
return get().accumulatedNodeInputData[nodeId] || {};
|
||||
},
|
||||
getAccumulatedNodeOutputData: (nodeId: string) => {
|
||||
return get().accumulatedNodeOutputData[nodeId] || {};
|
||||
},
|
||||
getLatestNodeExecutionResult: (nodeId: string) => {
|
||||
const results =
|
||||
get().nodes.find((n) => n.id === nodeId)?.data?.nodeExecutionResults ||
|
||||
[];
|
||||
return results.length > 0 ? results[results.length - 1] : undefined;
|
||||
},
|
||||
clearAllNodeExecutionResults: () => {
|
||||
set((state) => ({
|
||||
nodes: state.nodes.map((n) => ({
|
||||
...n,
|
||||
data: {
|
||||
...n.data,
|
||||
nodeExecutionResults: [],
|
||||
},
|
||||
})),
|
||||
latestNodeInputData: {},
|
||||
latestNodeOutputData: {},
|
||||
accumulatedNodeInputData: {},
|
||||
accumulatedNodeOutputData: {},
|
||||
nodes: state.nodes.map((n) =>
|
||||
n.id === nodeId
|
||||
? { ...n, data: { ...n.data, nodeExecutionResult: result } }
|
||||
: n,
|
||||
),
|
||||
}));
|
||||
},
|
||||
getNodeExecutionResult: (nodeId: string) => {
|
||||
return get().nodes.find((n) => n.id === nodeId)?.data?.nodeExecutionResult;
|
||||
},
|
||||
getNodeBlockUIType: (nodeId: string) => {
|
||||
return (
|
||||
get().nodes.find((n) => n.id === nodeId)?.data?.uiType ??
|
||||
|
||||
@@ -1,14 +0,0 @@
import { IncompatibilityInfo } from "../hooks/useSubAgentUpdate/types";

export type NodeResolutionData = {
incompatibilities: IncompatibilityInfo;
pendingUpdate: {
input_schema: Record<string, unknown>;
output_schema: Record<string, unknown>;
};
currentSchema: {
input_schema: Record<string, unknown>;
output_schema: Record<string, unknown>;
};
pendingHardcodedValues: Record<string, unknown>;
};
@@ -6,40 +6,28 @@ import { BackendAPIProvider } from "@/lib/autogpt-server-api/context";
import { getQueryClient } from "@/lib/react-query/queryClient";
import CredentialsProvider from "@/providers/agent-credentials/credentials-provider";
import OnboardingProvider from "@/providers/onboarding/onboarding-provider";
import {
PostHogPageViewTracker,
PostHogProvider,
PostHogUserTracker,
} from "@/providers/posthog/posthog-provider";
import { LaunchDarklyProvider } from "@/services/feature-flags/feature-flag-provider";
import { QueryClientProvider } from "@tanstack/react-query";
import { ThemeProvider, ThemeProviderProps } from "next-themes";
import { NuqsAdapter } from "nuqs/adapters/next/app";
import { Suspense } from "react";

export function Providers({ children, ...props }: ThemeProviderProps) {
const queryClient = getQueryClient();
return (
<QueryClientProvider client={queryClient}>
<NuqsAdapter>
<PostHogProvider>
<BackendAPIProvider>
<SentryUserTracker />
<PostHogUserTracker />
<Suspense fallback={null}>
<PostHogPageViewTracker />
</Suspense>
<CredentialsProvider>
<LaunchDarklyProvider>
<OnboardingProvider>
<ThemeProvider forcedTheme="light" {...props}>
<TooltipProvider>{children}</TooltipProvider>
</ThemeProvider>
</OnboardingProvider>
</LaunchDarklyProvider>
</CredentialsProvider>
</BackendAPIProvider>
</PostHogProvider>
<BackendAPIProvider>
<SentryUserTracker />
<CredentialsProvider>
<LaunchDarklyProvider>
<OnboardingProvider>
<ThemeProvider forcedTheme="light" {...props}>
<TooltipProvider>{children}</TooltipProvider>
</ThemeProvider>
</OnboardingProvider>
</LaunchDarklyProvider>
</CredentialsProvider>
</BackendAPIProvider>
</NuqsAdapter>
</QueryClientProvider>
);

@@ -213,23 +213,6 @@ export function parseToolResponse(
timestamp: timestamp || new Date(),
};
}
if (responseType === "clarification_needed") {
return {
type: "clarification_needed",
toolName,
questions:
(parsedResult.questions as Array<{
question: string;
keyword: string;
example?: string;
}>) || [],
message:
(parsedResult.message as string) ||
"I need more information to proceed.",
sessionId: (parsedResult.session_id as string) || "",
timestamp: timestamp || new Date(),
};
}
if (responseType === "need_login") {
return {
type: "login_needed",

@@ -14,7 +14,6 @@ import { AgentCarouselMessage } from "../AgentCarouselMessage/AgentCarouselMessa
|
||||
import { AIChatBubble } from "../AIChatBubble/AIChatBubble";
|
||||
import { AuthPromptWidget } from "../AuthPromptWidget/AuthPromptWidget";
|
||||
import { ChatCredentialsSetup } from "../ChatCredentialsSetup/ChatCredentialsSetup";
|
||||
import { ClarificationQuestionsWidget } from "../ClarificationQuestionsWidget/ClarificationQuestionsWidget";
|
||||
import { ExecutionStartedMessage } from "../ExecutionStartedMessage/ExecutionStartedMessage";
|
||||
import { MarkdownContent } from "../MarkdownContent/MarkdownContent";
|
||||
import { NoResultsMessage } from "../NoResultsMessage/NoResultsMessage";
|
||||
@@ -70,7 +69,6 @@ export function ChatMessage({
|
||||
isToolResponse,
|
||||
isLoginNeeded,
|
||||
isCredentialsNeeded,
|
||||
isClarificationNeeded,
|
||||
} = useChatMessage(message);
|
||||
const displayContent = getDisplayContent(message, isUser);
|
||||
|
||||
@@ -98,18 +96,6 @@ export function ChatMessage({
|
||||
}
|
||||
}
|
||||
|
||||
function handleClarificationAnswers(answers: Record<string, string>) {
|
||||
if (onSendMessage) {
|
||||
const contextMessage = Object.entries(answers)
|
||||
.map(([keyword, answer]) => `${keyword}: ${answer}`)
|
||||
.join("\n");
|
||||
|
||||
onSendMessage(
|
||||
`I have the answers to your questions:\n\n${contextMessage}\n\nPlease proceed with creating the agent.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const handleCopy = useCallback(
|
||||
async function handleCopy() {
|
||||
if (message.type !== "message") return;
|
||||
@@ -155,17 +141,6 @@ export function ChatMessage({
|
||||
);
|
||||
}
|
||||
|
||||
if (isClarificationNeeded && message.type === "clarification_needed") {
|
||||
return (
|
||||
<ClarificationQuestionsWidget
|
||||
questions={message.questions}
|
||||
message={message.message}
|
||||
onSubmitAnswers={handleClarificationAnswers}
|
||||
className={className}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
// Render login needed messages
|
||||
if (isLoginNeeded && message.type === "login_needed") {
|
||||
// If user is already logged in, show success message instead of auth prompt
|
||||
|
||||
@@ -91,18 +91,6 @@ export type ChatMessageData =
credentialsSchema?: Record<string, any>;
message: string;
timestamp?: string | Date;
}
| {
type: "clarification_needed";
toolName: string;
questions: Array<{
question: string;
keyword: string;
example?: string;
}>;
message: string;
sessionId: string;
timestamp?: string | Date;
};

export function useChatMessage(message: ChatMessageData) {
@@ -123,6 +111,5 @@ export function useChatMessage(message: ChatMessageData) {
isAgentCarousel: message.type === "agent_carousel",
isExecutionStarted: message.type === "execution_started",
isInputsNeeded: message.type === "inputs_needed",
isClarificationNeeded: message.type === "clarification_needed",
};
}

@@ -1,154 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import { Card } from "@/components/atoms/Card/Card";
|
||||
import { Input } from "@/components/atoms/Input/Input";
|
||||
import { Text } from "@/components/atoms/Text/Text";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { CheckCircleIcon, QuestionIcon } from "@phosphor-icons/react";
|
||||
import { useState } from "react";
|
||||
|
||||
export interface ClarifyingQuestion {
|
||||
question: string;
|
||||
keyword: string;
|
||||
example?: string;
|
||||
}
|
||||
|
||||
interface Props {
|
||||
questions: ClarifyingQuestion[];
|
||||
message: string;
|
||||
onSubmitAnswers: (answers: Record<string, string>) => void;
|
||||
onCancel?: () => void;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export function ClarificationQuestionsWidget({
|
||||
questions,
|
||||
message,
|
||||
onSubmitAnswers,
|
||||
onCancel,
|
||||
className,
|
||||
}: Props) {
|
||||
const [answers, setAnswers] = useState<Record<string, string>>({});
|
||||
|
||||
function handleAnswerChange(keyword: string, value: string) {
|
||||
setAnswers((prev) => ({ ...prev, [keyword]: value }));
|
||||
}
|
||||
|
||||
function handleSubmit() {
|
||||
// Check if all questions are answered
|
||||
const allAnswered = questions.every((q) => answers[q.keyword]?.trim());
|
||||
if (!allAnswered) {
|
||||
return;
|
||||
}
|
||||
onSubmitAnswers(answers);
|
||||
}
|
||||
|
||||
const allAnswered = questions.every((q) => answers[q.keyword]?.trim());
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
"group relative flex w-full justify-start gap-3 px-4 py-3",
|
||||
className,
|
||||
)}
|
||||
>
|
||||
<div className="flex w-full max-w-3xl gap-3">
|
||||
<div className="flex-shrink-0">
|
||||
<div className="flex h-7 w-7 items-center justify-center rounded-lg bg-indigo-500">
|
||||
<QuestionIcon className="h-4 w-4 text-indigo-50" weight="bold" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="flex min-w-0 flex-1 flex-col">
|
||||
<Card className="space-y-4 p-4">
|
||||
<div>
|
||||
<Text variant="h4" className="mb-1 text-slate-900">
|
||||
I need more information
|
||||
</Text>
|
||||
<Text variant="small" className="text-slate-600">
|
||||
{message}
|
||||
</Text>
|
||||
</div>
|
||||
|
||||
<div className="space-y-3">
|
||||
{questions.map((q, index) => {
|
||||
const isAnswered = !!answers[q.keyword]?.trim();
|
||||
|
||||
return (
|
||||
<div
|
||||
key={`${q.keyword}-${index}`}
|
||||
className={cn(
|
||||
"relative rounded-lg border p-3",
|
||||
isAnswered
|
||||
? "border-green-500 bg-green-50/50"
|
||||
: "border-slate-200 bg-white/50",
|
||||
)}
|
||||
>
|
||||
<div className="mb-2 flex items-start gap-2">
|
||||
{isAnswered ? (
|
||||
<CheckCircleIcon
|
||||
size={16}
|
||||
className="mt-0.5 text-green-500"
|
||||
weight="bold"
|
||||
/>
|
||||
) : (
|
||||
<div className="mt-0.5 flex h-4 w-4 items-center justify-center rounded-full border border-slate-300 bg-white text-xs text-slate-500">
|
||||
{index + 1}
|
||||
</div>
|
||||
)}
|
||||
<div className="flex-1">
|
||||
<Text
|
||||
variant="small"
|
||||
className="mb-2 font-semibold text-slate-900"
|
||||
>
|
||||
{q.question}
|
||||
</Text>
|
||||
{q.example && (
|
||||
<Text
|
||||
variant="small"
|
||||
className="mb-2 italic text-slate-500"
|
||||
>
|
||||
Example: {q.example}
|
||||
</Text>
|
||||
)}
|
||||
<Input
|
||||
type="textarea"
|
||||
id={`clarification-${q.keyword}-${index}`}
|
||||
label={q.question}
|
||||
hideLabel
|
||||
placeholder="Your answer..."
|
||||
rows={2}
|
||||
value={answers[q.keyword] || ""}
|
||||
onChange={(e) =>
|
||||
handleAnswerChange(q.keyword, e.target.value)
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
|
||||
<div className="flex gap-2">
|
||||
<Button
|
||||
onClick={handleSubmit}
|
||||
disabled={!allAnswered}
|
||||
className="flex-1"
|
||||
variant="primary"
|
||||
>
|
||||
Submit Answers
|
||||
</Button>
|
||||
{onCancel && (
|
||||
<Button onClick={onCancel} variant="outline">
|
||||
Cancel
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</Card>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,71 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
|
||||
import { environment } from "@/services/environment";
|
||||
import { PostHogProvider as PHProvider } from "@posthog/react";
|
||||
import { usePathname, useSearchParams } from "next/navigation";
|
||||
import posthog from "posthog-js";
|
||||
import { ReactNode, useEffect, useRef } from "react";
|
||||
|
||||
export function PostHogProvider({ children }: { children: ReactNode }) {
|
||||
const isPostHogEnabled = environment.isPostHogEnabled();
|
||||
|
||||
useEffect(() => {
|
||||
if (process.env.NEXT_PUBLIC_POSTHOG_KEY) {
|
||||
posthog.init(process.env.NEXT_PUBLIC_POSTHOG_KEY, {
|
||||
api_host: process.env.NEXT_PUBLIC_POSTHOG_HOST,
|
||||
defaults: "2025-11-30",
|
||||
capture_pageview: false,
|
||||
capture_pageleave: true,
|
||||
autocapture: true,
|
||||
});
|
||||
}
|
||||
}, []);
|
||||
|
||||
if (!isPostHogEnabled) return <>{children}</>;
|
||||
|
||||
return <PHProvider client={posthog}>{children}</PHProvider>;
|
||||
}
|
||||
|
||||
export function PostHogUserTracker() {
|
||||
const { user, isUserLoading } = useSupabase();
|
||||
const previousUserIdRef = useRef<string | null>(null);
|
||||
const isPostHogEnabled = environment.isPostHogEnabled();
|
||||
|
||||
useEffect(() => {
|
||||
if (isUserLoading || !isPostHogEnabled) return;
|
||||
|
||||
if (user) {
|
||||
if (previousUserIdRef.current !== user.id) {
|
||||
posthog.identify(user.id, {
|
||||
email: user.email,
|
||||
...(user.user_metadata?.name && { name: user.user_metadata.name }),
|
||||
});
|
||||
previousUserIdRef.current = user.id;
|
||||
}
|
||||
} else if (previousUserIdRef.current !== null) {
|
||||
posthog.reset();
|
||||
previousUserIdRef.current = null;
|
||||
}
|
||||
}, [user, isUserLoading, isPostHogEnabled]);
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
export function PostHogPageViewTracker() {
|
||||
const pathname = usePathname();
|
||||
const searchParams = useSearchParams();
|
||||
const isPostHogEnabled = environment.isPostHogEnabled();
|
||||
|
||||
useEffect(() => {
|
||||
if (pathname && isPostHogEnabled) {
|
||||
let url = window.origin + pathname;
|
||||
if (searchParams && searchParams.toString()) {
|
||||
url = url + `?${searchParams.toString()}`;
|
||||
}
|
||||
posthog.capture("$pageview", { $current_url: url });
|
||||
}
|
||||
}, [pathname, searchParams, isPostHogEnabled]);
|
||||
|
||||
return null;
|
||||
}
|
||||
@@ -76,13 +76,6 @@ function getPreviewStealingDev() {
return branch;
}

function getPostHogCredentials() {
return {
key: process.env.NEXT_PUBLIC_POSTHOG_KEY,
host: process.env.NEXT_PUBLIC_POSTHOG_HOST,
};
}

function isProductionBuild() {
return process.env.NODE_ENV === "production";
}
@@ -123,13 +116,6 @@ function areFeatureFlagsEnabled() {
return process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "enabled";
}

function isPostHogEnabled() {
const inCloud = isCloud();
const key = process.env.NEXT_PUBLIC_POSTHOG_KEY;
const host = process.env.NEXT_PUBLIC_POSTHOG_HOST;
return inCloud && key && host;
}

export const environment = {
// Generic
getEnvironmentStr,
@@ -142,7 +128,6 @@ export const environment = {
getSupabaseUrl,
getSupabaseAnonKey,
getPreviewStealingDev,
getPostHogCredentials,
// Assertions
isServerSide,
isClientSide,
@@ -153,6 +138,5 @@ export const environment = {
isCloud,
isLocal,
isVercelPreview,
isPostHogEnabled,
areFeatureFlagsEnabled,
};